1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
64 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
67 #define min(A,B) ((A) < (B) ? (A) : (B))
68 #define max(A,B) ((A) > (B) ? (A) : (B))
70 /* Structure used to define the rs6000 stack */
71 typedef struct rs6000_stack {
72 int first_gp_reg_save; /* first callee saved GP register used */
73 int first_fp_reg_save; /* first callee saved FP register used */
74 int first_altivec_reg_save; /* first callee saved AltiVec register used */
75 int lr_save_p; /* true if the link reg needs to be saved */
76 int cr_save_p; /* true if the CR reg needs to be saved */
77 unsigned int vrsave_mask; /* mask of vec registers to save */
78 int toc_save_p; /* true if the TOC needs to be saved */
79 int push_p; /* true if we need to allocate stack space */
80 int calls_p; /* true if the function makes any calls */
81 enum rs6000_abi abi; /* which ABI to use */
82 int gp_save_offset; /* offset to save GP regs from initial SP */
83 int fp_save_offset; /* offset to save FP regs from initial SP */
84 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
85 int lr_save_offset; /* offset to save LR from initial SP */
86 int cr_save_offset; /* offset to save CR from initial SP */
87 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
88 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
89 int toc_save_offset; /* offset to save the TOC pointer */
90 int varargs_save_offset; /* offset to save the varargs registers */
91 int ehrd_offset; /* offset to EH return data */
92 int reg_size; /* register size (4 or 8) */
93 int varargs_size; /* size to hold V.4 args passed in regs */
94 HOST_WIDE_INT vars_size; /* variable save area size */
95 int parm_size; /* outgoing parameter size */
96 int save_size; /* save area size */
97 int fixed_size; /* fixed size of stack frame */
98 int gp_size; /* size of saved GP registers */
99 int fp_size; /* size of saved FP registers */
100 int altivec_size; /* size of saved AltiVec registers */
101 int cr_size; /* size to hold CR if not in save_size */
102 int lr_size; /* size to hold LR if not in save_size */
103 int vrsave_size; /* size to hold VRSAVE if not in save_size */
104 int altivec_padding_size; /* size of altivec alignment padding if
106 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
107 int spe_padding_size;
108 int toc_size; /* size to hold TOC if not in save_size */
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used;
113 /* Target cpu type */
115 enum processor_type rs6000_cpu;
116 struct rs6000_cpu_select rs6000_select[3] =
118 /* switch name, tune arch */
119 { (const char *)0, "--with-cpu=", 1, 1 },
120 { (const char *)0, "-mcpu=", 1, 1 },
121 { (const char *)0, "-mtune=", 1, 0 },
124 /* Always emit branch hint bits. */
125 static GTY(()) bool rs6000_always_hint;
127 /* Schedule instructions for group formation. */
128 static GTY(()) bool rs6000_sched_groups;
130 /* Support adjust_priority scheduler hook
131 and -mprioritize-restricted-insns= option. */
132 const char *rs6000_sched_restricted_insns_priority_str;
133 int rs6000_sched_restricted_insns_priority;
135 /* Support for -msched-costly-dep option. */
136 const char *rs6000_sched_costly_dep_str;
137 enum rs6000_dependence_cost rs6000_sched_costly_dep;
139 /* Support for -minsert-sched-nops option. */
140 const char *rs6000_sched_insert_nops_str;
141 enum rs6000_nop_insertion rs6000_sched_insert_nops;
143 /* Size of long double */
144 const char *rs6000_long_double_size_string;
145 int rs6000_long_double_type_size;
147 /* Whether -mabi=altivec has appeared */
148 int rs6000_altivec_abi;
150 /* Whether VRSAVE instructions should be generated. */
151 int rs6000_altivec_vrsave;
153 /* String from -mvrsave= option. */
154 const char *rs6000_altivec_vrsave_string;
156 /* Nonzero if we want SPE ABI extensions. */
159 /* Whether isel instructions should be generated. */
162 /* Whether SPE simd instructions should be generated. */
165 /* Nonzero if floating point operations are done in the GPRs. */
166 int rs6000_float_gprs = 0;
168 /* String from -mfloat-gprs=. */
169 const char *rs6000_float_gprs_string;
171 /* String from -misel=. */
172 const char *rs6000_isel_string;
174 /* String from -mspe=. */
175 const char *rs6000_spe_string;
177 /* Set to nonzero once AIX common-mode calls have been defined. */
178 static GTY(()) int common_mode_defined;
180 /* Save information from a "cmpxx" operation until the branch or scc is
182 rtx rs6000_compare_op0, rs6000_compare_op1;
183 int rs6000_compare_fp_p;
185 /* Label number of label created for -mrelocatable, to call to so we can
186 get the address of the GOT section */
187 int rs6000_pic_labelno;
190 /* Which abi to adhere to */
191 const char *rs6000_abi_name;
193 /* Semantics of the small data area */
194 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
196 /* Which small data model to use */
197 const char *rs6000_sdata_name = (char *)0;
199 /* Counter for labels which are to be placed in .fixup. */
200 int fixuplabelno = 0;
203 /* Bit size of immediate TLS offsets and string from which it is decoded. */
204 int rs6000_tls_size = 32;
205 const char *rs6000_tls_size_string;
207 /* ABI enumeration available for subtarget to use. */
208 enum rs6000_abi rs6000_current_abi;
210 /* ABI string from -mabi= option. */
211 const char *rs6000_abi_string;
214 const char *rs6000_debug_name;
215 int rs6000_debug_stack; /* debug stack applications */
216 int rs6000_debug_arg; /* debug argument handling */
219 static GTY(()) tree opaque_V2SI_type_node;
220 static GTY(()) tree opaque_V2SF_type_node;
221 static GTY(()) tree opaque_p_V2SI_type_node;
222 static GTY(()) tree V16QI_type_node;
223 static GTY(()) tree V2SI_type_node;
224 static GTY(()) tree V2SF_type_node;
225 static GTY(()) tree V4HI_type_node;
226 static GTY(()) tree V4SI_type_node;
227 static GTY(()) tree V4SF_type_node;
228 static GTY(()) tree V8HI_type_node;
229 static GTY(()) tree unsigned_V16QI_type_node;
230 static GTY(()) tree unsigned_V8HI_type_node;
231 static GTY(()) tree unsigned_V4SI_type_node;
232 static GTY(()) tree bool_char_type_node; /* __bool char */
233 static GTY(()) tree bool_short_type_node; /* __bool short */
234 static GTY(()) tree bool_int_type_node; /* __bool int */
235 static GTY(()) tree pixel_type_node; /* __pixel */
236 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
237 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
238 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
239 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
241 int rs6000_warn_altivec_long = 1; /* On by default. */
242 const char *rs6000_warn_altivec_long_switch;
244 const char *rs6000_traceback_name;
246 traceback_default = 0,
252 /* Flag to say the TOC is initialized */
254 char toc_label_name[10];
256 /* Alias set for saves and restores from the rs6000 stack. */
257 static GTY(()) int rs6000_sr_alias_set;
259 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
260 The only place that looks at this is rs6000_set_default_type_attributes;
261 everywhere else should rely on the presence or absence of a longcall
262 attribute on the function declaration. */
263 int rs6000_default_long_calls;
264 const char *rs6000_longcall_switch;
266 /* Control alignment for fields within structures. */
267 /* String from -malign-XXXXX. */
268 const char *rs6000_alignment_string;
269 int rs6000_alignment_flags;
271 struct builtin_description
273 /* mask is not const because we're going to alter it below. This
274 nonsense will go away when we rewrite the -march infrastructure
275 to give us more target flag bits. */
277 const enum insn_code icode;
278 const char *const name;
279 const enum rs6000_builtins code;
282 static bool rs6000_function_ok_for_sibcall (tree, tree);
283 static int num_insns_constant_wide (HOST_WIDE_INT);
284 static void validate_condition_mode (enum rtx_code, enum machine_mode);
285 static rtx rs6000_generate_compare (enum rtx_code);
286 static void rs6000_maybe_dead (rtx);
287 static void rs6000_emit_stack_tie (void);
288 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
289 static rtx spe_synthesize_frame_save (rtx);
290 static bool spe_func_has_64bit_regs_p (void);
291 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
293 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
294 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
295 static unsigned rs6000_hash_constant (rtx);
296 static unsigned toc_hash_function (const void *);
297 static int toc_hash_eq (const void *, const void *);
298 static int constant_pool_expr_1 (rtx, int *, int *);
299 static bool constant_pool_expr_p (rtx);
300 static bool toc_relative_expr_p (rtx);
301 static bool legitimate_small_data_p (enum machine_mode, rtx);
302 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
303 static bool legitimate_indexed_address_p (rtx, int);
304 static bool legitimate_indirect_address_p (rtx, int);
305 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
306 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
307 static struct machine_function * rs6000_init_machine_status (void);
308 static bool rs6000_assemble_integer (rtx, unsigned int, int);
309 #ifdef HAVE_GAS_HIDDEN
310 static void rs6000_assemble_visibility (tree, int);
312 static int rs6000_ra_ever_killed (void);
313 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
314 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
315 static const char *rs6000_mangle_fundamental_type (tree);
316 extern const struct attribute_spec rs6000_attribute_table[];
317 static void rs6000_set_default_type_attributes (tree);
318 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
319 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
320 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
322 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
323 static bool rs6000_return_in_memory (tree, tree);
324 static void rs6000_file_start (void);
326 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
327 static void rs6000_elf_asm_out_constructor (rtx, int);
328 static void rs6000_elf_asm_out_destructor (rtx, int);
329 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
330 static void rs6000_elf_unique_section (tree, int);
331 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
332 unsigned HOST_WIDE_INT);
333 static void rs6000_elf_encode_section_info (tree, rtx, int)
335 static bool rs6000_elf_in_small_data_p (tree);
338 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
339 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
340 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
341 static void rs6000_xcoff_unique_section (tree, int);
342 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
343 unsigned HOST_WIDE_INT);
344 static const char * rs6000_xcoff_strip_name_encoding (const char *);
345 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
346 static void rs6000_xcoff_file_start (void);
347 static void rs6000_xcoff_file_end (void);
350 static bool rs6000_binds_local_p (tree);
352 static int rs6000_use_dfa_pipeline_interface (void);
353 static int rs6000_variable_issue (FILE *, int, rtx, int);
354 static bool rs6000_rtx_costs (rtx, int, int, int *);
355 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
356 static bool is_microcoded_insn (rtx);
357 static int is_dispatch_slot_restricted (rtx);
358 static bool is_cracked_insn (rtx);
359 static bool is_branch_slot_insn (rtx);
360 static int rs6000_adjust_priority (rtx, int);
361 static int rs6000_issue_rate (void);
362 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
363 static rtx get_next_active_insn (rtx, rtx);
364 static bool insn_terminates_group_p (rtx , enum group_termination);
365 static bool is_costly_group (rtx *, rtx);
366 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
367 static int redefine_groups (FILE *, int, rtx, rtx);
368 static int pad_groups (FILE *, int, rtx, rtx);
369 static void rs6000_sched_finish (FILE *, int);
370 static int rs6000_use_sched_lookahead (void);
372 static void rs6000_init_builtins (void);
373 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
374 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
375 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
376 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
377 static void altivec_init_builtins (void);
378 static void rs6000_common_init_builtins (void);
379 static void rs6000_init_libfuncs (void);
381 static void enable_mask_for_builtins (struct builtin_description *, int,
382 enum rs6000_builtins,
383 enum rs6000_builtins);
384 static tree build_opaque_vector_type (tree, int);
385 static void spe_init_builtins (void);
386 static rtx spe_expand_builtin (tree, rtx, bool *);
387 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
388 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
389 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
390 static rs6000_stack_t *rs6000_stack_info (void);
391 static void debug_stack_info (rs6000_stack_t *);
393 static rtx altivec_expand_builtin (tree, rtx, bool *);
394 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
395 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
396 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
397 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
398 static rtx altivec_expand_predicate_builtin (enum insn_code,
399 const char *, tree, rtx);
400 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
401 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
402 static void rs6000_parse_abi_options (void);
403 static void rs6000_parse_alignment_option (void);
404 static void rs6000_parse_tls_size_option (void);
405 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
406 static int first_altivec_reg_to_save (void);
407 static unsigned int compute_vrsave_mask (void);
408 static void is_altivec_return_reg (rtx, void *);
409 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
410 int easy_vector_constant (rtx, enum machine_mode);
411 static int easy_vector_same (rtx, enum machine_mode);
412 static int easy_vector_splat_const (int, enum machine_mode);
413 static bool is_ev64_opaque_type (tree);
414 static rtx rs6000_dwarf_register_span (rtx);
415 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
416 static rtx rs6000_tls_get_addr (void);
417 static rtx rs6000_got_sym (void);
418 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
419 static const char *rs6000_get_some_local_dynamic_name (void);
420 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
421 static rtx rs6000_complex_function_value (enum machine_mode);
422 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
423 enum machine_mode, tree);
424 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
425 enum machine_mode, tree, int);
426 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
427 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
428 enum machine_mode, tree,
431 static void macho_branch_islands (void);
432 static void add_compiler_branch_island (tree, tree, int);
433 static int no_previous_def (tree function_name);
434 static tree get_prev_label (tree function_name);
437 static tree rs6000_build_builtin_va_list (void);
439 /* Hash table stuff for keeping track of TOC entries. */
441 struct toc_hash_struct GTY(())
443 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
444 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
446 enum machine_mode key_mode;
450 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
452 /* Default register names. */
453 char rs6000_reg_names[][8] =
455 "0", "1", "2", "3", "4", "5", "6", "7",
456 "8", "9", "10", "11", "12", "13", "14", "15",
457 "16", "17", "18", "19", "20", "21", "22", "23",
458 "24", "25", "26", "27", "28", "29", "30", "31",
459 "0", "1", "2", "3", "4", "5", "6", "7",
460 "8", "9", "10", "11", "12", "13", "14", "15",
461 "16", "17", "18", "19", "20", "21", "22", "23",
462 "24", "25", "26", "27", "28", "29", "30", "31",
463 "mq", "lr", "ctr","ap",
464 "0", "1", "2", "3", "4", "5", "6", "7",
466 /* AltiVec registers. */
467 "0", "1", "2", "3", "4", "5", "6", "7",
468 "8", "9", "10", "11", "12", "13", "14", "15",
469 "16", "17", "18", "19", "20", "21", "22", "23",
470 "24", "25", "26", "27", "28", "29", "30", "31",
476 #ifdef TARGET_REGNAMES
477 static const char alt_reg_names[][8] =
479 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
480 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
481 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
482 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
483 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
484 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
485 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
486 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
487 "mq", "lr", "ctr", "ap",
488 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
490 /* AltiVec registers. */
491 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
492 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
493 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
494 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
501 #ifndef MASK_STRICT_ALIGN
502 #define MASK_STRICT_ALIGN 0
504 #ifndef TARGET_PROFILE_KERNEL
505 #define TARGET_PROFILE_KERNEL 0
508 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
509 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
511 /* Return 1 for a symbol ref for a thread-local storage symbol. */
512 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
513 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
515 /* Initialize the GCC target structure. */
516 #undef TARGET_ATTRIBUTE_TABLE
517 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
518 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
519 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
521 #undef TARGET_ASM_ALIGNED_DI_OP
522 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
524 /* Default unaligned ops are only provided for ELF. Find the ops needed
525 for non-ELF systems. */
526 #ifndef OBJECT_FORMAT_ELF
528 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
530 #undef TARGET_ASM_UNALIGNED_HI_OP
531 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
532 #undef TARGET_ASM_UNALIGNED_SI_OP
533 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
534 #undef TARGET_ASM_UNALIGNED_DI_OP
535 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
538 #undef TARGET_ASM_UNALIGNED_HI_OP
539 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
540 #undef TARGET_ASM_UNALIGNED_SI_OP
541 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
545 /* This hook deals with fixups for relocatable code and DI-mode objects
547 #undef TARGET_ASM_INTEGER
548 #define TARGET_ASM_INTEGER rs6000_assemble_integer
550 #ifdef HAVE_GAS_HIDDEN
551 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
552 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
555 #undef TARGET_HAVE_TLS
556 #define TARGET_HAVE_TLS HAVE_AS_TLS
558 #undef TARGET_CANNOT_FORCE_CONST_MEM
559 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
561 #undef TARGET_ASM_FUNCTION_PROLOGUE
562 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
563 #undef TARGET_ASM_FUNCTION_EPILOGUE
564 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
566 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
567 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
568 #undef TARGET_SCHED_VARIABLE_ISSUE
569 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
571 #undef TARGET_SCHED_ISSUE_RATE
572 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
573 #undef TARGET_SCHED_ADJUST_COST
574 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
575 #undef TARGET_SCHED_ADJUST_PRIORITY
576 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
577 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
578 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
579 #undef TARGET_SCHED_FINISH
580 #define TARGET_SCHED_FINISH rs6000_sched_finish
582 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
583 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
585 #undef TARGET_INIT_BUILTINS
586 #define TARGET_INIT_BUILTINS rs6000_init_builtins
588 #undef TARGET_EXPAND_BUILTIN
589 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
591 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
592 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
594 #undef TARGET_INIT_LIBFUNCS
595 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
598 #undef TARGET_BINDS_LOCAL_P
599 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
602 #undef TARGET_ASM_OUTPUT_MI_THUNK
603 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
605 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
606 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
608 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
609 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
611 #undef TARGET_RTX_COSTS
612 #define TARGET_RTX_COSTS rs6000_rtx_costs
613 #undef TARGET_ADDRESS_COST
614 #define TARGET_ADDRESS_COST hook_int_rtx_0
616 #undef TARGET_VECTOR_OPAQUE_P
617 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
619 #undef TARGET_DWARF_REGISTER_SPAN
620 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
622 /* On rs6000, function arguments are promoted, as are function return
624 #undef TARGET_PROMOTE_FUNCTION_ARGS
625 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
626 #undef TARGET_PROMOTE_FUNCTION_RETURN
627 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
629 #undef TARGET_RETURN_IN_MEMORY
630 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
632 #undef TARGET_SETUP_INCOMING_VARARGS
633 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
635 /* Always strict argument naming on rs6000. */
636 #undef TARGET_STRICT_ARGUMENT_NAMING
637 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
638 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
639 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
640 #undef TARGET_SPLIT_COMPLEX_ARG
641 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
643 #undef TARGET_BUILD_BUILTIN_VA_LIST
644 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
646 struct gcc_target targetm = TARGET_INITIALIZER;
648 /* Override command line options. Mostly we process the processor
649 type and sometimes adjust other TARGET_ options. */
652 rs6000_override_options (const char *default_cpu)
655 struct rs6000_cpu_select *ptr;
658 /* Simplifications for entries below. */
661 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
662 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
665 /* This table occasionally claims that a processor does not support
666 a particular feature even though it does, but the feature is slower
667 than the alternative. Thus, it shouldn't be relied on as a
668 complete description of the processor's support.
670 Please keep this list in order, and don't forget to update the
671 documentation in invoke.texi when adding a new processor or
675 const char *const name; /* Canonical processor name. */
676 const enum processor_type processor; /* Processor type enum value. */
677 const int target_enable; /* Target flags to enable. */
678 } const processor_target_table[]
679 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
680 {"403", PROCESSOR_PPC403,
681 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
682 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
683 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
684 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
685 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
686 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
687 {"601", PROCESSOR_PPC601,
688 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
689 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
691 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
692 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
693 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
694 {"620", PROCESSOR_PPC620,
695 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
696 {"630", PROCESSOR_PPC630,
697 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
698 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
699 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
700 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
701 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
702 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
703 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
704 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
705 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
706 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
707 {"970", PROCESSOR_POWER4,
708 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
709 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
710 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
711 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
712 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
713 {"G5", PROCESSOR_POWER4,
714 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
715 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
716 {"power2", PROCESSOR_POWER,
717 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
718 {"power3", PROCESSOR_PPC630,
719 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
720 {"power4", PROCESSOR_POWER4,
721 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
722 {"power5", PROCESSOR_POWER5,
723 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
724 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
725 {"powerpc64", PROCESSOR_POWERPC64,
726 POWERPC_BASE_MASK | MASK_POWERPC64},
727 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
728 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
729 {"rios2", PROCESSOR_RIOS2,
730 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
731 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
732 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
733 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
736 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
738 /* Some OSs don't support saving the high part of 64-bit registers on
739 context switch. Other OSs don't support saving Altivec registers.
740 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
741 settings; if the user wants either, the user must explicitly specify
742 them and we won't interfere with the user's specification. */
745 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
746 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
747 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
750 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
751 #ifdef OS_MISSING_POWERPC64
752 if (OS_MISSING_POWERPC64)
753 set_masks &= ~MASK_POWERPC64;
755 #ifdef OS_MISSING_ALTIVEC
756 if (OS_MISSING_ALTIVEC)
757 set_masks &= ~MASK_ALTIVEC;
760 /* Don't override these by the processor default if given explicitly. */
761 set_masks &= ~(target_flags_explicit
762 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
764 /* Identify the processor type. */
765 rs6000_select[0].string = default_cpu;
766 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
768 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
770 ptr = &rs6000_select[i];
771 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
773 for (j = 0; j < ptt_size; j++)
774 if (! strcmp (ptr->string, processor_target_table[j].name))
777 rs6000_cpu = processor_target_table[j].processor;
781 target_flags &= ~set_masks;
782 target_flags |= (processor_target_table[j].target_enable
789 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
796 /* If we are optimizing big endian systems for space, use the load/store
797 multiple and string instructions. */
798 if (BYTES_BIG_ENDIAN && optimize_size)
799 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
801 /* Don't allow -mmultiple or -mstring on little endian systems
802 unless the cpu is a 750, because the hardware doesn't support the
803 instructions used in little endian mode, and causes an alignment
804 trap. The 750 does not cause an alignment trap (except when the
805 target is unaligned). */
807 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
811 target_flags &= ~MASK_MULTIPLE;
812 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
813 warning ("-mmultiple is not supported on little endian systems");
818 target_flags &= ~MASK_STRING;
819 if ((target_flags_explicit & MASK_STRING) != 0)
820 warning ("-mstring is not supported on little endian systems");
824 /* Set debug flags */
825 if (rs6000_debug_name)
827 if (! strcmp (rs6000_debug_name, "all"))
828 rs6000_debug_stack = rs6000_debug_arg = 1;
829 else if (! strcmp (rs6000_debug_name, "stack"))
830 rs6000_debug_stack = 1;
831 else if (! strcmp (rs6000_debug_name, "arg"))
832 rs6000_debug_arg = 1;
834 error ("unknown -mdebug-%s switch", rs6000_debug_name);
837 if (rs6000_traceback_name)
839 if (! strncmp (rs6000_traceback_name, "full", 4))
840 rs6000_traceback = traceback_full;
841 else if (! strncmp (rs6000_traceback_name, "part", 4))
842 rs6000_traceback = traceback_part;
843 else if (! strncmp (rs6000_traceback_name, "no", 2))
844 rs6000_traceback = traceback_none;
846 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
847 rs6000_traceback_name);
850 /* Set size of long double */
851 rs6000_long_double_type_size = 64;
852 if (rs6000_long_double_size_string)
855 int size = strtol (rs6000_long_double_size_string, &tail, 10);
856 if (*tail != '\0' || (size != 64 && size != 128))
857 error ("Unknown switch -mlong-double-%s",
858 rs6000_long_double_size_string);
860 rs6000_long_double_type_size = size;
863 /* Set Altivec ABI as default for powerpc64 linux. */
864 if (TARGET_ELF && TARGET_64BIT)
866 rs6000_altivec_abi = 1;
867 rs6000_altivec_vrsave = 1;
870 /* Handle -mabi= options. */
871 rs6000_parse_abi_options ();
873 /* Handle -malign-XXXXX option. */
874 rs6000_parse_alignment_option ();
876 /* Handle generic -mFOO=YES/NO options. */
877 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
878 &rs6000_altivec_vrsave);
879 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
881 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
882 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
885 /* Handle -mtls-size option. */
886 rs6000_parse_tls_size_option ();
888 #ifdef SUBTARGET_OVERRIDE_OPTIONS
889 SUBTARGET_OVERRIDE_OPTIONS;
891 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
892 SUBSUBTARGET_OVERRIDE_OPTIONS;
898 error ("AltiVec and E500 instructions cannot coexist");
900 /* The e500 does not have string instructions, and we set
901 MASK_STRING above when optimizing for size. */
902 if ((target_flags & MASK_STRING) != 0)
903 target_flags = target_flags & ~MASK_STRING;
905 /* No SPE means 64-bit long doubles, even if an E500. */
906 if (rs6000_spe_string != 0
907 && !strcmp (rs6000_spe_string, "no"))
908 rs6000_long_double_type_size = 64;
910 else if (rs6000_select[1].string != NULL)
912 /* For the powerpc-eabispe configuration, we set all these by
913 default, so let's unset them if we manually set another
914 CPU that is not the E500. */
915 if (rs6000_abi_string == 0)
917 if (rs6000_spe_string == 0)
919 if (rs6000_float_gprs_string == 0)
920 rs6000_float_gprs = 0;
921 if (rs6000_isel_string == 0)
923 if (rs6000_long_double_size_string == 0)
924 rs6000_long_double_type_size = 64;
927 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
928 && rs6000_cpu != PROCESSOR_POWER5);
929 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
930 || rs6000_cpu == PROCESSOR_POWER5);
932 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
933 using TARGET_OPTIONS to handle a toggle switch, but we're out of
934 bits in target_flags so TARGET_SWITCHES cannot be used.
935 Assumption here is that rs6000_longcall_switch points into the
936 text of the complete option, rather than being a copy, so we can
937 scan back for the presence or absence of the no- modifier. */
938 if (rs6000_longcall_switch)
940 const char *base = rs6000_longcall_switch;
941 while (base[-1] != 'm') base--;
943 if (*rs6000_longcall_switch != '\0')
944 error ("invalid option `%s'", base);
945 rs6000_default_long_calls = (base[0] != 'n');
948 /* Handle -m(no-)warn-altivec-long similarly. */
949 if (rs6000_warn_altivec_long_switch)
951 const char *base = rs6000_warn_altivec_long_switch;
952 while (base[-1] != 'm') base--;
954 if (*rs6000_warn_altivec_long_switch != '\0')
955 error ("invalid option `%s'", base);
956 rs6000_warn_altivec_long = (base[0] != 'n');
959 /* Handle -mprioritize-restricted-insns option. */
960 rs6000_sched_restricted_insns_priority
961 = (rs6000_sched_groups ? 1 : 0);
962 if (rs6000_sched_restricted_insns_priority_str)
963 rs6000_sched_restricted_insns_priority =
964 atoi (rs6000_sched_restricted_insns_priority_str);
966 /* Handle -msched-costly-dep option. */
967 rs6000_sched_costly_dep
968 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
969 if (rs6000_sched_costly_dep_str)
971 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
972 rs6000_sched_costly_dep = no_dep_costly;
973 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
974 rs6000_sched_costly_dep = all_deps_costly;
975 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
976 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
977 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
978 rs6000_sched_costly_dep = store_to_load_dep_costly;
980 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
983 /* Handle -minsert-sched-nops option. */
984 rs6000_sched_insert_nops
985 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
986 if (rs6000_sched_insert_nops_str)
988 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
989 rs6000_sched_insert_nops = sched_finish_none;
990 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
991 rs6000_sched_insert_nops = sched_finish_pad_groups;
992 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
993 rs6000_sched_insert_nops = sched_finish_regroup_exact;
995 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
998 #ifdef TARGET_REGNAMES
999 /* If the user desires alternate register names, copy in the
1000 alternate names now. */
1001 if (TARGET_REGNAMES)
1002 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1005 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1006 If -maix-struct-return or -msvr4-struct-return was explicitly
1007 used, don't override with the ABI default. */
1008 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1010 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1011 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1013 target_flags |= MASK_AIX_STRUCT_RET;
1016 if (TARGET_LONG_DOUBLE_128
1017 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1018 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1020 /* Allocate an alias set for register saves & restores from stack. */
1021 rs6000_sr_alias_set = new_alias_set ();
1024 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1026 /* We can only guarantee the availability of DI pseudo-ops when
1027 assembling for 64-bit targets. */
1030 targetm.asm_out.aligned_op.di = NULL;
1031 targetm.asm_out.unaligned_op.di = NULL;
1034 /* Set maximum branch target alignment at two instructions, eight bytes. */
1035 align_jumps_max_skip = 8;
1036 align_loops_max_skip = 8;
1038 /* Arrange to save and restore machine status around nested functions. */
1039 init_machine_status = rs6000_init_machine_status;
1041 /* We should always be splitting complex arguments, but we can't break
1042 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1043 if (DEFAULT_ABI != ABI_AIX)
1044 targetm.calls.split_complex_arg = NULL;
1047 /* Handle generic options of the form -mfoo=yes/no.
1048 NAME is the option name.
1049 VALUE is the option value.
1050 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1051 whether the option value is 'yes' or 'no' respectively. */
1053 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1057 else if (!strcmp (value, "yes"))
1059 else if (!strcmp (value, "no"))
1062 error ("unknown -m%s= option specified: '%s'", name, value);
1065 /* Handle -mabi= options. */
1067 rs6000_parse_abi_options (void)
1069 if (rs6000_abi_string == 0)
1071 else if (! strcmp (rs6000_abi_string, "altivec"))
1073 rs6000_altivec_abi = 1;
1076 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1077 rs6000_altivec_abi = 0;
1078 else if (! strcmp (rs6000_abi_string, "spe"))
1081 rs6000_altivec_abi = 0;
1082 if (!TARGET_SPE_ABI)
1083 error ("not configured for ABI: '%s'", rs6000_abi_string);
1086 else if (! strcmp (rs6000_abi_string, "no-spe"))
1089 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1092 /* Handle -malign-XXXXXX options. */
1094 rs6000_parse_alignment_option (void)
1096 if (rs6000_alignment_string == 0)
1098 else if (! strcmp (rs6000_alignment_string, "power"))
1099 rs6000_alignment_flags = MASK_ALIGN_POWER;
1100 else if (! strcmp (rs6000_alignment_string, "natural"))
1101 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1103 error ("unknown -malign-XXXXX option specified: '%s'",
1104 rs6000_alignment_string);
1107 /* Validate and record the size specified with the -mtls-size option. */
1110 rs6000_parse_tls_size_option (void)
1112 if (rs6000_tls_size_string == 0)
1114 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1115 rs6000_tls_size = 16;
1116 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1117 rs6000_tls_size = 32;
1118 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1119 rs6000_tls_size = 64;
1121 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1125 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1129 /* Do anything needed at the start of the asm file. */
1132 rs6000_file_start (void)
1136 const char *start = buffer;
1137 struct rs6000_cpu_select *ptr;
1138 const char *default_cpu = TARGET_CPU_DEFAULT;
1139 FILE *file = asm_out_file;
1141 default_file_start ();
1143 #ifdef TARGET_BI_ARCH
1144 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1148 if (flag_verbose_asm)
1150 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1151 rs6000_select[0].string = default_cpu;
1153 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1155 ptr = &rs6000_select[i];
1156 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1158 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1163 #ifdef USING_ELFOS_H
1164 switch (rs6000_sdata)
1166 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1167 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1168 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1169 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1172 if (rs6000_sdata && g_switch_value)
1174 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1185 /* Return nonzero if this function is known to have a null epilogue. */
1188 direct_return (void)
1190 if (reload_completed)
1192 rs6000_stack_t *info = rs6000_stack_info ();
1194 if (info->first_gp_reg_save == 32
1195 && info->first_fp_reg_save == 64
1196 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1197 && ! info->lr_save_p
1198 && ! info->cr_save_p
1199 && info->vrsave_mask == 0
1207 /* Returns 1 always. */
1210 any_operand (rtx op ATTRIBUTE_UNUSED,
1211 enum machine_mode mode ATTRIBUTE_UNUSED)
1216 /* Returns 1 if op is the count register. */
1218 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1220 if (GET_CODE (op) != REG)
1223 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1226 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1232 /* Returns 1 if op is an altivec register. */
1234 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1237 return (register_operand (op, mode)
1238 && (GET_CODE (op) != REG
1239 || REGNO (op) > FIRST_PSEUDO_REGISTER
1240 || ALTIVEC_REGNO_P (REGNO (op))));
1244 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1246 if (GET_CODE (op) != REG)
1249 if (XER_REGNO_P (REGNO (op)))
1255 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1256 by such constants completes more quickly. */
1259 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1261 return ( GET_CODE (op) == CONST_INT
1262 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1265 /* Return 1 if OP is a constant that can fit in a D field. */
1268 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1270 return (GET_CODE (op) == CONST_INT
1271 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1274 /* Similar for an unsigned D field. */
1277 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1279 return (GET_CODE (op) == CONST_INT
1280 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1283 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1286 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1288 return (GET_CODE (op) == CONST_INT
1289 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1292 /* Returns 1 if OP is a CONST_INT that is a positive value
1293 and an exact power of 2. */
1296 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1298 return (GET_CODE (op) == CONST_INT
1300 && exact_log2 (INTVAL (op)) >= 0);
1303 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1307 gpc_reg_operand (rtx op, enum machine_mode mode)
1309 return (register_operand (op, mode)
1310 && (GET_CODE (op) != REG
1311 || (REGNO (op) >= ARG_POINTER_REGNUM
1312 && !XER_REGNO_P (REGNO (op)))
1313 || REGNO (op) < MQ_REGNO));
1316 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1320 cc_reg_operand (rtx op, enum machine_mode mode)
1322 return (register_operand (op, mode)
1323 && (GET_CODE (op) != REG
1324 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1325 || CR_REGNO_P (REGNO (op))));
1328 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1329 CR field that isn't CR0. */
1332 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1334 return (register_operand (op, mode)
1335 && (GET_CODE (op) != REG
1336 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1337 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1340 /* Returns 1 if OP is either a constant integer valid for a D-field or
1341 a non-special register. If a register, it must be in the proper
1342 mode unless MODE is VOIDmode. */
1345 reg_or_short_operand (rtx op, enum machine_mode mode)
1347 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1350 /* Similar, except check if the negation of the constant would be
1351 valid for a D-field. */
1354 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1356 if (GET_CODE (op) == CONST_INT)
1357 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1359 return gpc_reg_operand (op, mode);
1362 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1363 a non-special register. If a register, it must be in the proper
1364 mode unless MODE is VOIDmode. */
1367 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1369 if (gpc_reg_operand (op, mode))
1371 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1378 /* Return 1 if the operand is either a register or an integer whose
1379 high-order 16 bits are zero. */
1382 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1384 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1387 /* Return 1 is the operand is either a non-special register or ANY
1388 constant integer. */
1391 reg_or_cint_operand (rtx op, enum machine_mode mode)
1393 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1396 /* Return 1 is the operand is either a non-special register or ANY
1397 32-bit signed constant integer. */
1400 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1402 return (gpc_reg_operand (op, mode)
1403 || (GET_CODE (op) == CONST_INT
1404 #if HOST_BITS_PER_WIDE_INT != 32
1405 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1406 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1411 /* Return 1 is the operand is either a non-special register or a 32-bit
1412 signed constant integer valid for 64-bit addition. */
1415 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1417 return (gpc_reg_operand (op, mode)
1418 || (GET_CODE (op) == CONST_INT
1419 #if HOST_BITS_PER_WIDE_INT == 32
1420 && INTVAL (op) < 0x7fff8000
1422 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1428 /* Return 1 is the operand is either a non-special register or a 32-bit
1429 signed constant integer valid for 64-bit subtraction. */
1432 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1434 return (gpc_reg_operand (op, mode)
1435 || (GET_CODE (op) == CONST_INT
1436 #if HOST_BITS_PER_WIDE_INT == 32
1437 && (- INTVAL (op)) < 0x7fff8000
1439 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1445 /* Return 1 is the operand is either a non-special register or ANY
1446 32-bit unsigned constant integer. */
1449 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1451 if (GET_CODE (op) == CONST_INT)
1453 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1455 if (GET_MODE_BITSIZE (mode) <= 32)
1458 if (INTVAL (op) < 0)
1462 return ((INTVAL (op) & GET_MODE_MASK (mode)
1463 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1465 else if (GET_CODE (op) == CONST_DOUBLE)
1467 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1471 return CONST_DOUBLE_HIGH (op) == 0;
1474 return gpc_reg_operand (op, mode);
1477 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1480 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1482 return (GET_CODE (op) == SYMBOL_REF
1483 || GET_CODE (op) == CONST
1484 || GET_CODE (op) == LABEL_REF);
1487 /* Return 1 if the operand is a simple references that can be loaded via
1488 the GOT (labels involving addition aren't allowed). */
1491 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1493 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1496 /* Return the number of instructions it takes to form a constant in an
1497 integer register. */
1500 num_insns_constant_wide (HOST_WIDE_INT value)
1502 /* signed constant loadable with {cal|addi} */
1503 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1506 /* constant loadable with {cau|addis} */
1507 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1510 #if HOST_BITS_PER_WIDE_INT == 64
1511 else if (TARGET_POWERPC64)
1513 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1514 HOST_WIDE_INT high = value >> 31;
1516 if (high == 0 || high == -1)
1522 return num_insns_constant_wide (high) + 1;
1524 return (num_insns_constant_wide (high)
1525 + num_insns_constant_wide (low) + 1);
1534 num_insns_constant (rtx op, enum machine_mode mode)
1536 if (GET_CODE (op) == CONST_INT)
1538 #if HOST_BITS_PER_WIDE_INT == 64
1539 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1540 && mask64_operand (op, mode))
1544 return num_insns_constant_wide (INTVAL (op));
1547 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1552 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1553 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1554 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1557 else if (GET_CODE (op) == CONST_DOUBLE)
1563 int endian = (WORDS_BIG_ENDIAN == 0);
1565 if (mode == VOIDmode || mode == DImode)
1567 high = CONST_DOUBLE_HIGH (op);
1568 low = CONST_DOUBLE_LOW (op);
1572 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1573 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1575 low = l[1 - endian];
1579 return (num_insns_constant_wide (low)
1580 + num_insns_constant_wide (high));
1584 if (high == 0 && low >= 0)
1585 return num_insns_constant_wide (low);
1587 else if (high == -1 && low < 0)
1588 return num_insns_constant_wide (low);
1590 else if (mask64_operand (op, mode))
1594 return num_insns_constant_wide (high) + 1;
1597 return (num_insns_constant_wide (high)
1598 + num_insns_constant_wide (low) + 1);
1606 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1607 register with one instruction per word. We only do this if we can
1608 safely read CONST_DOUBLE_{LOW,HIGH}. */
1611 easy_fp_constant (rtx op, enum machine_mode mode)
1613 if (GET_CODE (op) != CONST_DOUBLE
1614 || GET_MODE (op) != mode
1615 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1618 /* Consider all constants with -msoft-float to be easy. */
1619 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1623 /* If we are using V.4 style PIC, consider all constants to be hard. */
1624 if (flag_pic && DEFAULT_ABI == ABI_V4)
1627 #ifdef TARGET_RELOCATABLE
1628 /* Similarly if we are using -mrelocatable, consider all constants
1630 if (TARGET_RELOCATABLE)
1639 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1640 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1642 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1643 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1644 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1645 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1648 else if (mode == DFmode)
1653 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1654 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1656 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1657 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1660 else if (mode == SFmode)
1665 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1666 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1668 return num_insns_constant_wide (l) == 1;
1671 else if (mode == DImode)
1672 return ((TARGET_POWERPC64
1673 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1674 || (num_insns_constant (op, DImode) <= 2));
1676 else if (mode == SImode)
1682 /* Returns the constant for the splat instruction, if exists. */
1685 easy_vector_splat_const (int cst, enum machine_mode mode)
1690 if (EASY_VECTOR_15 (cst)
1691 || EASY_VECTOR_15_ADD_SELF (cst))
1693 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1697 if (EASY_VECTOR_15 (cst)
1698 || EASY_VECTOR_15_ADD_SELF (cst))
1700 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1704 if (EASY_VECTOR_15 (cst)
1705 || EASY_VECTOR_15_ADD_SELF (cst))
1714 /* Return nonzero if all elements of a vector have the same value. */
1717 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1721 units = CONST_VECTOR_NUNITS (op);
1723 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1724 for (i = 1; i < units; ++i)
1725 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1727 if (i == units && easy_vector_splat_const (cst, mode))
1732 /* Return 1 if the operand is a CONST_INT and can be put into a
1733 register without using memory. */
1736 easy_vector_constant (rtx op, enum machine_mode mode)
1740 if (GET_CODE (op) != CONST_VECTOR
1745 if (zero_constant (op, mode)
1746 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1747 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1750 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1753 if (TARGET_SPE && mode == V1DImode)
1756 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1757 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1759 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1761 evmergelo r0, r0, r0
1764 I don't know how efficient it would be to allow bigger constants,
1765 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1766 instructions is better than a 64-bit memory load, but I don't
1767 have the e500 timing specs. */
1768 if (TARGET_SPE && mode == V2SImode
1769 && cst >= -0x7fff && cst <= 0x7fff
1770 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1774 && easy_vector_same (op, mode))
1776 cst = easy_vector_splat_const (cst, mode);
1777 if (EASY_VECTOR_15_ADD_SELF (cst)
1778 || EASY_VECTOR_15 (cst))
1784 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1787 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1791 && GET_CODE (op) == CONST_VECTOR
1792 && easy_vector_same (op, mode))
1794 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1795 if (EASY_VECTOR_15_ADD_SELF (cst))
1801 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
1804 gen_easy_vector_constant_add_self (rtx op)
1808 units = GET_MODE_NUNITS (GET_MODE (op));
1809 v = rtvec_alloc (units);
1811 for (i = 0; i < units; i++)
1813 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1814 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1818 output_vec_const_move (rtx *operands)
1821 enum machine_mode mode;
1827 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1828 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1829 mode = GET_MODE (dest);
1833 if (zero_constant (vec, mode))
1834 return "vxor %0,%0,%0";
1835 else if (easy_vector_constant (vec, mode))
1837 operands[1] = GEN_INT (cst);
1841 if (EASY_VECTOR_15 (cst))
1843 operands[1] = GEN_INT (cst);
1844 return "vspltisw %0,%1";
1846 else if (EASY_VECTOR_15_ADD_SELF (cst))
1850 if (EASY_VECTOR_15 (cst))
1852 operands[1] = GEN_INT (cst);
1853 return "vspltish %0,%1";
1855 else if (EASY_VECTOR_15_ADD_SELF (cst))
1859 if (EASY_VECTOR_15 (cst))
1861 operands[1] = GEN_INT (cst);
1862 return "vspltisb %0,%1";
1864 else if (EASY_VECTOR_15_ADD_SELF (cst))
1876 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1877 pattern of V1DI, V4HI, and V2SF.
1879 FIXME: We should probably return # and add post reload
1880 splitters for these, but this way is so easy ;-).
1882 operands[1] = GEN_INT (cst);
1883 operands[2] = GEN_INT (cst2);
1885 return "li %0,%1\n\tevmergelo %0,%0,%0";
1887 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1893 /* Return 1 if the operand is the constant 0. This works for scalars
1894 as well as vectors. */
1896 zero_constant (rtx op, enum machine_mode mode)
1898 return op == CONST0_RTX (mode);
1901 /* Return 1 if the operand is 0.0. */
1903 zero_fp_constant (rtx op, enum machine_mode mode)
1905 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1908 /* Return 1 if the operand is in volatile memory. Note that during
1909 the RTL generation phase, memory_operand does not return TRUE for
1910 volatile memory references. So this function allows us to
1911 recognize volatile references where its safe. */
1914 volatile_mem_operand (rtx op, enum machine_mode mode)
1916 if (GET_CODE (op) != MEM)
1919 if (!MEM_VOLATILE_P (op))
1922 if (mode != GET_MODE (op))
1925 if (reload_completed)
1926 return memory_operand (op, mode);
1928 if (reload_in_progress)
1929 return strict_memory_address_p (mode, XEXP (op, 0));
1931 return memory_address_p (mode, XEXP (op, 0));
1934 /* Return 1 if the operand is an offsettable memory operand. */
1937 offsettable_mem_operand (rtx op, enum machine_mode mode)
1939 return ((GET_CODE (op) == MEM)
1940 && offsettable_address_p (reload_completed || reload_in_progress,
1941 mode, XEXP (op, 0)));
1944 /* Return 1 if the operand is either an easy FP constant (see above) or
1948 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1950 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1953 /* Return 1 if the operand is either a non-special register or an item
1954 that can be used as the operand of a `mode' add insn. */
1957 add_operand (rtx op, enum machine_mode mode)
1959 if (GET_CODE (op) == CONST_INT)
1960 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1961 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1963 return gpc_reg_operand (op, mode);
1966 /* Return 1 if OP is a constant but not a valid add_operand. */
1969 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1971 return (GET_CODE (op) == CONST_INT
1972 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1973 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1976 /* Return 1 if the operand is a non-special register or a constant that
1977 can be used as the operand of an OR or XOR insn on the RS/6000. */
1980 logical_operand (rtx op, enum machine_mode mode)
1982 HOST_WIDE_INT opl, oph;
1984 if (gpc_reg_operand (op, mode))
1987 if (GET_CODE (op) == CONST_INT)
1989 opl = INTVAL (op) & GET_MODE_MASK (mode);
1991 #if HOST_BITS_PER_WIDE_INT <= 32
1992 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1996 else if (GET_CODE (op) == CONST_DOUBLE)
1998 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2001 opl = CONST_DOUBLE_LOW (op);
2002 oph = CONST_DOUBLE_HIGH (op);
2009 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2010 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2013 /* Return 1 if C is a constant that is not a logical operand (as
2014 above), but could be split into one. */
2017 non_logical_cint_operand (rtx op, enum machine_mode mode)
2019 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2020 && ! logical_operand (op, mode)
2021 && reg_or_logical_cint_operand (op, mode));
2024 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2025 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2026 Reject all ones and all zeros, since these should have been optimized
2027 away and confuse the making of MB and ME. */
2030 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2032 HOST_WIDE_INT c, lsb;
2034 if (GET_CODE (op) != CONST_INT)
2039 /* Fail in 64-bit mode if the mask wraps around because the upper
2040 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2041 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2044 /* We don't change the number of transitions by inverting,
2045 so make sure we start with the LS bit zero. */
2049 /* Reject all zeros or all ones. */
2053 /* Find the first transition. */
2056 /* Invert to look for a second transition. */
2059 /* Erase first transition. */
2062 /* Find the second transition (if any). */
2065 /* Match if all the bits above are 1's (or c is zero). */
2069 /* Return 1 for the PowerPC64 rlwinm corner case. */
2072 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2074 HOST_WIDE_INT c, lsb;
2076 if (GET_CODE (op) != CONST_INT)
2081 if ((c & 0x80000001) != 0x80000001)
2095 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2096 It is if there are no more than one 1->0 or 0->1 transitions.
2097 Reject all zeros, since zero should have been optimized away and
2098 confuses the making of MB and ME. */
2101 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2103 if (GET_CODE (op) == CONST_INT)
2105 HOST_WIDE_INT c, lsb;
2109 /* Reject all zeros. */
2113 /* We don't change the number of transitions by inverting,
2114 so make sure we start with the LS bit zero. */
2118 /* Find the transition, and check that all bits above are 1's. */
2121 /* Match if all the bits above are 1's (or c is zero). */
2127 /* Like mask64_operand, but allow up to three transitions. This
2128 predicate is used by insn patterns that generate two rldicl or
2129 rldicr machine insns. */
2132 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2134 if (GET_CODE (op) == CONST_INT)
2136 HOST_WIDE_INT c, lsb;
2140 /* Disallow all zeros. */
2144 /* We don't change the number of transitions by inverting,
2145 so make sure we start with the LS bit zero. */
2149 /* Find the first transition. */
2152 /* Invert to look for a second transition. */
2155 /* Erase first transition. */
2158 /* Find the second transition. */
2161 /* Invert to look for a third transition. */
2164 /* Erase second transition. */
2167 /* Find the third transition (if any). */
2170 /* Match if all the bits above are 1's (or c is zero). */
2176 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2177 implement ANDing by the mask IN. */
2179 build_mask64_2_operands (rtx in, rtx *out)
2181 #if HOST_BITS_PER_WIDE_INT >= 64
2182 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2185 if (GET_CODE (in) != CONST_INT)
2191 /* Assume c initially something like 0x00fff000000fffff. The idea
2192 is to rotate the word so that the middle ^^^^^^ group of zeros
2193 is at the MS end and can be cleared with an rldicl mask. We then
2194 rotate back and clear off the MS ^^ group of zeros with a
2196 c = ~c; /* c == 0xff000ffffff00000 */
2197 lsb = c & -c; /* lsb == 0x0000000000100000 */
2198 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2199 c = ~c; /* c == 0x00fff000000fffff */
2200 c &= -lsb; /* c == 0x00fff00000000000 */
2201 lsb = c & -c; /* lsb == 0x0000100000000000 */
2202 c = ~c; /* c == 0xff000fffffffffff */
2203 c &= -lsb; /* c == 0xff00000000000000 */
2205 while ((lsb >>= 1) != 0)
2206 shift++; /* shift == 44 on exit from loop */
2207 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2208 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2209 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2213 /* Assume c initially something like 0xff000f0000000000. The idea
2214 is to rotate the word so that the ^^^ middle group of zeros
2215 is at the LS end and can be cleared with an rldicr mask. We then
2216 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2218 lsb = c & -c; /* lsb == 0x0000010000000000 */
2219 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2220 c = ~c; /* c == 0x00fff0ffffffffff */
2221 c &= -lsb; /* c == 0x00fff00000000000 */
2222 lsb = c & -c; /* lsb == 0x0000100000000000 */
2223 c = ~c; /* c == 0xff000fffffffffff */
2224 c &= -lsb; /* c == 0xff00000000000000 */
2226 while ((lsb >>= 1) != 0)
2227 shift++; /* shift == 44 on exit from loop */
2228 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2229 m1 >>= shift; /* m1 == 0x0000000000000fff */
2230 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2233 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2234 masks will be all 1's. We are guaranteed more than one transition. */
2235 out[0] = GEN_INT (64 - shift);
2236 out[1] = GEN_INT (m1);
2237 out[2] = GEN_INT (shift);
2238 out[3] = GEN_INT (m2);
2246 /* Return 1 if the operand is either a non-special register or a constant
2247 that can be used as the operand of a PowerPC64 logical AND insn. */
2250 and64_operand (rtx op, enum machine_mode mode)
2252 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2253 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2255 return (logical_operand (op, mode) || mask64_operand (op, mode));
2258 /* Like the above, but also match constants that can be implemented
2259 with two rldicl or rldicr insns. */
2262 and64_2_operand (rtx op, enum machine_mode mode)
2264 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2265 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2267 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2270 /* Return 1 if the operand is either a non-special register or a
2271 constant that can be used as the operand of an RS/6000 logical AND insn. */
2274 and_operand (rtx op, enum machine_mode mode)
2276 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2277 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2279 return (logical_operand (op, mode) || mask_operand (op, mode));
2282 /* Return 1 if the operand is a general register or memory operand. */
2285 reg_or_mem_operand (rtx op, enum machine_mode mode)
2287 return (gpc_reg_operand (op, mode)
2288 || memory_operand (op, mode)
2289 || macho_lo_sum_memory_operand (op, mode)
2290 || volatile_mem_operand (op, mode));
2293 /* Return 1 if the operand is a general register or memory operand without
2294 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2298 lwa_operand (rtx op, enum machine_mode mode)
2302 if (reload_completed && GET_CODE (inner) == SUBREG)
2303 inner = SUBREG_REG (inner);
2305 return gpc_reg_operand (inner, mode)
2306 || (memory_operand (inner, mode)
2307 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2308 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2309 && (GET_CODE (XEXP (inner, 0)) != PLUS
2310 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2311 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2314 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2317 symbol_ref_operand (rtx op, enum machine_mode mode)
2319 if (mode != VOIDmode && GET_MODE (op) != mode)
2322 return (GET_CODE (op) == SYMBOL_REF
2323 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2326 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2327 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2330 call_operand (rtx op, enum machine_mode mode)
2332 if (mode != VOIDmode && GET_MODE (op) != mode)
2335 return (GET_CODE (op) == SYMBOL_REF
2336 || (GET_CODE (op) == REG
2337 && (REGNO (op) == LINK_REGISTER_REGNUM
2338 || REGNO (op) == COUNT_REGISTER_REGNUM
2339 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2342 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2346 current_file_function_operand (rtx op,
2347 enum machine_mode mode ATTRIBUTE_UNUSED)
2349 return (GET_CODE (op) == SYMBOL_REF
2350 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2351 && (SYMBOL_REF_LOCAL_P (op)
2352 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2355 /* Return 1 if this operand is a valid input for a move insn. */
2358 input_operand (rtx op, enum machine_mode mode)
2360 /* Memory is always valid. */
2361 if (memory_operand (op, mode))
2364 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2365 if (GET_CODE (op) == CONSTANT_P_RTX)
2368 /* For floating-point, easy constants are valid. */
2369 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2371 && easy_fp_constant (op, mode))
2374 /* Allow any integer constant. */
2375 if (GET_MODE_CLASS (mode) == MODE_INT
2376 && (GET_CODE (op) == CONST_INT
2377 || GET_CODE (op) == CONST_DOUBLE))
2380 /* Allow easy vector constants. */
2381 if (GET_CODE (op) == CONST_VECTOR
2382 && easy_vector_constant (op, mode))
2385 /* For floating-point or multi-word mode, the only remaining valid type
2387 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2388 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2389 return register_operand (op, mode);
2391 /* The only cases left are integral modes one word or smaller (we
2392 do not get called for MODE_CC values). These can be in any
2394 if (register_operand (op, mode))
2397 /* A SYMBOL_REF referring to the TOC is valid. */
2398 if (legitimate_constant_pool_address_p (op))
2401 /* A constant pool expression (relative to the TOC) is valid */
2402 if (toc_relative_expr_p (op))
2405 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2407 if (DEFAULT_ABI == ABI_V4
2408 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2409 && small_data_operand (op, Pmode))
2416 /* Darwin, AIX increases natural record alignment to doubleword if the first
2417 field is an FP double while the FP fields remain word aligned. */
2420 rs6000_special_round_type_align (tree type, int computed, int specified)
2422 tree field = TYPE_FIELDS (type);
2424 /* Skip all the static variables only if ABI is greater than
2426 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2427 field = TREE_CHAIN (field);
2429 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2430 return MAX (computed, specified);
2432 return MAX (MAX (computed, specified), 64);
2435 /* Return 1 for an operand in small memory on V.4/eabi. */
2438 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2439 enum machine_mode mode ATTRIBUTE_UNUSED)
2444 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2447 if (DEFAULT_ABI != ABI_V4)
2450 if (GET_CODE (op) == SYMBOL_REF)
2453 else if (GET_CODE (op) != CONST
2454 || GET_CODE (XEXP (op, 0)) != PLUS
2455 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2456 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2461 rtx sum = XEXP (op, 0);
2462 HOST_WIDE_INT summand;
2464 /* We have to be careful here, because it is the referenced address
2465 that must be 32k from _SDA_BASE_, not just the symbol. */
2466 summand = INTVAL (XEXP (sum, 1));
2467 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2470 sym_ref = XEXP (sum, 0);
2473 return SYMBOL_REF_SMALL_P (sym_ref);
2479 /* Return true, if operand is a memory operand and has a
2480 displacement divisible by 4. */
2483 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2488 if (!memory_operand (op, mode))
2491 addr = XEXP (op, 0);
2492 if (GET_CODE (addr) == PLUS
2493 && GET_CODE (XEXP (addr, 0)) == REG
2494 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2495 off = INTVAL (XEXP (addr, 1));
2497 return (off % 4) == 0;
2500 /* Return true if either operand is a general purpose register. */
2503 gpr_or_gpr_p (rtx op0, rtx op1)
2505 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2506 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2510 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2513 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2515 switch (GET_CODE(op))
2518 if (RS6000_SYMBOL_REF_TLS_P (op))
2520 else if (CONSTANT_POOL_ADDRESS_P (op))
2522 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2530 else if (! strcmp (XSTR (op, 0), toc_label_name))
2539 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2540 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2542 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2551 constant_pool_expr_p (rtx op)
2555 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2559 toc_relative_expr_p (rtx op)
2563 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2566 /* SPE offset addressing is limited to 5-bits worth of double words. */
2567 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2570 legitimate_constant_pool_address_p (rtx x)
2573 && GET_CODE (x) == PLUS
2574 && GET_CODE (XEXP (x, 0)) == REG
2575 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2576 && constant_pool_expr_p (XEXP (x, 1)));
2580 legitimate_small_data_p (enum machine_mode mode, rtx x)
2582 return (DEFAULT_ABI == ABI_V4
2583 && !flag_pic && !TARGET_TOC
2584 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2585 && small_data_operand (x, mode));
2589 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2591 unsigned HOST_WIDE_INT offset, extra;
2593 if (GET_CODE (x) != PLUS)
2595 if (GET_CODE (XEXP (x, 0)) != REG)
2597 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2599 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2602 offset = INTVAL (XEXP (x, 1));
2610 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2611 which leaves the only valid constant offset of zero, which by
2612 canonicalization rules is also invalid. */
2619 /* SPE vector modes. */
2620 return SPE_CONST_OFFSET_OK (offset);
2624 if (mode == DFmode || !TARGET_POWERPC64)
2626 else if (offset & 3)
2632 if (mode == TFmode || !TARGET_POWERPC64)
2634 else if (offset & 3)
2645 return (offset < 0x10000) && (offset + extra < 0x10000);
2649 legitimate_indexed_address_p (rtx x, int strict)
2653 if (GET_CODE (x) != PLUS)
2658 if (!REG_P (op0) || !REG_P (op1))
2661 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2662 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2663 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2664 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2668 legitimate_indirect_address_p (rtx x, int strict)
2670 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2674 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2676 if (!TARGET_MACHO || !flag_pic
2677 || mode != SImode || GET_CODE(x) != MEM)
2681 if (GET_CODE (x) != LO_SUM)
2683 if (GET_CODE (XEXP (x, 0)) != REG)
2685 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2689 return CONSTANT_P (x);
2693 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2695 if (GET_CODE (x) != LO_SUM)
2697 if (GET_CODE (XEXP (x, 0)) != REG)
2699 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2703 if (TARGET_ELF || TARGET_MACHO)
2705 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2709 if (GET_MODE_NUNITS (mode) != 1)
2711 if (GET_MODE_BITSIZE (mode) > 32
2712 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2715 return CONSTANT_P (x);
2722 /* Try machine-dependent ways of modifying an illegitimate address
2723 to be legitimate. If we find one, return the new, valid address.
2724 This is used from only one place: `memory_address' in explow.c.
2726 OLDX is the address as it was before break_out_memory_refs was
2727 called. In some cases it is useful to look at this to decide what
2730 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2732 It is always safe for this function to do nothing. It exists to
2733 recognize opportunities to optimize the output.
2735 On RS/6000, first check for the sum of a register with a constant
2736 integer that is out of range. If so, generate code to add the
2737 constant with the low-order 16 bits masked to the register and force
2738 this result into another register (this can be done with `cau').
2739 Then generate an address of REG+(CONST&0xffff), allowing for the
2740 possibility of bit 16 being a one.
2742 Then check for the sum of a register and something not constant, try to
2743 load the other things into a register and return the sum. */
2746 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2747 enum machine_mode mode)
2749 if (GET_CODE (x) == SYMBOL_REF)
2751 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2753 return rs6000_legitimize_tls_address (x, model);
2756 if (GET_CODE (x) == PLUS
2757 && GET_CODE (XEXP (x, 0)) == REG
2758 && GET_CODE (XEXP (x, 1)) == CONST_INT
2759 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2761 HOST_WIDE_INT high_int, low_int;
2763 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2764 high_int = INTVAL (XEXP (x, 1)) - low_int;
2765 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2766 GEN_INT (high_int)), 0);
2767 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2769 else if (GET_CODE (x) == PLUS
2770 && GET_CODE (XEXP (x, 0)) == REG
2771 && GET_CODE (XEXP (x, 1)) != CONST_INT
2772 && GET_MODE_NUNITS (mode) == 1
2773 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2775 || (mode != DFmode && mode != TFmode))
2776 && (TARGET_POWERPC64 || mode != DImode)
2779 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2780 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2782 else if (ALTIVEC_VECTOR_MODE (mode))
2786 /* Make sure both operands are registers. */
2787 if (GET_CODE (x) == PLUS)
2788 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2789 force_reg (Pmode, XEXP (x, 1)));
2791 reg = force_reg (Pmode, x);
2794 else if (SPE_VECTOR_MODE (mode))
2796 /* We accept [reg + reg] and [reg + OFFSET]. */
2798 if (GET_CODE (x) == PLUS)
2800 rtx op1 = XEXP (x, 0);
2801 rtx op2 = XEXP (x, 1);
2803 op1 = force_reg (Pmode, op1);
2805 if (GET_CODE (op2) != REG
2806 && (GET_CODE (op2) != CONST_INT
2807 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2808 op2 = force_reg (Pmode, op2);
2810 return gen_rtx_PLUS (Pmode, op1, op2);
2813 return force_reg (Pmode, x);
2819 && GET_CODE (x) != CONST_INT
2820 && GET_CODE (x) != CONST_DOUBLE
2822 && GET_MODE_NUNITS (mode) == 1
2823 && (GET_MODE_BITSIZE (mode) <= 32
2824 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2826 rtx reg = gen_reg_rtx (Pmode);
2827 emit_insn (gen_elf_high (reg, x));
2828 return gen_rtx_LO_SUM (Pmode, reg, x);
2830 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2833 && ! MACHO_DYNAMIC_NO_PIC_P
2835 && GET_CODE (x) != CONST_INT
2836 && GET_CODE (x) != CONST_DOUBLE
2838 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2842 rtx reg = gen_reg_rtx (Pmode);
2843 emit_insn (gen_macho_high (reg, x));
2844 return gen_rtx_LO_SUM (Pmode, reg, x);
2847 && constant_pool_expr_p (x)
2848 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2850 return create_TOC_reference (x);
2856 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2857 We need to emit DTP-relative relocations. */
2860 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2865 fputs ("\t.long\t", file);
2868 fputs (DOUBLE_INT_ASM_OP, file);
2873 output_addr_const (file, x);
2874 fputs ("@dtprel+0x8000", file);
2877 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2879 static GTY(()) rtx rs6000_tls_symbol;
2881 rs6000_tls_get_addr (void)
2883 if (!rs6000_tls_symbol)
2884 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2886 return rs6000_tls_symbol;
2889 /* Construct the SYMBOL_REF for TLS GOT references. */
2891 static GTY(()) rtx rs6000_got_symbol;
2893 rs6000_got_sym (void)
2895 if (!rs6000_got_symbol)
2897 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2898 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2899 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2902 return rs6000_got_symbol;
2905 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2906 this (thread-local) address. */
2909 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2913 dest = gen_reg_rtx (Pmode);
2914 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2920 tlsreg = gen_rtx_REG (Pmode, 13);
2921 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2925 tlsreg = gen_rtx_REG (Pmode, 2);
2926 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2930 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2934 tmp = gen_reg_rtx (Pmode);
2937 tlsreg = gen_rtx_REG (Pmode, 13);
2938 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2942 tlsreg = gen_rtx_REG (Pmode, 2);
2943 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2947 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2949 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2954 rtx r3, got, tga, tmp1, tmp2, eqv;
2957 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2961 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2964 rtx gsym = rs6000_got_sym ();
2965 got = gen_reg_rtx (Pmode);
2967 rs6000_emit_move (got, gsym, Pmode);
2971 static int tls_got_labelno = 0;
2972 rtx tempLR, lab, tmp3, mem;
2975 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2976 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2977 tempLR = gen_reg_rtx (Pmode);
2978 tmp1 = gen_reg_rtx (Pmode);
2979 tmp2 = gen_reg_rtx (Pmode);
2980 tmp3 = gen_reg_rtx (Pmode);
2981 mem = gen_rtx_MEM (Pmode, tmp1);
2982 RTX_UNCHANGING_P (mem) = 1;
2984 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2986 emit_move_insn (tmp1, tempLR);
2987 emit_move_insn (tmp2, mem);
2988 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2989 last = emit_move_insn (got, tmp3);
2990 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2992 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2994 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3000 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3002 r3 = gen_rtx_REG (Pmode, 3);
3004 insn = gen_tls_gd_64 (r3, got, addr);
3006 insn = gen_tls_gd_32 (r3, got, addr);
3009 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3010 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3011 insn = emit_call_insn (insn);
3012 CONST_OR_PURE_CALL_P (insn) = 1;
3013 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3014 insn = get_insns ();
3016 emit_libcall_block (insn, dest, r3, addr);
3018 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3020 r3 = gen_rtx_REG (Pmode, 3);
3022 insn = gen_tls_ld_64 (r3, got);
3024 insn = gen_tls_ld_32 (r3, got);
3027 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3028 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3029 insn = emit_call_insn (insn);
3030 CONST_OR_PURE_CALL_P (insn) = 1;
3031 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3032 insn = get_insns ();
3034 tmp1 = gen_reg_rtx (Pmode);
3035 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3037 emit_libcall_block (insn, tmp1, r3, eqv);
3038 if (rs6000_tls_size == 16)
3041 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3043 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3045 else if (rs6000_tls_size == 32)
3047 tmp2 = gen_reg_rtx (Pmode);
3049 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3051 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3054 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3056 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3060 tmp2 = gen_reg_rtx (Pmode);
3062 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3064 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3066 insn = gen_rtx_SET (Pmode, dest,
3067 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3073 /* IE, or 64 bit offset LE. */
3074 tmp2 = gen_reg_rtx (Pmode);
3076 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3078 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3081 insn = gen_tls_tls_64 (dest, tmp2, addr);
3083 insn = gen_tls_tls_32 (dest, tmp2, addr);
3091 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3092 instruction definitions. */
3095 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3097 return RS6000_SYMBOL_REF_TLS_P (x);
3100 /* Return 1 if X contains a thread-local symbol. */
3103 rs6000_tls_referenced_p (rtx x)
3105 if (! TARGET_HAVE_TLS)
3108 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3111 /* Return 1 if *X is a thread-local symbol. This is the same as
3112 rs6000_tls_symbol_ref except for the type of the unused argument. */
3115 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3117 return RS6000_SYMBOL_REF_TLS_P (*x);
3120 /* The convention appears to be to define this wherever it is used.
3121 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3122 is now used here. */
3123 #ifndef REG_MODE_OK_FOR_BASE_P
3124 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3127 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3128 replace the input X, or the original X if no replacement is called for.
3129 The output parameter *WIN is 1 if the calling macro should goto WIN,
3132 For RS/6000, we wish to handle large displacements off a base
3133 register by splitting the addend across an addiu/addis and the mem insn.
3134 This cuts number of extra insns needed from 3 to 1.
3136 On Darwin, we use this to generate code for floating point constants.
3137 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3138 The Darwin code is inside #if TARGET_MACHO because only then is
3139 machopic_function_base_name() defined. */
3141 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3142 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3144 /* We must recognize output that we have already generated ourselves. */
3145 if (GET_CODE (x) == PLUS
3146 && GET_CODE (XEXP (x, 0)) == PLUS
3147 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3148 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3149 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3151 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3152 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3153 opnum, (enum reload_type)type);
3159 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3160 && GET_CODE (x) == LO_SUM
3161 && GET_CODE (XEXP (x, 0)) == PLUS
3162 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3163 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3164 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3165 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3166 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3167 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3168 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3170 /* Result of previous invocation of this function on Darwin
3171 floating point constant. */
3172 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3173 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3174 opnum, (enum reload_type)type);
3179 if (GET_CODE (x) == PLUS
3180 && GET_CODE (XEXP (x, 0)) == REG
3181 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3182 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3183 && GET_CODE (XEXP (x, 1)) == CONST_INT
3184 && !SPE_VECTOR_MODE (mode)
3185 && !ALTIVEC_VECTOR_MODE (mode))
3187 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3188 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3190 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3192 /* Check for 32-bit overflow. */
3193 if (high + low != val)
3199 /* Reload the high part into a base reg; leave the low part
3200 in the mem directly. */
3202 x = gen_rtx_PLUS (GET_MODE (x),
3203 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3207 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3208 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3209 opnum, (enum reload_type)type);
3214 if (GET_CODE (x) == SYMBOL_REF
3215 && DEFAULT_ABI == ABI_DARWIN
3216 && !ALTIVEC_VECTOR_MODE (mode)
3217 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3218 /* Don't do this for TFmode, since the result isn't offsettable. */
3223 rtx offset = gen_rtx_CONST (Pmode,
3224 gen_rtx_MINUS (Pmode, x,
3225 gen_rtx_SYMBOL_REF (Pmode,
3226 machopic_function_base_name ())));
3227 x = gen_rtx_LO_SUM (GET_MODE (x),
3228 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3229 gen_rtx_HIGH (Pmode, offset)), offset);
3232 x = gen_rtx_LO_SUM (GET_MODE (x),
3233 gen_rtx_HIGH (Pmode, x), x);
3235 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3236 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3237 opnum, (enum reload_type)type);
3243 && constant_pool_expr_p (x)
3244 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3246 (x) = create_TOC_reference (x);
3254 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3255 that is a valid memory address for an instruction.
3256 The MODE argument is the machine mode for the MEM expression
3257 that wants to use this address.
3259 On the RS/6000, there are four valid address: a SYMBOL_REF that
3260 refers to a constant pool entry of an address (or the sum of it
3261 plus a constant), a short (16-bit signed) constant plus a register,
3262 the sum of two registers, or a register indirect, possibly with an
3263 auto-increment. For DFmode and DImode with a constant plus register,
3264 we must ensure that both words are addressable or PowerPC64 with offset
3267 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3268 32-bit DImode, TImode), indexed addressing cannot be used because
3269 adjacent memory cells are accessed by adding word-sized offsets
3270 during assembly output. */
3272 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3274 if (RS6000_SYMBOL_REF_TLS_P (x))
3276 if (legitimate_indirect_address_p (x, reg_ok_strict))
3278 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3279 && !ALTIVEC_VECTOR_MODE (mode)
3280 && !SPE_VECTOR_MODE (mode)
3282 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3284 if (legitimate_small_data_p (mode, x))
3286 if (legitimate_constant_pool_address_p (x))
3288 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3290 && GET_CODE (x) == PLUS
3291 && GET_CODE (XEXP (x, 0)) == REG
3292 && (XEXP (x, 0) == virtual_stack_vars_rtx
3293 || XEXP (x, 0) == arg_pointer_rtx)
3294 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3296 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3299 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3301 || (mode != DFmode && mode != TFmode))
3302 && (TARGET_POWERPC64 || mode != DImode)
3303 && legitimate_indexed_address_p (x, reg_ok_strict))
3305 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3310 /* Go to LABEL if ADDR (a legitimate address expression)
3311 has an effect that depends on the machine mode it is used for.
3313 On the RS/6000 this is true of all integral offsets (since AltiVec
3314 modes don't allow them) or is a pre-increment or decrement.
3316 ??? Except that due to conceptual problems in offsettable_address_p
3317 we can't really report the problems of integral offsets. So leave
3318 this assuming that the adjustable offset must be valid for the
3319 sub-words of a TFmode operand, which is what we had before. */
3322 rs6000_mode_dependent_address (rtx addr)
3324 switch (GET_CODE (addr))
3327 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3329 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3330 return val + 12 + 0x8000 >= 0x10000;
3339 return TARGET_UPDATE;
3348 /* Return number of consecutive hard regs needed starting at reg REGNO
3349 to hold something of mode MODE.
3350 This is ordinarily the length in words of a value of mode MODE
3351 but can be less for certain modes in special long registers.
3353 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3354 scalar instructions. The upper 32 bits are only available to the
3357 POWER and PowerPC GPRs hold 32 bits worth;
3358 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3361 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3363 if (FP_REGNO_P (regno))
3364 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3366 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3367 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3369 if (ALTIVEC_REGNO_P (regno))
3371 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3373 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3376 /* Try to output insns to set TARGET equal to the constant C if it can
3377 be done in less than N insns. Do all computations in MODE.
3378 Returns the place where the output has been placed if it can be
3379 done and the insns have been emitted. If it would take more than N
3380 insns, zero is returned and no insns and emitted. */
3383 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3384 rtx source, int n ATTRIBUTE_UNUSED)
3386 rtx result, insn, set;
3387 HOST_WIDE_INT c0, c1;
3389 if (mode == QImode || mode == HImode)
3392 dest = gen_reg_rtx (mode);
3393 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3396 else if (mode == SImode)
3398 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3400 emit_insn (gen_rtx_SET (VOIDmode, result,
3401 GEN_INT (INTVAL (source)
3402 & (~ (HOST_WIDE_INT) 0xffff))));
3403 emit_insn (gen_rtx_SET (VOIDmode, dest,
3404 gen_rtx_IOR (SImode, result,
3405 GEN_INT (INTVAL (source) & 0xffff))));
3408 else if (mode == DImode)
3410 if (GET_CODE (source) == CONST_INT)
3412 c0 = INTVAL (source);
3415 else if (GET_CODE (source) == CONST_DOUBLE)
3417 #if HOST_BITS_PER_WIDE_INT >= 64
3418 c0 = CONST_DOUBLE_LOW (source);
3421 c0 = CONST_DOUBLE_LOW (source);
3422 c1 = CONST_DOUBLE_HIGH (source);
3428 result = rs6000_emit_set_long_const (dest, c0, c1);
3433 insn = get_last_insn ();
3434 set = single_set (insn);
3435 if (! CONSTANT_P (SET_SRC (set)))
3436 set_unique_reg_note (insn, REG_EQUAL, source);
3441 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3442 fall back to a straight forward decomposition. We do this to avoid
3443 exponential run times encountered when looking for longer sequences
3444 with rs6000_emit_set_const. */
3446 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3448 if (!TARGET_POWERPC64)
3450 rtx operand1, operand2;
3452 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3454 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3456 emit_move_insn (operand1, GEN_INT (c1));
3457 emit_move_insn (operand2, GEN_INT (c2));
3461 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3464 ud2 = (c1 & 0xffff0000) >> 16;
3465 #if HOST_BITS_PER_WIDE_INT >= 64
3469 ud4 = (c2 & 0xffff0000) >> 16;
3471 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3472 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3475 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3477 emit_move_insn (dest, GEN_INT (ud1));
3480 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3481 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3484 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3487 emit_move_insn (dest, GEN_INT (ud2 << 16));
3489 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3491 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3492 || (ud4 == 0 && ! (ud3 & 0x8000)))
3495 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3498 emit_move_insn (dest, GEN_INT (ud3 << 16));
3501 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3502 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3504 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3509 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3512 emit_move_insn (dest, GEN_INT (ud4 << 16));
3515 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3517 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3519 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3520 GEN_INT (ud2 << 16)));
3522 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3528 /* Emit a move from SOURCE to DEST in mode MODE. */
3530 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3534 operands[1] = source;
3536 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3537 if (GET_CODE (operands[1]) == CONST_DOUBLE
3538 && ! FLOAT_MODE_P (mode)
3539 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3541 /* FIXME. This should never happen. */
3542 /* Since it seems that it does, do the safe thing and convert
3544 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3546 if (GET_CODE (operands[1]) == CONST_DOUBLE
3547 && ! FLOAT_MODE_P (mode)
3548 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3549 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3550 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3551 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3554 /* Check if GCC is setting up a block move that will end up using FP
3555 registers as temporaries. We must make sure this is acceptable. */
3556 if (GET_CODE (operands[0]) == MEM
3557 && GET_CODE (operands[1]) == MEM
3559 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3560 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3561 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3562 ? 32 : MEM_ALIGN (operands[0])))
3563 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3565 : MEM_ALIGN (operands[1]))))
3566 && ! MEM_VOLATILE_P (operands [0])
3567 && ! MEM_VOLATILE_P (operands [1]))
3569 emit_move_insn (adjust_address (operands[0], SImode, 0),
3570 adjust_address (operands[1], SImode, 0));
3571 emit_move_insn (adjust_address (operands[0], SImode, 4),
3572 adjust_address (operands[1], SImode, 4));
3576 if (!no_new_pseudos)
3578 if (GET_CODE (operands[1]) == MEM && optimize > 0
3579 && (mode == QImode || mode == HImode || mode == SImode)
3580 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3582 rtx reg = gen_reg_rtx (word_mode);
3584 emit_insn (gen_rtx_SET (word_mode, reg,
3585 gen_rtx_ZERO_EXTEND (word_mode,
3587 operands[1] = gen_lowpart (mode, reg);
3589 if (GET_CODE (operands[0]) != REG)
3590 operands[1] = force_reg (mode, operands[1]);
3593 if (mode == SFmode && ! TARGET_POWERPC
3594 && TARGET_HARD_FLOAT && TARGET_FPRS
3595 && GET_CODE (operands[0]) == MEM)
3599 if (reload_in_progress || reload_completed)
3600 regnum = true_regnum (operands[1]);
3601 else if (GET_CODE (operands[1]) == REG)
3602 regnum = REGNO (operands[1]);
3606 /* If operands[1] is a register, on POWER it may have
3607 double-precision data in it, so truncate it to single
3609 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3612 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3613 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3614 operands[1] = newreg;
3618 /* Recognize the case where operand[1] is a reference to thread-local
3619 data and load its address to a register. */
3620 if (GET_CODE (operands[1]) == SYMBOL_REF)
3622 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3624 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3627 /* Handle the case where reload calls us with an invalid address. */
3628 if (reload_in_progress && mode == Pmode
3629 && (! general_operand (operands[1], mode)
3630 || ! nonimmediate_operand (operands[0], mode)))
3633 /* Handle the case of CONSTANT_P_RTX. */
3634 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3637 /* 128-bit constant floating-point values on Darwin should really be
3638 loaded as two parts. */
3639 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3640 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3641 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3643 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3644 know how to get a DFmode SUBREG of a TFmode. */
3645 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3646 simplify_gen_subreg (DImode, operands[1], mode, 0),
3648 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3649 GET_MODE_SIZE (DImode)),
3650 simplify_gen_subreg (DImode, operands[1], mode,
3651 GET_MODE_SIZE (DImode)),
3656 /* FIXME: In the long term, this switch statement should go away
3657 and be replaced by a sequence of tests based on things like
3663 if (CONSTANT_P (operands[1])
3664 && GET_CODE (operands[1]) != CONST_INT)
3665 operands[1] = force_const_mem (mode, operands[1]);
3671 if (CONSTANT_P (operands[1])
3672 && ! easy_fp_constant (operands[1], mode))
3673 operands[1] = force_const_mem (mode, operands[1]);
3684 if (CONSTANT_P (operands[1])
3685 && !easy_vector_constant (operands[1], mode))
3686 operands[1] = force_const_mem (mode, operands[1]);
3691 /* Use default pattern for address of ELF small data */
3694 && DEFAULT_ABI == ABI_V4
3695 && (GET_CODE (operands[1]) == SYMBOL_REF
3696 || GET_CODE (operands[1]) == CONST)
3697 && small_data_operand (operands[1], mode))
3699 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3703 if (DEFAULT_ABI == ABI_V4
3704 && mode == Pmode && mode == SImode
3705 && flag_pic == 1 && got_operand (operands[1], mode))
3707 emit_insn (gen_movsi_got (operands[0], operands[1]));
3711 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3715 && CONSTANT_P (operands[1])
3716 && GET_CODE (operands[1]) != HIGH
3717 && GET_CODE (operands[1]) != CONST_INT)
3719 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3721 /* If this is a function address on -mcall-aixdesc,
3722 convert it to the address of the descriptor. */
3723 if (DEFAULT_ABI == ABI_AIX
3724 && GET_CODE (operands[1]) == SYMBOL_REF
3725 && XSTR (operands[1], 0)[0] == '.')
3727 const char *name = XSTR (operands[1], 0);
3729 while (*name == '.')
3731 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3732 CONSTANT_POOL_ADDRESS_P (new_ref)
3733 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3734 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3735 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3736 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3737 operands[1] = new_ref;
3740 if (DEFAULT_ABI == ABI_DARWIN)
3743 if (MACHO_DYNAMIC_NO_PIC_P)
3745 /* Take care of any required data indirection. */
3746 operands[1] = rs6000_machopic_legitimize_pic_address (
3747 operands[1], mode, operands[0]);
3748 if (operands[0] != operands[1])
3749 emit_insn (gen_rtx_SET (VOIDmode,
3750 operands[0], operands[1]));
3754 emit_insn (gen_macho_high (target, operands[1]));
3755 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3759 emit_insn (gen_elf_high (target, operands[1]));
3760 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3764 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3765 and we have put it in the TOC, we just need to make a TOC-relative
3768 && GET_CODE (operands[1]) == SYMBOL_REF
3769 && constant_pool_expr_p (operands[1])
3770 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3771 get_pool_mode (operands[1])))
3773 operands[1] = create_TOC_reference (operands[1]);
3775 else if (mode == Pmode
3776 && CONSTANT_P (operands[1])
3777 && ((GET_CODE (operands[1]) != CONST_INT
3778 && ! easy_fp_constant (operands[1], mode))
3779 || (GET_CODE (operands[1]) == CONST_INT
3780 && num_insns_constant (operands[1], mode) > 2)
3781 || (GET_CODE (operands[0]) == REG
3782 && FP_REGNO_P (REGNO (operands[0]))))
3783 && GET_CODE (operands[1]) != HIGH
3784 && ! legitimate_constant_pool_address_p (operands[1])
3785 && ! toc_relative_expr_p (operands[1]))
3787 /* Emit a USE operation so that the constant isn't deleted if
3788 expensive optimizations are turned on because nobody
3789 references it. This should only be done for operands that
3790 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3791 This should not be done for operands that contain LABEL_REFs.
3792 For now, we just handle the obvious case. */
3793 if (GET_CODE (operands[1]) != LABEL_REF)
3794 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3797 /* Darwin uses a special PIC legitimizer. */
3798 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3801 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3803 if (operands[0] != operands[1])
3804 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3809 /* If we are to limit the number of things we put in the TOC and
3810 this is a symbol plus a constant we can add in one insn,
3811 just put the symbol in the TOC and add the constant. Don't do
3812 this if reload is in progress. */
3813 if (GET_CODE (operands[1]) == CONST
3814 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3815 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3816 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3817 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3818 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3819 && ! side_effects_p (operands[0]))
3822 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3823 rtx other = XEXP (XEXP (operands[1], 0), 1);
3825 sym = force_reg (mode, sym);
3827 emit_insn (gen_addsi3 (operands[0], sym, other));
3829 emit_insn (gen_adddi3 (operands[0], sym, other));
3833 operands[1] = force_const_mem (mode, operands[1]);
3836 && constant_pool_expr_p (XEXP (operands[1], 0))
3837 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3838 get_pool_constant (XEXP (operands[1], 0)),
3839 get_pool_mode (XEXP (operands[1], 0))))
3842 = gen_rtx_MEM (mode,
3843 create_TOC_reference (XEXP (operands[1], 0)));
3844 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3845 RTX_UNCHANGING_P (operands[1]) = 1;
3851 if (GET_CODE (operands[0]) == MEM
3852 && GET_CODE (XEXP (operands[0], 0)) != REG
3853 && ! reload_in_progress)
3855 = replace_equiv_address (operands[0],
3856 copy_addr_to_reg (XEXP (operands[0], 0)));
3858 if (GET_CODE (operands[1]) == MEM
3859 && GET_CODE (XEXP (operands[1], 0)) != REG
3860 && ! reload_in_progress)
3862 = replace_equiv_address (operands[1],
3863 copy_addr_to_reg (XEXP (operands[1], 0)));
3866 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3868 gen_rtx_SET (VOIDmode,
3869 operands[0], operands[1]),
3870 gen_rtx_CLOBBER (VOIDmode,
3871 gen_rtx_SCRATCH (SImode)))));
3880 /* Above, we may have called force_const_mem which may have returned
3881 an invalid address. If we can, fix this up; otherwise, reload will
3882 have to deal with it. */
3883 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3884 operands[1] = validize_mem (operands[1]);
3887 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3890 /* Nonzero if we can use a floating-point register to pass this arg. */
3891 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3892 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3893 && (CUM)->fregno <= FP_ARG_MAX_REG \
3894 && TARGET_HARD_FLOAT && TARGET_FPRS)
3896 /* Nonzero if we can use an AltiVec register to pass this arg. */
3897 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3898 (ALTIVEC_VECTOR_MODE (MODE) \
3899 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3900 && TARGET_ALTIVEC_ABI \
3903 /* Return a nonzero value to say to return the function value in
3904 memory, just as large structures are always returned. TYPE will be
3905 the data type of the value, and FNTYPE will be the type of the
3906 function doing the returning, or @code{NULL} for libcalls.
3908 The AIX ABI for the RS/6000 specifies that all structures are
3909 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3910 specifies that structures <= 8 bytes are returned in r3/r4, but a
3911 draft put them in memory, and GCC used to implement the draft
3912 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3913 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3914 compatibility can change DRAFT_V4_STRUCT_RET to override the
3915 default, and -m switches get the final word. See
3916 rs6000_override_options for more details.
3918 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3919 long double support is enabled. These values are returned in memory.
3921 int_size_in_bytes returns -1 for variable size objects, which go in
3922 memory always. The cast to unsigned makes -1 > 8. */
3925 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3927 if (AGGREGATE_TYPE_P (type)
3928 && (TARGET_AIX_STRUCT_RET
3929 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3931 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3936 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3937 for a call to a function whose data type is FNTYPE.
3938 For a library call, FNTYPE is 0.
3940 For incoming args we set the number of arguments in the prototype large
3941 so we never return a PARALLEL. */
3944 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3945 rtx libname ATTRIBUTE_UNUSED, int incoming,
3946 int libcall, int n_named_args)
3948 static CUMULATIVE_ARGS zero_cumulative;
3950 *cum = zero_cumulative;
3952 cum->fregno = FP_ARG_MIN_REG;
3953 cum->vregno = ALTIVEC_ARG_MIN_REG;
3954 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3955 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3956 ? CALL_LIBCALL : CALL_NORMAL);
3957 cum->sysv_gregno = GP_ARG_MIN_REG;
3958 cum->stdarg = fntype
3959 && (TYPE_ARG_TYPES (fntype) != 0
3960 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3961 != void_type_node));
3963 cum->nargs_prototype = 0;
3964 if (incoming || cum->prototype)
3965 cum->nargs_prototype = n_named_args;
3967 /* Check for a longcall attribute. */
3969 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3970 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3971 cum->call_cookie = CALL_LONG;
3973 if (TARGET_DEBUG_ARG)
3975 fprintf (stderr, "\ninit_cumulative_args:");
3978 tree ret_type = TREE_TYPE (fntype);
3979 fprintf (stderr, " ret code = %s,",
3980 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3983 if (cum->call_cookie & CALL_LONG)
3984 fprintf (stderr, " longcall,");
3986 fprintf (stderr, " proto = %d, nargs = %d\n",
3987 cum->prototype, cum->nargs_prototype);
3992 && TARGET_ALTIVEC_ABI
3993 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3995 error ("Cannot return value in vector register because"
3996 " altivec instructions are disabled, use -maltivec"
3997 " to enable them.");
4001 /* If defined, a C expression which determines whether, and in which
4002 direction, to pad out an argument with extra space. The value
4003 should be of type `enum direction': either `upward' to pad above
4004 the argument, `downward' to pad below, or `none' to inhibit
4007 For the AIX ABI structs are always stored left shifted in their
4011 function_arg_padding (enum machine_mode mode, tree type)
4013 #ifndef AGGREGATE_PADDING_FIXED
4014 #define AGGREGATE_PADDING_FIXED 0
4016 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4017 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4020 if (!AGGREGATE_PADDING_FIXED)
4022 /* GCC used to pass structures of the same size as integer types as
4023 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4024 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4025 passed padded downward, except that -mstrict-align further
4026 muddied the water in that multi-component structures of 2 and 4
4027 bytes in size were passed padded upward.
4029 The following arranges for best compatibility with previous
4030 versions of gcc, but removes the -mstrict-align dependency. */
4031 if (BYTES_BIG_ENDIAN)
4033 HOST_WIDE_INT size = 0;
4035 if (mode == BLKmode)
4037 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4038 size = int_size_in_bytes (type);
4041 size = GET_MODE_SIZE (mode);
4043 if (size == 1 || size == 2 || size == 4)
4049 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4051 if (type != 0 && AGGREGATE_TYPE_P (type))
4055 /* Fall back to the default. */
4056 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4059 /* If defined, a C expression that gives the alignment boundary, in bits,
4060 of an argument with the specified mode and type. If it is not defined,
4061 PARM_BOUNDARY is used for all arguments.
4063 V.4 wants long longs to be double word aligned. */
4066 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4068 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
4070 else if (SPE_VECTOR_MODE (mode))
4072 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4075 return PARM_BOUNDARY;
4078 /* Compute the size (in words) of a function argument. */
4080 static unsigned long
4081 rs6000_arg_size (enum machine_mode mode, tree type)
4085 if (mode != BLKmode)
4086 size = GET_MODE_SIZE (mode);
4088 size = int_size_in_bytes (type);
4091 return (size + 3) >> 2;
4093 return (size + 7) >> 3;
4096 /* Update the data in CUM to advance over an argument
4097 of mode MODE and data type TYPE.
4098 (TYPE is null for libcalls where that information may not be available.) */
4101 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4102 tree type, int named)
4104 cum->nargs_prototype--;
4106 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4108 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4111 if (!TARGET_ALTIVEC)
4112 error ("Cannot pass argument in vector register because"
4113 " altivec instructions are disabled, use -maltivec"
4114 " to enable them.");
4116 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
4117 even if it is going to be passed in a vector register.
4118 Darwin does the same for variable-argument functions. */
4119 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4120 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4124 /* Vector parameters must be 16-byte aligned. This places
4125 them at 2 mod 4 in terms of words in 32-bit mode, since
4126 the parameter save area starts at offset 24 from the
4127 stack. In 64-bit mode, they just have to start on an
4128 even word, since the parameter save area is 16-byte
4129 aligned. Space for GPRs is reserved even if the argument
4130 will be passed in memory. */
4132 align = ((6 - (cum->words & 3)) & 3);
4134 align = cum->words & 1;
4135 cum->words += align + rs6000_arg_size (mode, type);
4137 if (TARGET_DEBUG_ARG)
4139 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4141 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4142 cum->nargs_prototype, cum->prototype,
4143 GET_MODE_NAME (mode));
4147 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4149 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4151 else if (DEFAULT_ABI == ABI_V4)
4153 if (TARGET_HARD_FLOAT && TARGET_FPRS
4154 && (mode == SFmode || mode == DFmode))
4156 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4161 cum->words += cum->words & 1;
4162 cum->words += rs6000_arg_size (mode, type);
4168 int gregno = cum->sysv_gregno;
4170 /* Aggregates and IEEE quad get passed by reference. */
4171 if ((type && AGGREGATE_TYPE_P (type))
4175 n_words = rs6000_arg_size (mode, type);
4177 /* Long long and SPE vectors are put in odd registers. */
4178 if (n_words == 2 && (gregno & 1) == 0)
4181 /* Long long and SPE vectors are not split between registers
4183 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4185 /* Long long is aligned on the stack. */
4187 cum->words += cum->words & 1;
4188 cum->words += n_words;
4191 /* Note: continuing to accumulate gregno past when we've started
4192 spilling to the stack indicates the fact that we've started
4193 spilling to the stack to expand_builtin_saveregs. */
4194 cum->sysv_gregno = gregno + n_words;
4197 if (TARGET_DEBUG_ARG)
4199 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4200 cum->words, cum->fregno);
4201 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4202 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4203 fprintf (stderr, "mode = %4s, named = %d\n",
4204 GET_MODE_NAME (mode), named);
4209 int align = (TARGET_32BIT && (cum->words & 1) != 0
4210 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4212 cum->words += align + rs6000_arg_size (mode, type);
4214 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4215 && TARGET_HARD_FLOAT && TARGET_FPRS)
4216 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4218 if (TARGET_DEBUG_ARG)
4220 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4221 cum->words, cum->fregno);
4222 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4223 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4224 fprintf (stderr, "named = %d, align = %d\n", named, align);
4229 /* Determine where to put a SIMD argument on the SPE. */
4232 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4237 int gregno = cum->sysv_gregno;
4238 int n_words = rs6000_arg_size (mode, type);
4240 /* SPE vectors are put in odd registers. */
4241 if (n_words == 2 && (gregno & 1) == 0)
4244 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4247 enum machine_mode m = SImode;
4249 r1 = gen_rtx_REG (m, gregno);
4250 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4251 r2 = gen_rtx_REG (m, gregno + 1);
4252 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4253 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4260 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4261 return gen_rtx_REG (mode, cum->sysv_gregno);
4267 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4270 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4271 tree type, int align_words)
4275 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4276 in vararg list into zero, one or two GPRs */
4277 if (align_words >= GP_ARG_NUM_REG)
4278 return gen_rtx_PARALLEL (DFmode,
4280 gen_rtx_EXPR_LIST (VOIDmode,
4281 NULL_RTX, const0_rtx),
4282 gen_rtx_EXPR_LIST (VOIDmode,
4286 else if (align_words + rs6000_arg_size (mode, type)
4288 /* If this is partially on the stack, then we only
4289 include the portion actually in registers here. */
4290 return gen_rtx_PARALLEL (DFmode,
4292 gen_rtx_EXPR_LIST (VOIDmode,
4293 gen_rtx_REG (SImode,
4297 gen_rtx_EXPR_LIST (VOIDmode,
4302 /* split a DFmode arg into two GPRs */
4303 return gen_rtx_PARALLEL (DFmode,
4305 gen_rtx_EXPR_LIST (VOIDmode,
4306 gen_rtx_REG (SImode,
4310 gen_rtx_EXPR_LIST (VOIDmode,
4311 gen_rtx_REG (SImode,
4315 gen_rtx_EXPR_LIST (VOIDmode,
4316 gen_rtx_REG (mode, cum->fregno),
4319 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4321 else if (mode == DImode)
4323 if (align_words < GP_ARG_NUM_REG - 1)
4324 return gen_rtx_PARALLEL (DImode,
4326 gen_rtx_EXPR_LIST (VOIDmode,
4327 gen_rtx_REG (SImode,
4331 gen_rtx_EXPR_LIST (VOIDmode,
4332 gen_rtx_REG (SImode,
4336 else if (align_words == GP_ARG_NUM_REG - 1)
4337 return gen_rtx_PARALLEL (DImode,
4339 gen_rtx_EXPR_LIST (VOIDmode,
4340 NULL_RTX, const0_rtx),
4341 gen_rtx_EXPR_LIST (VOIDmode,
4342 gen_rtx_REG (SImode,
4347 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4350 int size = int_size_in_bytes (type);
4351 int no_units = ((size - 1) / 4) + 1;
4352 int max_no_words = GP_ARG_NUM_REG - align_words;
4353 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4354 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4356 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4358 for (k=0; k < rtlvec_len; k++)
4359 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4360 gen_rtx_REG (SImode,
4363 k == 0 ? const0_rtx : GEN_INT (k*4));
4365 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4371 /* Determine where to put an argument to a function.
4372 Value is zero to push the argument on the stack,
4373 or a hard register in which to store the argument.
4375 MODE is the argument's machine mode.
4376 TYPE is the data type of the argument (as a tree).
4377 This is null for libcalls where that information may
4379 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4380 the preceding args and about the function being called.
4381 NAMED is nonzero if this argument is a named parameter
4382 (otherwise it is an extra parameter matching an ellipsis).
4384 On RS/6000 the first eight words of non-FP are normally in registers
4385 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4386 Under V.4, the first 8 FP args are in registers.
4388 If this is floating-point and no prototype is specified, we use
4389 both an FP and integer register (or possibly FP reg and stack). Library
4390 functions (when CALL_LIBCALL is set) always have the proper types for args,
4391 so we can pass the FP value just in one register. emit_library_function
4392 doesn't support PARALLEL anyway. */
4395 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4396 tree type, int named)
4398 enum rs6000_abi abi = DEFAULT_ABI;
4400 /* Return a marker to indicate whether CR1 needs to set or clear the
4401 bit that V.4 uses to say fp args were passed in registers.
4402 Assume that we don't need the marker for software floating point,
4403 or compiler generated library calls. */
4404 if (mode == VOIDmode)
4407 && cum->nargs_prototype < 0
4408 && (cum->call_cookie & CALL_LIBCALL) == 0
4409 && (cum->prototype || TARGET_NO_PROTOTYPE))
4411 /* For the SPE, we need to crxor CR6 always. */
4413 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4414 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4415 return GEN_INT (cum->call_cookie
4416 | ((cum->fregno == FP_ARG_MIN_REG)
4417 ? CALL_V4_SET_FP_ARGS
4418 : CALL_V4_CLEAR_FP_ARGS));
4421 return GEN_INT (cum->call_cookie);
4424 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4425 if (TARGET_64BIT && ! cum->prototype)
4427 /* Vector parameters get passed in vector register
4428 and also in GPRs or memory, in absence of prototype. */
4431 align_words = (cum->words + 1) & ~1;
4433 if (align_words >= GP_ARG_NUM_REG)
4439 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4441 return gen_rtx_PARALLEL (mode,
4443 gen_rtx_EXPR_LIST (VOIDmode,
4445 gen_rtx_EXPR_LIST (VOIDmode,
4446 gen_rtx_REG (mode, cum->vregno),
4450 return gen_rtx_REG (mode, cum->vregno);
4451 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4453 if (named || abi == ABI_V4)
4457 /* Vector parameters to varargs functions under AIX or Darwin
4458 get passed in memory and possibly also in GPRs. */
4459 int align, align_words;
4460 enum machine_mode part_mode = mode;
4462 /* Vector parameters must be 16-byte aligned. This places them at
4463 2 mod 4 in terms of words in 32-bit mode, since the parameter
4464 save area starts at offset 24 from the stack. In 64-bit mode,
4465 they just have to start on an even word, since the parameter
4466 save area is 16-byte aligned. */
4468 align = ((6 - (cum->words & 3)) & 3);
4470 align = cum->words & 1;
4471 align_words = cum->words + align;
4473 /* Out of registers? Memory, then. */
4474 if (align_words >= GP_ARG_NUM_REG)
4477 /* The vector value goes in GPRs. Only the part of the
4478 value in GPRs is reported here. */
4479 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4481 /* Fortunately, there are only two possibilities, the value
4482 is either wholly in GPRs or half in GPRs and half not. */
4485 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4488 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4489 return rs6000_spe_function_arg (cum, mode, type);
4490 else if (abi == ABI_V4)
4492 if (TARGET_HARD_FLOAT && TARGET_FPRS
4493 && (mode == SFmode || mode == DFmode))
4495 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4496 return gen_rtx_REG (mode, cum->fregno);
4503 int gregno = cum->sysv_gregno;
4505 /* Aggregates and IEEE quad get passed by reference. */
4506 if ((type && AGGREGATE_TYPE_P (type))
4510 n_words = rs6000_arg_size (mode, type);
4512 /* Long long and SPE vectors are put in odd registers. */
4513 if (n_words == 2 && (gregno & 1) == 0)
4516 /* Long long does not split between registers and stack. */
4517 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4518 return gen_rtx_REG (mode, gregno);
4525 int align = (TARGET_32BIT && (cum->words & 1) != 0
4526 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4527 int align_words = cum->words + align;
4529 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4532 if (TARGET_32BIT && TARGET_POWERPC64
4533 && (mode == DImode || mode == BLKmode))
4534 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4536 if (USE_FP_FOR_ARG_P (cum, mode, type))
4541 enum machine_mode fmode = mode;
4543 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4545 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4547 /* Long double split over regs and memory. */
4548 if (fmode == TFmode)
4551 /* Currently, we only ever need one reg here because complex
4552 doubles are split. */
4553 if (cum->fregno != FP_ARG_MAX_REG - 1)
4556 fpr[1] = gen_rtx_REG (fmode, cum->fregno);
4558 /* Do we also need to pass this arg in the parameter save
4561 && (cum->nargs_prototype <= 0
4562 || (DEFAULT_ABI == ABI_AIX
4564 && align_words >= GP_ARG_NUM_REG)));
4566 if (!needs_psave && mode == fmode)
4569 if (TARGET_32BIT && TARGET_POWERPC64
4570 && mode == DFmode && cum->stdarg)
4571 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4573 /* Describe where this piece goes. */
4575 *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
4580 /* Now describe the part that goes in gprs or the stack.
4581 This piece must come first, before the fprs. */
4583 if (align_words < GP_ARG_NUM_REG)
4585 unsigned long n_words = rs6000_arg_size (mode, type);
4586 enum machine_mode rmode = mode;
4588 if (align_words + n_words > GP_ARG_NUM_REG)
4589 /* If this is partially on the stack, then we only
4590 include the portion actually in registers here.
4591 We know this can only be one register because
4592 complex doubles are splt. */
4594 reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
4596 *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4600 return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
4602 else if (align_words < GP_ARG_NUM_REG)
4603 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4609 /* For an arg passed partly in registers and partly in memory,
4610 this is the number of registers used.
4611 For args passed entirely in registers or entirely in memory, zero. */
4614 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4615 tree type, int named)
4619 if (DEFAULT_ABI == ABI_V4)
4622 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4623 && cum->nargs_prototype >= 0)
4626 if (USE_FP_FOR_ARG_P (cum, mode, type))
4628 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4629 ret = FP_ARG_MAX_REG - cum->fregno;
4630 else if (cum->nargs_prototype >= 0)
4634 if (cum->words < GP_ARG_NUM_REG
4635 && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
4636 ret = GP_ARG_NUM_REG - cum->words;
4638 if (ret != 0 && TARGET_DEBUG_ARG)
4639 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4644 /* A C expression that indicates when an argument must be passed by
4645 reference. If nonzero for an argument, a copy of that argument is
4646 made in memory and a pointer to the argument is passed instead of
4647 the argument itself. The pointer is passed in whatever way is
4648 appropriate for passing a pointer to that type.
4650 Under V.4, structures and unions are passed by reference.
4652 As an extension to all ABIs, variable sized types are passed by
4656 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4657 enum machine_mode mode ATTRIBUTE_UNUSED,
4658 tree type, int named ATTRIBUTE_UNUSED)
4660 if (DEFAULT_ABI == ABI_V4
4661 && ((type && AGGREGATE_TYPE_P (type))
4664 if (TARGET_DEBUG_ARG)
4665 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4669 return type && int_size_in_bytes (type) < 0;
4673 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4676 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4681 for (i = 0; i < nregs; i++)
4683 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4684 if (reload_completed)
4686 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4689 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4690 i * GET_MODE_SIZE(reg_mode));
4693 tem = replace_equiv_address (tem, XEXP (tem, 0));
4695 if (tem == NULL_RTX)
4698 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4703 /* Perform any needed actions needed for a function that is receiving a
4704 variable number of arguments.
4708 MODE and TYPE are the mode and type of the current parameter.
4710 PRETEND_SIZE is a variable that should be set to the amount of stack
4711 that must be pushed by the prolog to pretend that our caller pushed
4714 Normally, this macro will push all remaining incoming registers on the
4715 stack and set PRETEND_SIZE to the length of the registers pushed. */
4718 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4719 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4721 CUMULATIVE_ARGS next_cum;
4722 int reg_size = TARGET_32BIT ? 4 : 8;
4723 rtx save_area = NULL_RTX, mem;
4724 int first_reg_offset, set;
4726 /* Skip the last named argument. */
4728 function_arg_advance (&next_cum, mode, type, 1);
4730 if (DEFAULT_ABI == ABI_V4)
4732 /* Indicate to allocate space on the stack for varargs save area. */
4733 cfun->machine->sysv_varargs_p = 1;
4735 save_area = plus_constant (virtual_stack_vars_rtx,
4736 - RS6000_VARARGS_SIZE);
4738 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4742 first_reg_offset = next_cum.words;
4743 save_area = virtual_incoming_args_rtx;
4744 cfun->machine->sysv_varargs_p = 0;
4746 if (MUST_PASS_IN_STACK (mode, type))
4747 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4750 set = get_varargs_alias_set ();
4751 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4753 mem = gen_rtx_MEM (BLKmode,
4754 plus_constant (save_area,
4755 first_reg_offset * reg_size)),
4756 set_mem_alias_set (mem, set);
4757 set_mem_align (mem, BITS_PER_WORD);
4759 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4760 GP_ARG_NUM_REG - first_reg_offset);
4763 /* Save FP registers if needed. */
4764 if (DEFAULT_ABI == ABI_V4
4765 && TARGET_HARD_FLOAT && TARGET_FPRS
4767 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4769 int fregno = next_cum.fregno;
4770 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4771 rtx lab = gen_label_rtx ();
4772 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4774 emit_jump_insn (gen_rtx_SET (VOIDmode,
4776 gen_rtx_IF_THEN_ELSE (VOIDmode,
4777 gen_rtx_NE (VOIDmode, cr1,
4779 gen_rtx_LABEL_REF (VOIDmode, lab),
4782 while (fregno <= FP_ARG_V4_MAX_REG)
4784 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4785 set_mem_alias_set (mem, set);
4786 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4795 /* Create the va_list data type. */
4798 rs6000_build_builtin_va_list (void)
4800 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4802 /* For AIX, prefer 'char *' because that's what the system
4803 header files like. */
4804 if (DEFAULT_ABI != ABI_V4)
4805 return build_pointer_type (char_type_node);
4807 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4808 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4810 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4811 unsigned_char_type_node);
4812 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4813 unsigned_char_type_node);
4814 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4816 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4817 short_unsigned_type_node);
4818 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4820 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4823 DECL_FIELD_CONTEXT (f_gpr) = record;
4824 DECL_FIELD_CONTEXT (f_fpr) = record;
4825 DECL_FIELD_CONTEXT (f_res) = record;
4826 DECL_FIELD_CONTEXT (f_ovf) = record;
4827 DECL_FIELD_CONTEXT (f_sav) = record;
4829 TREE_CHAIN (record) = type_decl;
4830 TYPE_NAME (record) = type_decl;
4831 TYPE_FIELDS (record) = f_gpr;
4832 TREE_CHAIN (f_gpr) = f_fpr;
4833 TREE_CHAIN (f_fpr) = f_res;
4834 TREE_CHAIN (f_res) = f_ovf;
4835 TREE_CHAIN (f_ovf) = f_sav;
4837 layout_type (record);
4839 /* The correct type is an array type of one element. */
4840 return build_array_type (record, build_index_type (size_zero_node));
4843 /* Implement va_start. */
4846 rs6000_va_start (tree valist, rtx nextarg)
4848 HOST_WIDE_INT words, n_gpr, n_fpr;
4849 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4850 tree gpr, fpr, ovf, sav, t;
4852 /* Only SVR4 needs something special. */
4853 if (DEFAULT_ABI != ABI_V4)
4855 std_expand_builtin_va_start (valist, nextarg);
4859 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4860 f_fpr = TREE_CHAIN (f_gpr);
4861 f_res = TREE_CHAIN (f_fpr);
4862 f_ovf = TREE_CHAIN (f_res);
4863 f_sav = TREE_CHAIN (f_ovf);
4865 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4866 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4867 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4868 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4869 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4871 /* Count number of gp and fp argument registers used. */
4872 words = current_function_args_info.words;
4873 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4874 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4876 if (TARGET_DEBUG_ARG)
4877 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4878 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4879 words, n_gpr, n_fpr);
4881 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4882 TREE_SIDE_EFFECTS (t) = 1;
4883 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4885 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4886 TREE_SIDE_EFFECTS (t) = 1;
4887 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4889 /* Find the overflow area. */
4890 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4892 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4893 build_int_2 (words * UNITS_PER_WORD, 0));
4894 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4895 TREE_SIDE_EFFECTS (t) = 1;
4896 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4898 /* Find the register save area. */
4899 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4900 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4901 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4902 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4903 TREE_SIDE_EFFECTS (t) = 1;
4904 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4907 /* Implement va_arg. */
4910 rs6000_va_arg (tree valist, tree type)
4912 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4913 tree gpr, fpr, ovf, sav, reg, t, u;
4914 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4915 rtx lab_false, lab_over, addr_rtx, r;
4917 if (DEFAULT_ABI != ABI_V4)
4919 /* Variable sized types are passed by reference. */
4920 if (int_size_in_bytes (type) < 0)
4922 u = build_pointer_type (type);
4924 /* Args grow upward. */
4925 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4926 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4927 TREE_SIDE_EFFECTS (t) = 1;
4929 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4930 TREE_SIDE_EFFECTS (t) = 1;
4932 t = build1 (INDIRECT_REF, u, t);
4933 TREE_SIDE_EFFECTS (t) = 1;
4935 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4937 if (targetm.calls.split_complex_arg
4938 && TREE_CODE (type) == COMPLEX_TYPE)
4940 tree elem_type = TREE_TYPE (type);
4941 enum machine_mode elem_mode = TYPE_MODE (elem_type);
4942 int elem_size = GET_MODE_SIZE (elem_mode);
4944 if (elem_size < UNITS_PER_WORD)
4946 rtx real_part, imag_part, dest_real, rr;
4948 real_part = rs6000_va_arg (valist, elem_type);
4949 imag_part = rs6000_va_arg (valist, elem_type);
4951 /* We're not returning the value here, but the address.
4952 real_part and imag_part are not contiguous, and we know
4953 there is space available to pack real_part next to
4954 imag_part. float _Complex is not promoted to
4955 double _Complex by the default promotion rules that
4956 promote float to double. */
4957 if (2 * elem_size > UNITS_PER_WORD)
4960 real_part = gen_rtx_MEM (elem_mode, real_part);
4961 imag_part = gen_rtx_MEM (elem_mode, imag_part);
4963 dest_real = adjust_address (imag_part, elem_mode, -elem_size);
4964 rr = gen_reg_rtx (elem_mode);
4965 emit_move_insn (rr, real_part);
4966 emit_move_insn (dest_real, rr);
4968 return XEXP (dest_real, 0);
4972 return std_expand_builtin_va_arg (valist, type);
4975 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4976 f_fpr = TREE_CHAIN (f_gpr);
4977 f_res = TREE_CHAIN (f_fpr);
4978 f_ovf = TREE_CHAIN (f_res);
4979 f_sav = TREE_CHAIN (f_ovf);
4981 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4982 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4983 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4984 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4985 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4987 size = int_size_in_bytes (type);
4988 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4990 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4992 /* Aggregates and long doubles are passed by reference. */
4998 size = UNITS_PER_WORD;
5001 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
5003 /* FP args go in FP registers, if present. */
5012 /* Otherwise into GP registers. */
5020 /* Pull the value out of the saved registers.... */
5022 lab_false = gen_label_rtx ();
5023 lab_over = gen_label_rtx ();
5024 addr_rtx = gen_reg_rtx (Pmode);
5026 /* AltiVec vectors never go in registers. */
5027 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
5029 TREE_THIS_VOLATILE (reg) = 1;
5030 emit_cmp_and_jump_insns
5031 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
5032 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
5035 /* Long long is aligned in the registers. */
5038 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5039 build_int_2 (n_reg - 1, 0));
5040 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
5041 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
5042 TREE_SIDE_EFFECTS (u) = 1;
5043 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
5047 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5051 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
5052 build_int_2 (n_reg, 0));
5053 TREE_SIDE_EFFECTS (u) = 1;
5055 u = build1 (CONVERT_EXPR, integer_type_node, u);
5056 TREE_SIDE_EFFECTS (u) = 1;
5058 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
5059 TREE_SIDE_EFFECTS (u) = 1;
5061 t = build (PLUS_EXPR, ptr_type_node, t, u);
5062 TREE_SIDE_EFFECTS (t) = 1;
5064 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5066 emit_move_insn (addr_rtx, r);
5068 emit_jump_insn (gen_jump (lab_over));
5072 emit_label (lab_false);
5074 /* ... otherwise out of the overflow area. */
5076 /* Make sure we don't find reg 7 for the next int arg.
5078 All AltiVec vectors go in the overflow area. So in the AltiVec
5079 case we need to get the vectors from the overflow area, but
5080 remember where the GPRs and FPRs are. */
5081 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
5082 || !TARGET_ALTIVEC))
5084 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
5085 TREE_SIDE_EFFECTS (t) = 1;
5086 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5089 /* Care for on-stack alignment if needed. */
5096 /* AltiVec vectors are 16 byte aligned. */
5097 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
5102 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
5103 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
5107 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5109 emit_move_insn (addr_rtx, r);
5111 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5112 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5113 TREE_SIDE_EFFECTS (t) = 1;
5114 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5116 emit_label (lab_over);
5120 r = gen_rtx_MEM (Pmode, addr_rtx);
5121 set_mem_alias_set (r, get_varargs_alias_set ());
5122 emit_move_insn (addr_rtx, r);
5130 #define def_builtin(MASK, NAME, TYPE, CODE) \
5132 if ((MASK) & target_flags) \
5133 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5137 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5139 static const struct builtin_description bdesc_3arg[] =
5141 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5142 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5143 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5144 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5145 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5146 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5147 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5148 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5149 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5150 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5151 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5152 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5153 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5154 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5155 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5156 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5157 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5158 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5159 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5160 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5161 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5162 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5163 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5166 /* DST operations: void foo (void *, const int, const char). */
5168 static const struct builtin_description bdesc_dst[] =
5170 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5171 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5172 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5173 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5176 /* Simple binary operations: VECc = foo (VECa, VECb). */
5178 static struct builtin_description bdesc_2arg[] =
5180 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5181 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5182 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5183 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5184 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5185 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5186 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5187 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5188 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5189 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5190 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5191 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5192 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5193 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5194 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5195 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5196 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5197 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5198 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5199 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5200 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5201 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5202 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5203 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5204 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5205 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5206 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5207 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5208 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5209 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5210 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5211 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5212 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5213 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5214 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5215 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5216 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5217 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5218 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5219 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5220 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5221 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5222 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5223 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5224 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5225 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5226 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5227 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5228 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5229 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5230 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5231 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5232 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5233 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5234 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5235 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5236 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5237 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5238 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5239 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5240 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5241 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5242 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5243 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5244 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5245 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5246 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5247 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5248 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5249 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5250 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5251 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5252 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5253 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5254 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5255 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5256 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5257 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5258 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5259 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5260 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5261 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5262 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5263 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5264 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5265 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5266 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5267 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5268 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5269 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5270 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5271 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5272 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5273 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5274 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5275 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5276 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5277 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5278 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5279 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5280 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5281 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5282 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5283 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5284 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5285 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5286 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5287 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5288 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5289 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5290 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5291 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5292 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5294 /* Place holder, leave as first spe builtin. */
5295 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5296 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5297 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5298 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5299 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5300 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5301 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5302 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5303 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5304 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5305 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5306 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5307 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5308 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5309 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5310 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5311 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5312 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5313 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5314 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5315 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5316 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5317 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5318 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5319 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5320 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5321 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5322 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5323 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5324 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5325 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5326 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5327 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5328 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5329 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5330 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5331 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5332 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5333 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5334 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5335 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5336 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5337 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5338 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5339 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5340 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5341 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5342 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5343 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5344 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5345 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5346 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5347 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5348 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5349 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5350 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5351 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5352 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5353 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5354 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5355 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5356 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5357 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5358 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5359 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5360 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5361 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5362 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5363 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5364 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5365 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5366 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5367 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5368 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5369 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5370 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5371 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5372 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5373 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5374 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5375 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5376 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5377 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5378 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5379 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5380 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5381 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5382 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5383 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5384 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5385 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5386 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5387 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5388 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5389 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5390 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5391 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5392 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5393 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5394 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5395 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5396 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5397 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5398 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5399 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5400 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5401 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5402 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5403 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5405 /* SPE binary operations expecting a 5-bit unsigned literal. */
5406 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5408 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5409 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5410 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5411 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5412 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5413 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5414 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5415 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5416 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5417 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5418 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5419 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5420 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5421 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5422 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5423 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5424 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5425 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5426 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5427 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5428 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5429 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5430 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5431 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5432 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5433 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5435 /* Place-holder. Leave as last binary SPE builtin. */
5436 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5439 /* AltiVec predicates. */
5441 struct builtin_description_predicates
5443 const unsigned int mask;
5444 const enum insn_code icode;
5446 const char *const name;
5447 const enum rs6000_builtins code;
5450 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5452 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5453 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5454 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5455 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5456 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5457 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5458 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5459 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5460 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5461 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5462 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5463 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5464 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5467 /* SPE predicates. */
5468 static struct builtin_description bdesc_spe_predicates[] =
5470 /* Place-holder. Leave as first. */
5471 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5472 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5473 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5474 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5475 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5476 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5477 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5478 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5479 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5480 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5481 /* Place-holder. Leave as last. */
5482 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5485 /* SPE evsel predicates. */
5486 static struct builtin_description bdesc_spe_evsel[] =
5488 /* Place-holder. Leave as first. */
5489 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5490 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5491 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5492 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5493 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5494 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5495 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5496 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5497 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5498 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5499 /* Place-holder. Leave as last. */
5500 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5503 /* ABS* operations. */
5505 static const struct builtin_description bdesc_abs[] =
5507 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5508 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5509 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5510 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5511 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5512 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5513 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5516 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5519 static struct builtin_description bdesc_1arg[] =
5521 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5522 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5523 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5524 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5525 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5526 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5527 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5528 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5529 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5530 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5531 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5532 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5533 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5534 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5535 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5536 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5537 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5539 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5540 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5541 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5542 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5543 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5544 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5545 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5546 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5547 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5548 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5549 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5550 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5551 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5552 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5553 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5554 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5555 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5556 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5557 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5558 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5559 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5560 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5561 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5562 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5563 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5564 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5565 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5566 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5567 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5568 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5570 /* Place-holder. Leave as last unary SPE builtin. */
5571 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5575 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5578 tree arg0 = TREE_VALUE (arglist);
5579 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5580 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5581 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5583 if (icode == CODE_FOR_nothing)
5584 /* Builtin not supported on this processor. */
5587 /* If we got invalid arguments bail out before generating bad rtl. */
5588 if (arg0 == error_mark_node)
5591 if (icode == CODE_FOR_altivec_vspltisb
5592 || icode == CODE_FOR_altivec_vspltish
5593 || icode == CODE_FOR_altivec_vspltisw
5594 || icode == CODE_FOR_spe_evsplatfi
5595 || icode == CODE_FOR_spe_evsplati)
5597 /* Only allow 5-bit *signed* literals. */
5598 if (GET_CODE (op0) != CONST_INT
5599 || INTVAL (op0) > 0x1f
5600 || INTVAL (op0) < -0x1f)
5602 error ("argument 1 must be a 5-bit signed literal");
5608 || GET_MODE (target) != tmode
5609 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5610 target = gen_reg_rtx (tmode);
5612 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5613 op0 = copy_to_mode_reg (mode0, op0);
5615 pat = GEN_FCN (icode) (target, op0);
5624 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5626 rtx pat, scratch1, scratch2;
5627 tree arg0 = TREE_VALUE (arglist);
5628 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5629 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5630 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5632 /* If we have invalid arguments, bail out before generating bad rtl. */
5633 if (arg0 == error_mark_node)
5637 || GET_MODE (target) != tmode
5638 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5639 target = gen_reg_rtx (tmode);
5641 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5642 op0 = copy_to_mode_reg (mode0, op0);
5644 scratch1 = gen_reg_rtx (mode0);
5645 scratch2 = gen_reg_rtx (mode0);
5647 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5656 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5659 tree arg0 = TREE_VALUE (arglist);
5660 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5661 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5662 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5663 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5664 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5665 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5667 if (icode == CODE_FOR_nothing)
5668 /* Builtin not supported on this processor. */
5671 /* If we got invalid arguments bail out before generating bad rtl. */
5672 if (arg0 == error_mark_node || arg1 == error_mark_node)
5675 if (icode == CODE_FOR_altivec_vcfux
5676 || icode == CODE_FOR_altivec_vcfsx
5677 || icode == CODE_FOR_altivec_vctsxs
5678 || icode == CODE_FOR_altivec_vctuxs
5679 || icode == CODE_FOR_altivec_vspltb
5680 || icode == CODE_FOR_altivec_vsplth
5681 || icode == CODE_FOR_altivec_vspltw
5682 || icode == CODE_FOR_spe_evaddiw
5683 || icode == CODE_FOR_spe_evldd
5684 || icode == CODE_FOR_spe_evldh
5685 || icode == CODE_FOR_spe_evldw
5686 || icode == CODE_FOR_spe_evlhhesplat
5687 || icode == CODE_FOR_spe_evlhhossplat
5688 || icode == CODE_FOR_spe_evlhhousplat
5689 || icode == CODE_FOR_spe_evlwhe
5690 || icode == CODE_FOR_spe_evlwhos
5691 || icode == CODE_FOR_spe_evlwhou
5692 || icode == CODE_FOR_spe_evlwhsplat
5693 || icode == CODE_FOR_spe_evlwwsplat
5694 || icode == CODE_FOR_spe_evrlwi
5695 || icode == CODE_FOR_spe_evslwi
5696 || icode == CODE_FOR_spe_evsrwis
5697 || icode == CODE_FOR_spe_evsubifw
5698 || icode == CODE_FOR_spe_evsrwiu)
5700 /* Only allow 5-bit unsigned literals. */
5702 if (TREE_CODE (arg1) != INTEGER_CST
5703 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5705 error ("argument 2 must be a 5-bit unsigned literal");
5711 || GET_MODE (target) != tmode
5712 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5713 target = gen_reg_rtx (tmode);
5715 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5716 op0 = copy_to_mode_reg (mode0, op0);
5717 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5718 op1 = copy_to_mode_reg (mode1, op1);
5720 pat = GEN_FCN (icode) (target, op0, op1);
5729 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5730 tree arglist, rtx target)
5733 tree cr6_form = TREE_VALUE (arglist);
5734 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5735 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5736 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5737 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5738 enum machine_mode tmode = SImode;
5739 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5740 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5743 if (TREE_CODE (cr6_form) != INTEGER_CST)
5745 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5749 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5754 /* If we have invalid arguments, bail out before generating bad rtl. */
5755 if (arg0 == error_mark_node || arg1 == error_mark_node)
5759 || GET_MODE (target) != tmode
5760 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5761 target = gen_reg_rtx (tmode);
5763 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5764 op0 = copy_to_mode_reg (mode0, op0);
5765 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5766 op1 = copy_to_mode_reg (mode1, op1);
5768 scratch = gen_reg_rtx (mode0);
5770 pat = GEN_FCN (icode) (scratch, op0, op1,
5771 gen_rtx_SYMBOL_REF (Pmode, opcode));
5776 /* The vec_any* and vec_all* predicates use the same opcodes for two
5777 different operations, but the bits in CR6 will be different
5778 depending on what information we want. So we have to play tricks
5779 with CR6 to get the right bits out.
5781 If you think this is disgusting, look at the specs for the
5782 AltiVec predicates. */
5784 switch (cr6_form_int)
5787 emit_insn (gen_cr6_test_for_zero (target));
5790 emit_insn (gen_cr6_test_for_zero_reverse (target));
5793 emit_insn (gen_cr6_test_for_lt (target));
5796 emit_insn (gen_cr6_test_for_lt_reverse (target));
5799 error ("argument 1 of __builtin_altivec_predicate is out of range");
5807 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5810 tree arg0 = TREE_VALUE (arglist);
5811 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5812 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5813 enum machine_mode mode0 = Pmode;
5814 enum machine_mode mode1 = Pmode;
5815 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5816 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5818 if (icode == CODE_FOR_nothing)
5819 /* Builtin not supported on this processor. */
5822 /* If we got invalid arguments bail out before generating bad rtl. */
5823 if (arg0 == error_mark_node || arg1 == error_mark_node)
5827 || GET_MODE (target) != tmode
5828 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5829 target = gen_reg_rtx (tmode);
5831 op1 = copy_to_mode_reg (mode1, op1);
5833 if (op0 == const0_rtx)
5835 addr = gen_rtx_MEM (tmode, op1);
5839 op0 = copy_to_mode_reg (mode0, op0);
5840 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5843 pat = GEN_FCN (icode) (target, addr);
5853 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5855 tree arg0 = TREE_VALUE (arglist);
5856 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5857 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5858 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5859 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5860 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5862 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5863 enum machine_mode mode1 = Pmode;
5864 enum machine_mode mode2 = Pmode;
5866 /* Invalid arguments. Bail before doing anything stoopid! */
5867 if (arg0 == error_mark_node
5868 || arg1 == error_mark_node
5869 || arg2 == error_mark_node)
5872 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5873 op0 = copy_to_mode_reg (tmode, op0);
5875 op2 = copy_to_mode_reg (mode2, op2);
5877 if (op1 == const0_rtx)
5879 addr = gen_rtx_MEM (tmode, op2);
5883 op1 = copy_to_mode_reg (mode1, op1);
5884 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5887 pat = GEN_FCN (icode) (addr, op0);
5894 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5897 tree arg0 = TREE_VALUE (arglist);
5898 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5899 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5900 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5901 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5902 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5903 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5904 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5905 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5906 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5908 if (icode == CODE_FOR_nothing)
5909 /* Builtin not supported on this processor. */
5912 /* If we got invalid arguments bail out before generating bad rtl. */
5913 if (arg0 == error_mark_node
5914 || arg1 == error_mark_node
5915 || arg2 == error_mark_node)
5918 if (icode == CODE_FOR_altivec_vsldoi_4sf
5919 || icode == CODE_FOR_altivec_vsldoi_4si
5920 || icode == CODE_FOR_altivec_vsldoi_8hi
5921 || icode == CODE_FOR_altivec_vsldoi_16qi)
5923 /* Only allow 4-bit unsigned literals. */
5925 if (TREE_CODE (arg2) != INTEGER_CST
5926 || TREE_INT_CST_LOW (arg2) & ~0xf)
5928 error ("argument 3 must be a 4-bit unsigned literal");
5934 || GET_MODE (target) != tmode
5935 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5936 target = gen_reg_rtx (tmode);
5938 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5939 op0 = copy_to_mode_reg (mode0, op0);
5940 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5941 op1 = copy_to_mode_reg (mode1, op1);
5942 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5943 op2 = copy_to_mode_reg (mode2, op2);
5945 pat = GEN_FCN (icode) (target, op0, op1, op2);
5953 /* Expand the lvx builtins. */
5955 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5957 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5958 tree arglist = TREE_OPERAND (exp, 1);
5959 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5961 enum machine_mode tmode, mode0;
5963 enum insn_code icode;
5967 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5968 icode = CODE_FOR_altivec_lvx_16qi;
5970 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5971 icode = CODE_FOR_altivec_lvx_8hi;
5973 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5974 icode = CODE_FOR_altivec_lvx_4si;
5976 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5977 icode = CODE_FOR_altivec_lvx_4sf;
5986 arg0 = TREE_VALUE (arglist);
5987 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5988 tmode = insn_data[icode].operand[0].mode;
5989 mode0 = insn_data[icode].operand[1].mode;
5992 || GET_MODE (target) != tmode
5993 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5994 target = gen_reg_rtx (tmode);
5996 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5997 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5999 pat = GEN_FCN (icode) (target, op0);
6006 /* Expand the stvx builtins. */
6008 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6011 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6012 tree arglist = TREE_OPERAND (exp, 1);
6013 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6015 enum machine_mode mode0, mode1;
6017 enum insn_code icode;
6021 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6022 icode = CODE_FOR_altivec_stvx_16qi;
6024 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6025 icode = CODE_FOR_altivec_stvx_8hi;
6027 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6028 icode = CODE_FOR_altivec_stvx_4si;
6030 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6031 icode = CODE_FOR_altivec_stvx_4sf;
6038 arg0 = TREE_VALUE (arglist);
6039 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6040 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6041 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6042 mode0 = insn_data[icode].operand[0].mode;
6043 mode1 = insn_data[icode].operand[1].mode;
6045 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6046 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6047 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6048 op1 = copy_to_mode_reg (mode1, op1);
6050 pat = GEN_FCN (icode) (op0, op1);
6058 /* Expand the dst builtins. */
6060 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6063 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6064 tree arglist = TREE_OPERAND (exp, 1);
6065 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6066 tree arg0, arg1, arg2;
6067 enum machine_mode mode0, mode1, mode2;
6068 rtx pat, op0, op1, op2;
6069 struct builtin_description *d;
6074 /* Handle DST variants. */
6075 d = (struct builtin_description *) bdesc_dst;
6076 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6077 if (d->code == fcode)
6079 arg0 = TREE_VALUE (arglist);
6080 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6081 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6082 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6083 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6084 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6085 mode0 = insn_data[d->icode].operand[0].mode;
6086 mode1 = insn_data[d->icode].operand[1].mode;
6087 mode2 = insn_data[d->icode].operand[2].mode;
6089 /* Invalid arguments, bail out before generating bad rtl. */
6090 if (arg0 == error_mark_node
6091 || arg1 == error_mark_node
6092 || arg2 == error_mark_node)
6097 if (TREE_CODE (arg2) != INTEGER_CST
6098 || TREE_INT_CST_LOW (arg2) & ~0x3)
6100 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6104 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6105 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6106 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6107 op1 = copy_to_mode_reg (mode1, op1);
6109 pat = GEN_FCN (d->icode) (op0, op1, op2);
6119 /* Expand the builtin in EXP and store the result in TARGET. Store
6120 true in *EXPANDEDP if we found a builtin to expand. */
6122 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6124 struct builtin_description *d;
6125 struct builtin_description_predicates *dp;
6127 enum insn_code icode;
6128 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6129 tree arglist = TREE_OPERAND (exp, 1);
6132 enum machine_mode tmode, mode0;
6133 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6135 target = altivec_expand_ld_builtin (exp, target, expandedp);
6139 target = altivec_expand_st_builtin (exp, target, expandedp);
6143 target = altivec_expand_dst_builtin (exp, target, expandedp);
6151 case ALTIVEC_BUILTIN_STVX:
6152 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6153 case ALTIVEC_BUILTIN_STVEBX:
6154 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6155 case ALTIVEC_BUILTIN_STVEHX:
6156 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6157 case ALTIVEC_BUILTIN_STVEWX:
6158 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6159 case ALTIVEC_BUILTIN_STVXL:
6160 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6162 case ALTIVEC_BUILTIN_MFVSCR:
6163 icode = CODE_FOR_altivec_mfvscr;
6164 tmode = insn_data[icode].operand[0].mode;
6167 || GET_MODE (target) != tmode
6168 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6169 target = gen_reg_rtx (tmode);
6171 pat = GEN_FCN (icode) (target);
6177 case ALTIVEC_BUILTIN_MTVSCR:
6178 icode = CODE_FOR_altivec_mtvscr;
6179 arg0 = TREE_VALUE (arglist);
6180 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6181 mode0 = insn_data[icode].operand[0].mode;
6183 /* If we got invalid arguments bail out before generating bad rtl. */
6184 if (arg0 == error_mark_node)
6187 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6188 op0 = copy_to_mode_reg (mode0, op0);
6190 pat = GEN_FCN (icode) (op0);
6195 case ALTIVEC_BUILTIN_DSSALL:
6196 emit_insn (gen_altivec_dssall ());
6199 case ALTIVEC_BUILTIN_DSS:
6200 icode = CODE_FOR_altivec_dss;
6201 arg0 = TREE_VALUE (arglist);
6203 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6204 mode0 = insn_data[icode].operand[0].mode;
6206 /* If we got invalid arguments bail out before generating bad rtl. */
6207 if (arg0 == error_mark_node)
6210 if (TREE_CODE (arg0) != INTEGER_CST
6211 || TREE_INT_CST_LOW (arg0) & ~0x3)
6213 error ("argument to dss must be a 2-bit unsigned literal");
6217 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6218 op0 = copy_to_mode_reg (mode0, op0);
6220 emit_insn (gen_altivec_dss (op0));
6223 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6224 arg0 = TREE_VALUE (arglist);
6225 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6226 arg0 = TREE_OPERAND (arg0, 0);
6227 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6228 TREE_STRING_POINTER (arg0));
6233 /* Expand abs* operations. */
6234 d = (struct builtin_description *) bdesc_abs;
6235 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6236 if (d->code == fcode)
6237 return altivec_expand_abs_builtin (d->icode, arglist, target);
6239 /* Expand the AltiVec predicates. */
6240 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6241 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6242 if (dp->code == fcode)
6243 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6245 /* LV* are funky. We initialized them differently. */
6248 case ALTIVEC_BUILTIN_LVSL:
6249 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6251 case ALTIVEC_BUILTIN_LVSR:
6252 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6254 case ALTIVEC_BUILTIN_LVEBX:
6255 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6257 case ALTIVEC_BUILTIN_LVEHX:
6258 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6260 case ALTIVEC_BUILTIN_LVEWX:
6261 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6263 case ALTIVEC_BUILTIN_LVXL:
6264 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6266 case ALTIVEC_BUILTIN_LVX:
6267 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6278 /* Binops that need to be initialized manually, but can be expanded
6279 automagically by rs6000_expand_binop_builtin. */
6280 static struct builtin_description bdesc_2arg_spe[] =
6282 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6283 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6284 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6285 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6286 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6287 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6288 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6289 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6290 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6291 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6292 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6293 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6294 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6295 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6296 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6297 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6298 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6299 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6300 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6301 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6302 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6303 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6306 /* Expand the builtin in EXP and store the result in TARGET. Store
6307 true in *EXPANDEDP if we found a builtin to expand.
6309 This expands the SPE builtins that are not simple unary and binary
6312 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6314 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6315 tree arglist = TREE_OPERAND (exp, 1);
6317 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6318 enum insn_code icode;
6319 enum machine_mode tmode, mode0;
6321 struct builtin_description *d;
6326 /* Syntax check for a 5-bit unsigned immediate. */
6329 case SPE_BUILTIN_EVSTDD:
6330 case SPE_BUILTIN_EVSTDH:
6331 case SPE_BUILTIN_EVSTDW:
6332 case SPE_BUILTIN_EVSTWHE:
6333 case SPE_BUILTIN_EVSTWHO:
6334 case SPE_BUILTIN_EVSTWWE:
6335 case SPE_BUILTIN_EVSTWWO:
6336 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6337 if (TREE_CODE (arg1) != INTEGER_CST
6338 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6340 error ("argument 2 must be a 5-bit unsigned literal");
6348 /* The evsplat*i instructions are not quite generic. */
6351 case SPE_BUILTIN_EVSPLATFI:
6352 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6354 case SPE_BUILTIN_EVSPLATI:
6355 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6361 d = (struct builtin_description *) bdesc_2arg_spe;
6362 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6363 if (d->code == fcode)
6364 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6366 d = (struct builtin_description *) bdesc_spe_predicates;
6367 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6368 if (d->code == fcode)
6369 return spe_expand_predicate_builtin (d->icode, arglist, target);
6371 d = (struct builtin_description *) bdesc_spe_evsel;
6372 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6373 if (d->code == fcode)
6374 return spe_expand_evsel_builtin (d->icode, arglist, target);
6378 case SPE_BUILTIN_EVSTDDX:
6379 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6380 case SPE_BUILTIN_EVSTDHX:
6381 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6382 case SPE_BUILTIN_EVSTDWX:
6383 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6384 case SPE_BUILTIN_EVSTWHEX:
6385 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6386 case SPE_BUILTIN_EVSTWHOX:
6387 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6388 case SPE_BUILTIN_EVSTWWEX:
6389 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6390 case SPE_BUILTIN_EVSTWWOX:
6391 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6392 case SPE_BUILTIN_EVSTDD:
6393 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6394 case SPE_BUILTIN_EVSTDH:
6395 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6396 case SPE_BUILTIN_EVSTDW:
6397 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6398 case SPE_BUILTIN_EVSTWHE:
6399 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6400 case SPE_BUILTIN_EVSTWHO:
6401 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6402 case SPE_BUILTIN_EVSTWWE:
6403 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6404 case SPE_BUILTIN_EVSTWWO:
6405 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6406 case SPE_BUILTIN_MFSPEFSCR:
6407 icode = CODE_FOR_spe_mfspefscr;
6408 tmode = insn_data[icode].operand[0].mode;
6411 || GET_MODE (target) != tmode
6412 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6413 target = gen_reg_rtx (tmode);
6415 pat = GEN_FCN (icode) (target);
6420 case SPE_BUILTIN_MTSPEFSCR:
6421 icode = CODE_FOR_spe_mtspefscr;
6422 arg0 = TREE_VALUE (arglist);
6423 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6424 mode0 = insn_data[icode].operand[0].mode;
6426 if (arg0 == error_mark_node)
6429 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6430 op0 = copy_to_mode_reg (mode0, op0);
6432 pat = GEN_FCN (icode) (op0);
6445 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6447 rtx pat, scratch, tmp;
6448 tree form = TREE_VALUE (arglist);
6449 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6450 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6451 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6452 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6453 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6454 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6458 if (TREE_CODE (form) != INTEGER_CST)
6460 error ("argument 1 of __builtin_spe_predicate must be a constant");
6464 form_int = TREE_INT_CST_LOW (form);
6469 if (arg0 == error_mark_node || arg1 == error_mark_node)
6473 || GET_MODE (target) != SImode
6474 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6475 target = gen_reg_rtx (SImode);
6477 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6478 op0 = copy_to_mode_reg (mode0, op0);
6479 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6480 op1 = copy_to_mode_reg (mode1, op1);
6482 scratch = gen_reg_rtx (CCmode);
6484 pat = GEN_FCN (icode) (scratch, op0, op1);
6489 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6490 _lower_. We use one compare, but look in different bits of the
6491 CR for each variant.
6493 There are 2 elements in each SPE simd type (upper/lower). The CR
6494 bits are set as follows:
6496 BIT0 | BIT 1 | BIT 2 | BIT 3
6497 U | L | (U | L) | (U & L)
6499 So, for an "all" relationship, BIT 3 would be set.
6500 For an "any" relationship, BIT 2 would be set. Etc.
6502 Following traditional nomenclature, these bits map to:
6504 BIT0 | BIT 1 | BIT 2 | BIT 3
6507 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6512 /* All variant. OV bit. */
6514 /* We need to get to the OV bit, which is the ORDERED bit. We
6515 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6516 that's ugly and will trigger a validate_condition_mode abort.
6517 So let's just use another pattern. */
6518 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6520 /* Any variant. EQ bit. */
6524 /* Upper variant. LT bit. */
6528 /* Lower variant. GT bit. */
6533 error ("argument 1 of __builtin_spe_predicate is out of range");
6537 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6538 emit_move_insn (target, tmp);
6543 /* The evsel builtins look like this:
6545 e = __builtin_spe_evsel_OP (a, b, c, d);
6549 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6550 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6554 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6557 tree arg0 = TREE_VALUE (arglist);
6558 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6559 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6560 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6561 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6562 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6563 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6564 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6565 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6566 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6571 if (arg0 == error_mark_node || arg1 == error_mark_node
6572 || arg2 == error_mark_node || arg3 == error_mark_node)
6576 || GET_MODE (target) != mode0
6577 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6578 target = gen_reg_rtx (mode0);
6580 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6581 op0 = copy_to_mode_reg (mode0, op0);
6582 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6583 op1 = copy_to_mode_reg (mode0, op1);
6584 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6585 op2 = copy_to_mode_reg (mode0, op2);
6586 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6587 op3 = copy_to_mode_reg (mode0, op3);
6589 /* Generate the compare. */
6590 scratch = gen_reg_rtx (CCmode);
6591 pat = GEN_FCN (icode) (scratch, op0, op1);
6596 if (mode0 == V2SImode)
6597 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6599 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6604 /* Expand an expression EXP that calls a built-in function,
6605 with result going to TARGET if that's convenient
6606 (and in mode MODE if that's convenient).
6607 SUBTARGET may be used as the target for computing one of EXP's operands.
6608 IGNORE is nonzero if the value is to be ignored. */
6611 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6612 enum machine_mode mode ATTRIBUTE_UNUSED,
6613 int ignore ATTRIBUTE_UNUSED)
6615 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6616 tree arglist = TREE_OPERAND (exp, 1);
6617 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6618 struct builtin_description *d;
6625 ret = altivec_expand_builtin (exp, target, &success);
6632 ret = spe_expand_builtin (exp, target, &success);
6638 if (TARGET_ALTIVEC || TARGET_SPE)
6640 /* Handle simple unary operations. */
6641 d = (struct builtin_description *) bdesc_1arg;
6642 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6643 if (d->code == fcode)
6644 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6646 /* Handle simple binary operations. */
6647 d = (struct builtin_description *) bdesc_2arg;
6648 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6649 if (d->code == fcode)
6650 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6652 /* Handle simple ternary operations. */
6653 d = (struct builtin_description *) bdesc_3arg;
6654 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6655 if (d->code == fcode)
6656 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6664 build_opaque_vector_type (tree node, int nunits)
6666 node = copy_node (node);
6667 TYPE_MAIN_VARIANT (node) = node;
6668 return build_vector_type (node, nunits);
6672 rs6000_init_builtins (void)
6674 V2SI_type_node = build_vector_type (intSI_type_node, 2);
6675 V2SF_type_node = build_vector_type (float_type_node, 2);
6676 V4HI_type_node = build_vector_type (intHI_type_node, 4);
6677 V4SI_type_node = build_vector_type (intSI_type_node, 4);
6678 V4SF_type_node = build_vector_type (float_type_node, 4);
6679 V8HI_type_node = build_vector_type (intHI_type_node, 8);
6680 V16QI_type_node = build_vector_type (intQI_type_node, 16);
6682 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
6683 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
6684 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
6686 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
6687 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6688 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6690 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6691 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
6692 'vector unsigned short'. */
6694 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6695 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6696 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6697 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6698 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6699 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6700 pixel_type_node = copy_node (unsigned_intHI_type_node);
6701 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6703 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6704 get_identifier ("__bool char"),
6705 bool_char_type_node));
6706 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6707 get_identifier ("__bool short"),
6708 bool_short_type_node));
6709 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6710 get_identifier ("__bool int"),
6711 bool_int_type_node));
6712 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6713 get_identifier ("__pixel"),
6716 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
6717 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
6718 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
6719 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
6721 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6722 get_identifier ("__vector unsigned char"),
6723 unsigned_V16QI_type_node));
6724 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6725 get_identifier ("__vector signed char"),
6727 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6728 get_identifier ("__vector __bool char"),
6729 bool_V16QI_type_node));
6731 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6732 get_identifier ("__vector unsigned short"),
6733 unsigned_V8HI_type_node));
6734 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6735 get_identifier ("__vector signed short"),
6737 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6738 get_identifier ("__vector __bool short"),
6739 bool_V8HI_type_node));
6741 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6742 get_identifier ("__vector unsigned int"),
6743 unsigned_V4SI_type_node));
6744 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6745 get_identifier ("__vector signed int"),
6747 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6748 get_identifier ("__vector __bool int"),
6749 bool_V4SI_type_node));
6751 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6752 get_identifier ("__vector float"),
6754 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6755 get_identifier ("__vector __pixel"),
6756 pixel_V8HI_type_node));
6759 spe_init_builtins ();
6761 altivec_init_builtins ();
6762 if (TARGET_ALTIVEC || TARGET_SPE)
6763 rs6000_common_init_builtins ();
6766 /* Search through a set of builtins and enable the mask bits.
6767 DESC is an array of builtins.
6768 SIZE is the total number of builtins.
6769 START is the builtin enum at which to start.
6770 END is the builtin enum at which to end. */
6772 enable_mask_for_builtins (struct builtin_description *desc, int size,
6773 enum rs6000_builtins start,
6774 enum rs6000_builtins end)
6778 for (i = 0; i < size; ++i)
6779 if (desc[i].code == start)
6785 for (; i < size; ++i)
6787 /* Flip all the bits on. */
6788 desc[i].mask = target_flags;
6789 if (desc[i].code == end)
6795 spe_init_builtins (void)
6797 tree endlink = void_list_node;
6798 tree puint_type_node = build_pointer_type (unsigned_type_node);
6799 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6800 struct builtin_description *d;
6803 tree v2si_ftype_4_v2si
6804 = build_function_type
6805 (opaque_V2SI_type_node,
6806 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6807 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6808 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6809 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6812 tree v2sf_ftype_4_v2sf
6813 = build_function_type
6814 (opaque_V2SF_type_node,
6815 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6816 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6817 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6818 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6821 tree int_ftype_int_v2si_v2si
6822 = build_function_type
6824 tree_cons (NULL_TREE, integer_type_node,
6825 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6826 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6829 tree int_ftype_int_v2sf_v2sf
6830 = build_function_type
6832 tree_cons (NULL_TREE, integer_type_node,
6833 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6834 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6837 tree void_ftype_v2si_puint_int
6838 = build_function_type (void_type_node,
6839 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6840 tree_cons (NULL_TREE, puint_type_node,
6841 tree_cons (NULL_TREE,
6845 tree void_ftype_v2si_puint_char
6846 = build_function_type (void_type_node,
6847 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6848 tree_cons (NULL_TREE, puint_type_node,
6849 tree_cons (NULL_TREE,
6853 tree void_ftype_v2si_pv2si_int
6854 = build_function_type (void_type_node,
6855 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6856 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6857 tree_cons (NULL_TREE,
6861 tree void_ftype_v2si_pv2si_char
6862 = build_function_type (void_type_node,
6863 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6864 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6865 tree_cons (NULL_TREE,
6870 = build_function_type (void_type_node,
6871 tree_cons (NULL_TREE, integer_type_node, endlink));
6874 = build_function_type (integer_type_node, endlink);
6876 tree v2si_ftype_pv2si_int
6877 = build_function_type (opaque_V2SI_type_node,
6878 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6879 tree_cons (NULL_TREE, integer_type_node,
6882 tree v2si_ftype_puint_int
6883 = build_function_type (opaque_V2SI_type_node,
6884 tree_cons (NULL_TREE, puint_type_node,
6885 tree_cons (NULL_TREE, integer_type_node,
6888 tree v2si_ftype_pushort_int
6889 = build_function_type (opaque_V2SI_type_node,
6890 tree_cons (NULL_TREE, pushort_type_node,
6891 tree_cons (NULL_TREE, integer_type_node,
6894 tree v2si_ftype_signed_char
6895 = build_function_type (opaque_V2SI_type_node,
6896 tree_cons (NULL_TREE, signed_char_type_node,
6899 /* The initialization of the simple binary and unary builtins is
6900 done in rs6000_common_init_builtins, but we have to enable the
6901 mask bits here manually because we have run out of `target_flags'
6902 bits. We really need to redesign this mask business. */
6904 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6905 ARRAY_SIZE (bdesc_2arg),
6908 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6909 ARRAY_SIZE (bdesc_1arg),
6911 SPE_BUILTIN_EVSUBFUSIAAW);
6912 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6913 ARRAY_SIZE (bdesc_spe_predicates),
6914 SPE_BUILTIN_EVCMPEQ,
6915 SPE_BUILTIN_EVFSTSTLT);
6916 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6917 ARRAY_SIZE (bdesc_spe_evsel),
6918 SPE_BUILTIN_EVSEL_CMPGTS,
6919 SPE_BUILTIN_EVSEL_FSTSTEQ);
6921 (*lang_hooks.decls.pushdecl)
6922 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6923 opaque_V2SI_type_node));
6925 /* Initialize irregular SPE builtins. */
6927 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6928 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6929 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6930 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6931 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6932 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6933 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6934 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6935 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6936 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6937 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6938 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6939 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6940 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6941 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6942 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6943 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6944 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6947 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6948 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6949 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6950 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6951 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6952 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6953 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6954 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6955 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6956 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6957 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6958 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6959 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6960 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6961 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6962 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6963 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6964 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6965 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6966 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6967 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6968 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6971 d = (struct builtin_description *) bdesc_spe_predicates;
6972 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6976 switch (insn_data[d->icode].operand[1].mode)
6979 type = int_ftype_int_v2si_v2si;
6982 type = int_ftype_int_v2sf_v2sf;
6988 def_builtin (d->mask, d->name, type, d->code);
6991 /* Evsel predicates. */
6992 d = (struct builtin_description *) bdesc_spe_evsel;
6993 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6997 switch (insn_data[d->icode].operand[1].mode)
7000 type = v2si_ftype_4_v2si;
7003 type = v2sf_ftype_4_v2sf;
7009 def_builtin (d->mask, d->name, type, d->code);
7014 altivec_init_builtins (void)
7016 struct builtin_description *d;
7017 struct builtin_description_predicates *dp;
7019 tree pfloat_type_node = build_pointer_type (float_type_node);
7020 tree pint_type_node = build_pointer_type (integer_type_node);
7021 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7022 tree pchar_type_node = build_pointer_type (char_type_node);
7024 tree pvoid_type_node = build_pointer_type (void_type_node);
7026 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7027 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7028 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7029 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7031 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7033 tree int_ftype_int_v4si_v4si
7034 = build_function_type_list (integer_type_node,
7035 integer_type_node, V4SI_type_node,
7036 V4SI_type_node, NULL_TREE);
7037 tree v4sf_ftype_pcfloat
7038 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7039 tree void_ftype_pfloat_v4sf
7040 = build_function_type_list (void_type_node,
7041 pfloat_type_node, V4SF_type_node, NULL_TREE);
7042 tree v4si_ftype_pcint
7043 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7044 tree void_ftype_pint_v4si
7045 = build_function_type_list (void_type_node,
7046 pint_type_node, V4SI_type_node, NULL_TREE);
7047 tree v8hi_ftype_pcshort
7048 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7049 tree void_ftype_pshort_v8hi
7050 = build_function_type_list (void_type_node,
7051 pshort_type_node, V8HI_type_node, NULL_TREE);
7052 tree v16qi_ftype_pcchar
7053 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7054 tree void_ftype_pchar_v16qi
7055 = build_function_type_list (void_type_node,
7056 pchar_type_node, V16QI_type_node, NULL_TREE);
7057 tree void_ftype_v4si
7058 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7059 tree v8hi_ftype_void
7060 = build_function_type (V8HI_type_node, void_list_node);
7061 tree void_ftype_void
7062 = build_function_type (void_type_node, void_list_node);
7064 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
7066 tree v16qi_ftype_long_pcvoid
7067 = build_function_type_list (V16QI_type_node,
7068 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7069 tree v8hi_ftype_long_pcvoid
7070 = build_function_type_list (V8HI_type_node,
7071 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7072 tree v4si_ftype_long_pcvoid
7073 = build_function_type_list (V4SI_type_node,
7074 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7076 tree void_ftype_v4si_long_pvoid
7077 = build_function_type_list (void_type_node,
7078 V4SI_type_node, long_integer_type_node,
7079 pvoid_type_node, NULL_TREE);
7080 tree void_ftype_v16qi_long_pvoid
7081 = build_function_type_list (void_type_node,
7082 V16QI_type_node, long_integer_type_node,
7083 pvoid_type_node, NULL_TREE);
7084 tree void_ftype_v8hi_long_pvoid
7085 = build_function_type_list (void_type_node,
7086 V8HI_type_node, long_integer_type_node,
7087 pvoid_type_node, NULL_TREE);
7088 tree int_ftype_int_v8hi_v8hi
7089 = build_function_type_list (integer_type_node,
7090 integer_type_node, V8HI_type_node,
7091 V8HI_type_node, NULL_TREE);
7092 tree int_ftype_int_v16qi_v16qi
7093 = build_function_type_list (integer_type_node,
7094 integer_type_node, V16QI_type_node,
7095 V16QI_type_node, NULL_TREE);
7096 tree int_ftype_int_v4sf_v4sf
7097 = build_function_type_list (integer_type_node,
7098 integer_type_node, V4SF_type_node,
7099 V4SF_type_node, NULL_TREE);
7100 tree v4si_ftype_v4si
7101 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7102 tree v8hi_ftype_v8hi
7103 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7104 tree v16qi_ftype_v16qi
7105 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7106 tree v4sf_ftype_v4sf
7107 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7108 tree void_ftype_pcvoid_int_int
7109 = build_function_type_list (void_type_node,
7110 pcvoid_type_node, integer_type_node,
7111 integer_type_node, NULL_TREE);
7112 tree int_ftype_pcchar
7113 = build_function_type_list (integer_type_node,
7114 pcchar_type_node, NULL_TREE);
7116 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7117 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7118 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7119 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7120 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7121 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7122 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7123 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7124 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7125 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7126 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7127 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7128 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7129 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7130 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7131 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7132 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7133 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7134 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7135 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7136 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7137 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7138 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7139 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7140 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7141 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7142 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7143 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7144 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7145 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7146 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7147 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7149 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7150 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7151 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7153 /* Add the DST variants. */
7154 d = (struct builtin_description *) bdesc_dst;
7155 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7156 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7158 /* Initialize the predicates. */
7159 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7160 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7162 enum machine_mode mode1;
7165 mode1 = insn_data[dp->icode].operand[1].mode;
7170 type = int_ftype_int_v4si_v4si;
7173 type = int_ftype_int_v8hi_v8hi;
7176 type = int_ftype_int_v16qi_v16qi;
7179 type = int_ftype_int_v4sf_v4sf;
7185 def_builtin (dp->mask, dp->name, type, dp->code);
7188 /* Initialize the abs* operators. */
7189 d = (struct builtin_description *) bdesc_abs;
7190 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7192 enum machine_mode mode0;
7195 mode0 = insn_data[d->icode].operand[0].mode;
7200 type = v4si_ftype_v4si;
7203 type = v8hi_ftype_v8hi;
7206 type = v16qi_ftype_v16qi;
7209 type = v4sf_ftype_v4sf;
7215 def_builtin (d->mask, d->name, type, d->code);
7220 rs6000_common_init_builtins (void)
7222 struct builtin_description *d;
7225 tree v4sf_ftype_v4sf_v4sf_v16qi
7226 = build_function_type_list (V4SF_type_node,
7227 V4SF_type_node, V4SF_type_node,
7228 V16QI_type_node, NULL_TREE);
7229 tree v4si_ftype_v4si_v4si_v16qi
7230 = build_function_type_list (V4SI_type_node,
7231 V4SI_type_node, V4SI_type_node,
7232 V16QI_type_node, NULL_TREE);
7233 tree v8hi_ftype_v8hi_v8hi_v16qi
7234 = build_function_type_list (V8HI_type_node,
7235 V8HI_type_node, V8HI_type_node,
7236 V16QI_type_node, NULL_TREE);
7237 tree v16qi_ftype_v16qi_v16qi_v16qi
7238 = build_function_type_list (V16QI_type_node,
7239 V16QI_type_node, V16QI_type_node,
7240 V16QI_type_node, NULL_TREE);
7242 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7244 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7245 tree v16qi_ftype_int
7246 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7247 tree v8hi_ftype_v16qi
7248 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7249 tree v4sf_ftype_v4sf
7250 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7252 tree v2si_ftype_v2si_v2si
7253 = build_function_type_list (opaque_V2SI_type_node,
7254 opaque_V2SI_type_node,
7255 opaque_V2SI_type_node, NULL_TREE);
7257 tree v2sf_ftype_v2sf_v2sf
7258 = build_function_type_list (opaque_V2SF_type_node,
7259 opaque_V2SF_type_node,
7260 opaque_V2SF_type_node, NULL_TREE);
7262 tree v2si_ftype_int_int
7263 = build_function_type_list (opaque_V2SI_type_node,
7264 integer_type_node, integer_type_node,
7267 tree v2si_ftype_v2si
7268 = build_function_type_list (opaque_V2SI_type_node,
7269 opaque_V2SI_type_node, NULL_TREE);
7271 tree v2sf_ftype_v2sf
7272 = build_function_type_list (opaque_V2SF_type_node,
7273 opaque_V2SF_type_node, NULL_TREE);
7275 tree v2sf_ftype_v2si
7276 = build_function_type_list (opaque_V2SF_type_node,
7277 opaque_V2SI_type_node, NULL_TREE);
7279 tree v2si_ftype_v2sf
7280 = build_function_type_list (opaque_V2SI_type_node,
7281 opaque_V2SF_type_node, NULL_TREE);
7283 tree v2si_ftype_v2si_char
7284 = build_function_type_list (opaque_V2SI_type_node,
7285 opaque_V2SI_type_node,
7286 char_type_node, NULL_TREE);
7288 tree v2si_ftype_int_char
7289 = build_function_type_list (opaque_V2SI_type_node,
7290 integer_type_node, char_type_node, NULL_TREE);
7292 tree v2si_ftype_char
7293 = build_function_type_list (opaque_V2SI_type_node,
7294 char_type_node, NULL_TREE);
7296 tree int_ftype_int_int
7297 = build_function_type_list (integer_type_node,
7298 integer_type_node, integer_type_node,
7301 tree v4si_ftype_v4si_v4si
7302 = build_function_type_list (V4SI_type_node,
7303 V4SI_type_node, V4SI_type_node, NULL_TREE);
7304 tree v4sf_ftype_v4si_int
7305 = build_function_type_list (V4SF_type_node,
7306 V4SI_type_node, integer_type_node, NULL_TREE);
7307 tree v4si_ftype_v4sf_int
7308 = build_function_type_list (V4SI_type_node,
7309 V4SF_type_node, integer_type_node, NULL_TREE);
7310 tree v4si_ftype_v4si_int
7311 = build_function_type_list (V4SI_type_node,
7312 V4SI_type_node, integer_type_node, NULL_TREE);
7313 tree v8hi_ftype_v8hi_int
7314 = build_function_type_list (V8HI_type_node,
7315 V8HI_type_node, integer_type_node, NULL_TREE);
7316 tree v16qi_ftype_v16qi_int
7317 = build_function_type_list (V16QI_type_node,
7318 V16QI_type_node, integer_type_node, NULL_TREE);
7319 tree v16qi_ftype_v16qi_v16qi_int
7320 = build_function_type_list (V16QI_type_node,
7321 V16QI_type_node, V16QI_type_node,
7322 integer_type_node, NULL_TREE);
7323 tree v8hi_ftype_v8hi_v8hi_int
7324 = build_function_type_list (V8HI_type_node,
7325 V8HI_type_node, V8HI_type_node,
7326 integer_type_node, NULL_TREE);
7327 tree v4si_ftype_v4si_v4si_int
7328 = build_function_type_list (V4SI_type_node,
7329 V4SI_type_node, V4SI_type_node,
7330 integer_type_node, NULL_TREE);
7331 tree v4sf_ftype_v4sf_v4sf_int
7332 = build_function_type_list (V4SF_type_node,
7333 V4SF_type_node, V4SF_type_node,
7334 integer_type_node, NULL_TREE);
7335 tree v4sf_ftype_v4sf_v4sf
7336 = build_function_type_list (V4SF_type_node,
7337 V4SF_type_node, V4SF_type_node, NULL_TREE);
7338 tree v4sf_ftype_v4sf_v4sf_v4si
7339 = build_function_type_list (V4SF_type_node,
7340 V4SF_type_node, V4SF_type_node,
7341 V4SI_type_node, NULL_TREE);
7342 tree v4sf_ftype_v4sf_v4sf_v4sf
7343 = build_function_type_list (V4SF_type_node,
7344 V4SF_type_node, V4SF_type_node,
7345 V4SF_type_node, NULL_TREE);
7346 tree v4si_ftype_v4si_v4si_v4si
7347 = build_function_type_list (V4SI_type_node,
7348 V4SI_type_node, V4SI_type_node,
7349 V4SI_type_node, NULL_TREE);
7350 tree v8hi_ftype_v8hi_v8hi
7351 = build_function_type_list (V8HI_type_node,
7352 V8HI_type_node, V8HI_type_node, NULL_TREE);
7353 tree v8hi_ftype_v8hi_v8hi_v8hi
7354 = build_function_type_list (V8HI_type_node,
7355 V8HI_type_node, V8HI_type_node,
7356 V8HI_type_node, NULL_TREE);
7357 tree v4si_ftype_v8hi_v8hi_v4si
7358 = build_function_type_list (V4SI_type_node,
7359 V8HI_type_node, V8HI_type_node,
7360 V4SI_type_node, NULL_TREE);
7361 tree v4si_ftype_v16qi_v16qi_v4si
7362 = build_function_type_list (V4SI_type_node,
7363 V16QI_type_node, V16QI_type_node,
7364 V4SI_type_node, NULL_TREE);
7365 tree v16qi_ftype_v16qi_v16qi
7366 = build_function_type_list (V16QI_type_node,
7367 V16QI_type_node, V16QI_type_node, NULL_TREE);
7368 tree v4si_ftype_v4sf_v4sf
7369 = build_function_type_list (V4SI_type_node,
7370 V4SF_type_node, V4SF_type_node, NULL_TREE);
7371 tree v8hi_ftype_v16qi_v16qi
7372 = build_function_type_list (V8HI_type_node,
7373 V16QI_type_node, V16QI_type_node, NULL_TREE);
7374 tree v4si_ftype_v8hi_v8hi
7375 = build_function_type_list (V4SI_type_node,
7376 V8HI_type_node, V8HI_type_node, NULL_TREE);
7377 tree v8hi_ftype_v4si_v4si
7378 = build_function_type_list (V8HI_type_node,
7379 V4SI_type_node, V4SI_type_node, NULL_TREE);
7380 tree v16qi_ftype_v8hi_v8hi
7381 = build_function_type_list (V16QI_type_node,
7382 V8HI_type_node, V8HI_type_node, NULL_TREE);
7383 tree v4si_ftype_v16qi_v4si
7384 = build_function_type_list (V4SI_type_node,
7385 V16QI_type_node, V4SI_type_node, NULL_TREE);
7386 tree v4si_ftype_v16qi_v16qi
7387 = build_function_type_list (V4SI_type_node,
7388 V16QI_type_node, V16QI_type_node, NULL_TREE);
7389 tree v4si_ftype_v8hi_v4si
7390 = build_function_type_list (V4SI_type_node,
7391 V8HI_type_node, V4SI_type_node, NULL_TREE);
7392 tree v4si_ftype_v8hi
7393 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7394 tree int_ftype_v4si_v4si
7395 = build_function_type_list (integer_type_node,
7396 V4SI_type_node, V4SI_type_node, NULL_TREE);
7397 tree int_ftype_v4sf_v4sf
7398 = build_function_type_list (integer_type_node,
7399 V4SF_type_node, V4SF_type_node, NULL_TREE);
7400 tree int_ftype_v16qi_v16qi
7401 = build_function_type_list (integer_type_node,
7402 V16QI_type_node, V16QI_type_node, NULL_TREE);
7403 tree int_ftype_v8hi_v8hi
7404 = build_function_type_list (integer_type_node,
7405 V8HI_type_node, V8HI_type_node, NULL_TREE);
7407 /* Add the simple ternary operators. */
7408 d = (struct builtin_description *) bdesc_3arg;
7409 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7412 enum machine_mode mode0, mode1, mode2, mode3;
7415 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7418 mode0 = insn_data[d->icode].operand[0].mode;
7419 mode1 = insn_data[d->icode].operand[1].mode;
7420 mode2 = insn_data[d->icode].operand[2].mode;
7421 mode3 = insn_data[d->icode].operand[3].mode;
7423 /* When all four are of the same mode. */
7424 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7429 type = v4si_ftype_v4si_v4si_v4si;
7432 type = v4sf_ftype_v4sf_v4sf_v4sf;
7435 type = v8hi_ftype_v8hi_v8hi_v8hi;
7438 type = v16qi_ftype_v16qi_v16qi_v16qi;
7444 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7449 type = v4si_ftype_v4si_v4si_v16qi;
7452 type = v4sf_ftype_v4sf_v4sf_v16qi;
7455 type = v8hi_ftype_v8hi_v8hi_v16qi;
7458 type = v16qi_ftype_v16qi_v16qi_v16qi;
7464 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7465 && mode3 == V4SImode)
7466 type = v4si_ftype_v16qi_v16qi_v4si;
7467 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7468 && mode3 == V4SImode)
7469 type = v4si_ftype_v8hi_v8hi_v4si;
7470 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7471 && mode3 == V4SImode)
7472 type = v4sf_ftype_v4sf_v4sf_v4si;
7474 /* vchar, vchar, vchar, 4 bit literal. */
7475 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7477 type = v16qi_ftype_v16qi_v16qi_int;
7479 /* vshort, vshort, vshort, 4 bit literal. */
7480 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7482 type = v8hi_ftype_v8hi_v8hi_int;
7484 /* vint, vint, vint, 4 bit literal. */
7485 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7487 type = v4si_ftype_v4si_v4si_int;
7489 /* vfloat, vfloat, vfloat, 4 bit literal. */
7490 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7492 type = v4sf_ftype_v4sf_v4sf_int;
7497 def_builtin (d->mask, d->name, type, d->code);
7500 /* Add the simple binary operators. */
7501 d = (struct builtin_description *) bdesc_2arg;
7502 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7504 enum machine_mode mode0, mode1, mode2;
7507 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7510 mode0 = insn_data[d->icode].operand[0].mode;
7511 mode1 = insn_data[d->icode].operand[1].mode;
7512 mode2 = insn_data[d->icode].operand[2].mode;
7514 /* When all three operands are of the same mode. */
7515 if (mode0 == mode1 && mode1 == mode2)
7520 type = v4sf_ftype_v4sf_v4sf;
7523 type = v4si_ftype_v4si_v4si;
7526 type = v16qi_ftype_v16qi_v16qi;
7529 type = v8hi_ftype_v8hi_v8hi;
7532 type = v2si_ftype_v2si_v2si;
7535 type = v2sf_ftype_v2sf_v2sf;
7538 type = int_ftype_int_int;
7545 /* A few other combos we really don't want to do manually. */
7547 /* vint, vfloat, vfloat. */
7548 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7549 type = v4si_ftype_v4sf_v4sf;
7551 /* vshort, vchar, vchar. */
7552 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7553 type = v8hi_ftype_v16qi_v16qi;
7555 /* vint, vshort, vshort. */
7556 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7557 type = v4si_ftype_v8hi_v8hi;
7559 /* vshort, vint, vint. */
7560 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7561 type = v8hi_ftype_v4si_v4si;
7563 /* vchar, vshort, vshort. */
7564 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7565 type = v16qi_ftype_v8hi_v8hi;
7567 /* vint, vchar, vint. */
7568 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7569 type = v4si_ftype_v16qi_v4si;
7571 /* vint, vchar, vchar. */
7572 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7573 type = v4si_ftype_v16qi_v16qi;
7575 /* vint, vshort, vint. */
7576 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7577 type = v4si_ftype_v8hi_v4si;
7579 /* vint, vint, 5 bit literal. */
7580 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7581 type = v4si_ftype_v4si_int;
7583 /* vshort, vshort, 5 bit literal. */
7584 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7585 type = v8hi_ftype_v8hi_int;
7587 /* vchar, vchar, 5 bit literal. */
7588 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7589 type = v16qi_ftype_v16qi_int;
7591 /* vfloat, vint, 5 bit literal. */
7592 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7593 type = v4sf_ftype_v4si_int;
7595 /* vint, vfloat, 5 bit literal. */
7596 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7597 type = v4si_ftype_v4sf_int;
7599 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7600 type = v2si_ftype_int_int;
7602 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7603 type = v2si_ftype_v2si_char;
7605 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7606 type = v2si_ftype_int_char;
7609 else if (mode0 == SImode)
7614 type = int_ftype_v4si_v4si;
7617 type = int_ftype_v4sf_v4sf;
7620 type = int_ftype_v16qi_v16qi;
7623 type = int_ftype_v8hi_v8hi;
7633 def_builtin (d->mask, d->name, type, d->code);
7636 /* Add the simple unary operators. */
7637 d = (struct builtin_description *) bdesc_1arg;
7638 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7640 enum machine_mode mode0, mode1;
7643 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7646 mode0 = insn_data[d->icode].operand[0].mode;
7647 mode1 = insn_data[d->icode].operand[1].mode;
7649 if (mode0 == V4SImode && mode1 == QImode)
7650 type = v4si_ftype_int;
7651 else if (mode0 == V8HImode && mode1 == QImode)
7652 type = v8hi_ftype_int;
7653 else if (mode0 == V16QImode && mode1 == QImode)
7654 type = v16qi_ftype_int;
7655 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7656 type = v4sf_ftype_v4sf;
7657 else if (mode0 == V8HImode && mode1 == V16QImode)
7658 type = v8hi_ftype_v16qi;
7659 else if (mode0 == V4SImode && mode1 == V8HImode)
7660 type = v4si_ftype_v8hi;
7661 else if (mode0 == V2SImode && mode1 == V2SImode)
7662 type = v2si_ftype_v2si;
7663 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7664 type = v2sf_ftype_v2sf;
7665 else if (mode0 == V2SFmode && mode1 == V2SImode)
7666 type = v2sf_ftype_v2si;
7667 else if (mode0 == V2SImode && mode1 == V2SFmode)
7668 type = v2si_ftype_v2sf;
7669 else if (mode0 == V2SImode && mode1 == QImode)
7670 type = v2si_ftype_char;
7674 def_builtin (d->mask, d->name, type, d->code);
7679 rs6000_init_libfuncs (void)
7681 if (!TARGET_HARD_FLOAT)
7684 if (DEFAULT_ABI != ABI_V4)
7686 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7688 /* AIX library routines for float->int conversion. */
7689 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7690 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7691 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7692 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7695 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7696 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7697 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7698 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7699 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7703 /* 32-bit SVR4 quad floating point routines. */
7705 set_optab_libfunc (add_optab, TFmode, "_q_add");
7706 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7707 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7708 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7709 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7710 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7711 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7713 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7714 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7715 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7716 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7717 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7718 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7720 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7721 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7722 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7723 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7724 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7725 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7726 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7730 /* Expand a block move operation, and return 1 if successful. Return 0
7731 if we should let the compiler generate normal code.
7733 operands[0] is the destination
7734 operands[1] is the source
7735 operands[2] is the length
7736 operands[3] is the alignment */
7738 #define MAX_MOVE_REG 4
7741 expand_block_move (rtx operands[])
7743 rtx orig_dest = operands[0];
7744 rtx orig_src = operands[1];
7745 rtx bytes_rtx = operands[2];
7746 rtx align_rtx = operands[3];
7747 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7752 rtx stores[MAX_MOVE_REG];
7755 /* If this is not a fixed size move, just call memcpy */
7759 /* If this is not a fixed size alignment, abort */
7760 if (GET_CODE (align_rtx) != CONST_INT)
7762 align = INTVAL (align_rtx);
7764 /* Anything to move? */
7765 bytes = INTVAL (bytes_rtx);
7769 /* store_one_arg depends on expand_block_move to handle at least the size of
7770 reg_parm_stack_space. */
7771 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7774 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7777 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7778 rtx (*mov) (rtx, rtx);
7780 enum machine_mode mode = BLKmode;
7784 && bytes > 24 /* move up to 32 bytes at a time */
7792 && ! fixed_regs[12])
7794 move_bytes = (bytes > 32) ? 32 : bytes;
7795 gen_func.movstrsi = gen_movstrsi_8reg;
7797 else if (TARGET_STRING
7798 && bytes > 16 /* move up to 24 bytes at a time */
7804 && ! fixed_regs[10])
7806 move_bytes = (bytes > 24) ? 24 : bytes;
7807 gen_func.movstrsi = gen_movstrsi_6reg;
7809 else if (TARGET_STRING
7810 && bytes > 8 /* move up to 16 bytes at a time */
7816 move_bytes = (bytes > 16) ? 16 : bytes;
7817 gen_func.movstrsi = gen_movstrsi_4reg;
7819 else if (bytes >= 8 && TARGET_POWERPC64
7820 /* 64-bit loads and stores require word-aligned
7822 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7826 gen_func.mov = gen_movdi;
7828 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7829 { /* move up to 8 bytes at a time */
7830 move_bytes = (bytes > 8) ? 8 : bytes;
7831 gen_func.movstrsi = gen_movstrsi_2reg;
7833 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7834 { /* move 4 bytes */
7837 gen_func.mov = gen_movsi;
7839 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7840 { /* move 2 bytes */
7843 gen_func.mov = gen_movhi;
7845 else if (TARGET_STRING && bytes > 1)
7846 { /* move up to 4 bytes at a time */
7847 move_bytes = (bytes > 4) ? 4 : bytes;
7848 gen_func.movstrsi = gen_movstrsi_1reg;
7850 else /* move 1 byte at a time */
7854 gen_func.mov = gen_movqi;
7857 src = adjust_address (orig_src, mode, offset);
7858 dest = adjust_address (orig_dest, mode, offset);
7860 if (mode != BLKmode)
7862 rtx tmp_reg = gen_reg_rtx (mode);
7864 emit_insn ((*gen_func.mov) (tmp_reg, src));
7865 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7868 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7871 for (i = 0; i < num_reg; i++)
7872 emit_insn (stores[i]);
7876 if (mode == BLKmode)
7878 /* Move the address into scratch registers. The movstrsi
7879 patterns require zero offset. */
7880 if (!REG_P (XEXP (src, 0)))
7882 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7883 src = replace_equiv_address (src, src_reg);
7885 set_mem_size (src, GEN_INT (move_bytes));
7887 if (!REG_P (XEXP (dest, 0)))
7889 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7890 dest = replace_equiv_address (dest, dest_reg);
7892 set_mem_size (dest, GEN_INT (move_bytes));
7894 emit_insn ((*gen_func.movstrsi) (dest, src,
7895 GEN_INT (move_bytes & 31),
7904 /* Return 1 if OP is a load multiple operation. It is known to be a
7905 PARALLEL and the first section will be tested. */
7908 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7910 int count = XVECLEN (op, 0);
7911 unsigned int dest_regno;
7915 /* Perform a quick check so we don't blow up below. */
7917 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7918 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7919 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7922 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7923 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7925 for (i = 1; i < count; i++)
7927 rtx elt = XVECEXP (op, 0, i);
7929 if (GET_CODE (elt) != SET
7930 || GET_CODE (SET_DEST (elt)) != REG
7931 || GET_MODE (SET_DEST (elt)) != SImode
7932 || REGNO (SET_DEST (elt)) != dest_regno + i
7933 || GET_CODE (SET_SRC (elt)) != MEM
7934 || GET_MODE (SET_SRC (elt)) != SImode
7935 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7936 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7937 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7938 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7945 /* Similar, but tests for store multiple. Here, the second vector element
7946 is a CLOBBER. It will be tested later. */
7949 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7951 int count = XVECLEN (op, 0) - 1;
7952 unsigned int src_regno;
7956 /* Perform a quick check so we don't blow up below. */
7958 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7959 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7960 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7963 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7964 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7966 for (i = 1; i < count; i++)
7968 rtx elt = XVECEXP (op, 0, i + 1);
7970 if (GET_CODE (elt) != SET
7971 || GET_CODE (SET_SRC (elt)) != REG
7972 || GET_MODE (SET_SRC (elt)) != SImode
7973 || REGNO (SET_SRC (elt)) != src_regno + i
7974 || GET_CODE (SET_DEST (elt)) != MEM
7975 || GET_MODE (SET_DEST (elt)) != SImode
7976 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7977 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7978 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7979 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7986 /* Return a string to perform a load_multiple operation.
7987 operands[0] is the vector.
7988 operands[1] is the source address.
7989 operands[2] is the first destination register. */
7992 rs6000_output_load_multiple (rtx operands[3])
7994 /* We have to handle the case where the pseudo used to contain the address
7995 is assigned to one of the output registers. */
7997 int words = XVECLEN (operands[0], 0);
8000 if (XVECLEN (operands[0], 0) == 1)
8001 return "{l|lwz} %2,0(%1)";
8003 for (i = 0; i < words; i++)
8004 if (refers_to_regno_p (REGNO (operands[2]) + i,
8005 REGNO (operands[2]) + i + 1, operands[1], 0))
8009 xop[0] = GEN_INT (4 * (words-1));
8010 xop[1] = operands[1];
8011 xop[2] = operands[2];
8012 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8017 xop[0] = GEN_INT (4 * (words-1));
8018 xop[1] = operands[1];
8019 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8020 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8025 for (j = 0; j < words; j++)
8028 xop[0] = GEN_INT (j * 4);
8029 xop[1] = operands[1];
8030 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8031 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8033 xop[0] = GEN_INT (i * 4);
8034 xop[1] = operands[1];
8035 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8040 return "{lsi|lswi} %2,%1,%N0";
8043 /* Return 1 for a parallel vrsave operation. */
8046 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8048 int count = XVECLEN (op, 0);
8049 unsigned int dest_regno, src_regno;
8053 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8054 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8055 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8058 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8059 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8061 if (dest_regno != VRSAVE_REGNO
8062 && src_regno != VRSAVE_REGNO)
8065 for (i = 1; i < count; i++)
8067 rtx elt = XVECEXP (op, 0, i);
8069 if (GET_CODE (elt) != CLOBBER
8070 && GET_CODE (elt) != SET)
8077 /* Return 1 for an PARALLEL suitable for mfcr. */
8080 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8082 int count = XVECLEN (op, 0);
8085 /* Perform a quick check so we don't blow up below. */
8087 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8088 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8089 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8092 for (i = 0; i < count; i++)
8094 rtx exp = XVECEXP (op, 0, i);
8099 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8101 if (GET_CODE (src_reg) != REG
8102 || GET_MODE (src_reg) != CCmode
8103 || ! CR_REGNO_P (REGNO (src_reg)))
8106 if (GET_CODE (exp) != SET
8107 || GET_CODE (SET_DEST (exp)) != REG
8108 || GET_MODE (SET_DEST (exp)) != SImode
8109 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8111 unspec = SET_SRC (exp);
8112 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8114 if (GET_CODE (unspec) != UNSPEC
8115 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8116 || XVECLEN (unspec, 0) != 2
8117 || XVECEXP (unspec, 0, 0) != src_reg
8118 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8119 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8125 /* Return 1 for an PARALLEL suitable for mtcrf. */
8128 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8130 int count = XVECLEN (op, 0);
8134 /* Perform a quick check so we don't blow up below. */
8136 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8137 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8138 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8140 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8142 if (GET_CODE (src_reg) != REG
8143 || GET_MODE (src_reg) != SImode
8144 || ! INT_REGNO_P (REGNO (src_reg)))
8147 for (i = 0; i < count; i++)
8149 rtx exp = XVECEXP (op, 0, i);
8153 if (GET_CODE (exp) != SET
8154 || GET_CODE (SET_DEST (exp)) != REG
8155 || GET_MODE (SET_DEST (exp)) != CCmode
8156 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8158 unspec = SET_SRC (exp);
8159 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8161 if (GET_CODE (unspec) != UNSPEC
8162 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8163 || XVECLEN (unspec, 0) != 2
8164 || XVECEXP (unspec, 0, 0) != src_reg
8165 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8166 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8172 /* Return 1 for an PARALLEL suitable for lmw. */
8175 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8177 int count = XVECLEN (op, 0);
8178 unsigned int dest_regno;
8180 unsigned int base_regno;
8181 HOST_WIDE_INT offset;
8184 /* Perform a quick check so we don't blow up below. */
8186 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8187 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8188 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8191 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8192 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8195 || count != 32 - (int) dest_regno)
8198 if (legitimate_indirect_address_p (src_addr, 0))
8201 base_regno = REGNO (src_addr);
8202 if (base_regno == 0)
8205 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8207 offset = INTVAL (XEXP (src_addr, 1));
8208 base_regno = REGNO (XEXP (src_addr, 0));
8213 for (i = 0; i < count; i++)
8215 rtx elt = XVECEXP (op, 0, i);
8218 HOST_WIDE_INT newoffset;
8220 if (GET_CODE (elt) != SET
8221 || GET_CODE (SET_DEST (elt)) != REG
8222 || GET_MODE (SET_DEST (elt)) != SImode
8223 || REGNO (SET_DEST (elt)) != dest_regno + i
8224 || GET_CODE (SET_SRC (elt)) != MEM
8225 || GET_MODE (SET_SRC (elt)) != SImode)
8227 newaddr = XEXP (SET_SRC (elt), 0);
8228 if (legitimate_indirect_address_p (newaddr, 0))
8233 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8235 addr_reg = XEXP (newaddr, 0);
8236 newoffset = INTVAL (XEXP (newaddr, 1));
8240 if (REGNO (addr_reg) != base_regno
8241 || newoffset != offset + 4 * i)
8248 /* Return 1 for an PARALLEL suitable for stmw. */
8251 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8253 int count = XVECLEN (op, 0);
8254 unsigned int src_regno;
8256 unsigned int base_regno;
8257 HOST_WIDE_INT offset;
8260 /* Perform a quick check so we don't blow up below. */
8262 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8263 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8264 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8267 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8268 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8271 || count != 32 - (int) src_regno)
8274 if (legitimate_indirect_address_p (dest_addr, 0))
8277 base_regno = REGNO (dest_addr);
8278 if (base_regno == 0)
8281 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8283 offset = INTVAL (XEXP (dest_addr, 1));
8284 base_regno = REGNO (XEXP (dest_addr, 0));
8289 for (i = 0; i < count; i++)
8291 rtx elt = XVECEXP (op, 0, i);
8294 HOST_WIDE_INT newoffset;
8296 if (GET_CODE (elt) != SET
8297 || GET_CODE (SET_SRC (elt)) != REG
8298 || GET_MODE (SET_SRC (elt)) != SImode
8299 || REGNO (SET_SRC (elt)) != src_regno + i
8300 || GET_CODE (SET_DEST (elt)) != MEM
8301 || GET_MODE (SET_DEST (elt)) != SImode)
8303 newaddr = XEXP (SET_DEST (elt), 0);
8304 if (legitimate_indirect_address_p (newaddr, 0))
8309 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8311 addr_reg = XEXP (newaddr, 0);
8312 newoffset = INTVAL (XEXP (newaddr, 1));
8316 if (REGNO (addr_reg) != base_regno
8317 || newoffset != offset + 4 * i)
8324 /* A validation routine: say whether CODE, a condition code, and MODE
8325 match. The other alternatives either don't make sense or should
8326 never be generated. */
8329 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8331 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8332 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8333 || GET_MODE_CLASS (mode) != MODE_CC)
8336 /* These don't make sense. */
8337 if ((code == GT || code == LT || code == GE || code == LE)
8338 && mode == CCUNSmode)
8341 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8342 && mode != CCUNSmode)
8345 if (mode != CCFPmode
8346 && (code == ORDERED || code == UNORDERED
8347 || code == UNEQ || code == LTGT
8348 || code == UNGT || code == UNLT
8349 || code == UNGE || code == UNLE))
8352 /* These should never be generated except for
8353 flag_finite_math_only. */
8354 if (mode == CCFPmode
8355 && ! flag_finite_math_only
8356 && (code == LE || code == GE
8357 || code == UNEQ || code == LTGT
8358 || code == UNGT || code == UNLT))
8361 /* These are invalid; the information is not there. */
8362 if (mode == CCEQmode
8363 && code != EQ && code != NE)
8367 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8368 We only check the opcode against the mode of the CC value here. */
8371 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8373 enum rtx_code code = GET_CODE (op);
8374 enum machine_mode cc_mode;
8376 if (!COMPARISON_P (op))
8379 cc_mode = GET_MODE (XEXP (op, 0));
8380 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8383 validate_condition_mode (code, cc_mode);
8388 /* Return 1 if OP is a comparison operation that is valid for a branch
8389 insn and which is true if the corresponding bit in the CC register
8393 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8397 if (! branch_comparison_operator (op, mode))
8400 code = GET_CODE (op);
8401 return (code == EQ || code == LT || code == GT
8402 || code == LTU || code == GTU
8403 || code == UNORDERED);
8406 /* Return 1 if OP is a comparison operation that is valid for an scc
8407 insn: it must be a positive comparison. */
8410 scc_comparison_operator (rtx op, enum machine_mode mode)
8412 return branch_positive_comparison_operator (op, mode);
8416 trap_comparison_operator (rtx op, enum machine_mode mode)
8418 if (mode != VOIDmode && mode != GET_MODE (op))
8420 return COMPARISON_P (op);
8424 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8426 enum rtx_code code = GET_CODE (op);
8427 return (code == AND || code == IOR || code == XOR);
8431 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8433 enum rtx_code code = GET_CODE (op);
8434 return (code == IOR || code == XOR);
8438 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8440 enum rtx_code code = GET_CODE (op);
8441 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8444 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8445 mask required to convert the result of a rotate insn into a shift
8446 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8449 includes_lshift_p (rtx shiftop, rtx andop)
8451 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8453 shift_mask <<= INTVAL (shiftop);
8455 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8458 /* Similar, but for right shift. */
8461 includes_rshift_p (rtx shiftop, rtx andop)
8463 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8465 shift_mask >>= INTVAL (shiftop);
8467 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8470 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8471 to perform a left shift. It must have exactly SHIFTOP least
8472 significant 0's, then one or more 1's, then zero or more 0's. */
8475 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8477 if (GET_CODE (andop) == CONST_INT)
8479 HOST_WIDE_INT c, lsb, shift_mask;
8482 if (c == 0 || c == ~0)
8486 shift_mask <<= INTVAL (shiftop);
8488 /* Find the least significant one bit. */
8491 /* It must coincide with the LSB of the shift mask. */
8492 if (-lsb != shift_mask)
8495 /* Invert to look for the next transition (if any). */
8498 /* Remove the low group of ones (originally low group of zeros). */
8501 /* Again find the lsb, and check we have all 1's above. */
8505 else if (GET_CODE (andop) == CONST_DOUBLE
8506 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8508 HOST_WIDE_INT low, high, lsb;
8509 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8511 low = CONST_DOUBLE_LOW (andop);
8512 if (HOST_BITS_PER_WIDE_INT < 64)
8513 high = CONST_DOUBLE_HIGH (andop);
8515 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8516 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8519 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8521 shift_mask_high = ~0;
8522 if (INTVAL (shiftop) > 32)
8523 shift_mask_high <<= INTVAL (shiftop) - 32;
8527 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8534 return high == -lsb;
8537 shift_mask_low = ~0;
8538 shift_mask_low <<= INTVAL (shiftop);
8542 if (-lsb != shift_mask_low)
8545 if (HOST_BITS_PER_WIDE_INT < 64)
8550 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8553 return high == -lsb;
8557 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8563 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8564 to perform a left shift. It must have SHIFTOP or more least
8565 significant 0's, with the remainder of the word 1's. */
8568 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8570 if (GET_CODE (andop) == CONST_INT)
8572 HOST_WIDE_INT c, lsb, shift_mask;
8575 shift_mask <<= INTVAL (shiftop);
8578 /* Find the least significant one bit. */
8581 /* It must be covered by the shift mask.
8582 This test also rejects c == 0. */
8583 if ((lsb & shift_mask) == 0)
8586 /* Check we have all 1's above the transition, and reject all 1's. */
8587 return c == -lsb && lsb != 1;
8589 else if (GET_CODE (andop) == CONST_DOUBLE
8590 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8592 HOST_WIDE_INT low, lsb, shift_mask_low;
8594 low = CONST_DOUBLE_LOW (andop);
8596 if (HOST_BITS_PER_WIDE_INT < 64)
8598 HOST_WIDE_INT high, shift_mask_high;
8600 high = CONST_DOUBLE_HIGH (andop);
8604 shift_mask_high = ~0;
8605 if (INTVAL (shiftop) > 32)
8606 shift_mask_high <<= INTVAL (shiftop) - 32;
8610 if ((lsb & shift_mask_high) == 0)
8613 return high == -lsb;
8619 shift_mask_low = ~0;
8620 shift_mask_low <<= INTVAL (shiftop);
8624 if ((lsb & shift_mask_low) == 0)
8627 return low == -lsb && lsb != 1;
8633 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8634 for lfq and stfq insns iff the registers are hard registers. */
8637 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8639 /* We might have been passed a SUBREG. */
8640 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8643 /* We might have been passed non floating point registers. */
8644 if (!FP_REGNO_P (REGNO (reg1))
8645 || !FP_REGNO_P (REGNO (reg2)))
8648 return (REGNO (reg1) == REGNO (reg2) - 1);
8651 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8652 addr1 and addr2 must be in consecutive memory locations
8653 (addr2 == addr1 + 8). */
8656 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
8662 /* The mems cannot be volatile. */
8663 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
8666 addr1 = XEXP (mem1, 0);
8667 addr2 = XEXP (mem2, 0);
8669 /* Extract an offset (if used) from the first addr. */
8670 if (GET_CODE (addr1) == PLUS)
8672 /* If not a REG, return zero. */
8673 if (GET_CODE (XEXP (addr1, 0)) != REG)
8677 reg1 = REGNO (XEXP (addr1, 0));
8678 /* The offset must be constant! */
8679 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8681 offset1 = INTVAL (XEXP (addr1, 1));
8684 else if (GET_CODE (addr1) != REG)
8688 reg1 = REGNO (addr1);
8689 /* This was a simple (mem (reg)) expression. Offset is 0. */
8693 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8694 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8695 register as addr1. */
8696 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8698 if (GET_CODE (addr2) != PLUS)
8701 if (GET_CODE (XEXP (addr2, 0)) != REG
8702 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8705 if (reg1 != REGNO (XEXP (addr2, 0)))
8708 /* The offset for the second addr must be 8 more than the first addr. */
8709 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8712 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8717 /* Return the register class of a scratch register needed to copy IN into
8718 or out of a register in CLASS in MODE. If it can be done directly,
8719 NO_REGS is returned. */
8722 secondary_reload_class (enum reg_class class,
8723 enum machine_mode mode ATTRIBUTE_UNUSED,
8728 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8730 && MACHOPIC_INDIRECT
8734 /* We cannot copy a symbolic operand directly into anything
8735 other than BASE_REGS for TARGET_ELF. So indicate that a
8736 register from BASE_REGS is needed as an intermediate
8739 On Darwin, pic addresses require a load from memory, which
8740 needs a base register. */
8741 if (class != BASE_REGS
8742 && (GET_CODE (in) == SYMBOL_REF
8743 || GET_CODE (in) == HIGH
8744 || GET_CODE (in) == LABEL_REF
8745 || GET_CODE (in) == CONST))
8749 if (GET_CODE (in) == REG)
8752 if (regno >= FIRST_PSEUDO_REGISTER)
8754 regno = true_regnum (in);
8755 if (regno >= FIRST_PSEUDO_REGISTER)
8759 else if (GET_CODE (in) == SUBREG)
8761 regno = true_regnum (in);
8762 if (regno >= FIRST_PSEUDO_REGISTER)
8768 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8770 if (class == GENERAL_REGS || class == BASE_REGS
8771 || (regno >= 0 && INT_REGNO_P (regno)))
8774 /* Constants, memory, and FP registers can go into FP registers. */
8775 if ((regno == -1 || FP_REGNO_P (regno))
8776 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8779 /* Memory, and AltiVec registers can go into AltiVec registers. */
8780 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8781 && class == ALTIVEC_REGS)
8784 /* We can copy among the CR registers. */
8785 if ((class == CR_REGS || class == CR0_REGS)
8786 && regno >= 0 && CR_REGNO_P (regno))
8789 /* Otherwise, we need GENERAL_REGS. */
8790 return GENERAL_REGS;
8793 /* Given a comparison operation, return the bit number in CCR to test. We
8794 know this is a valid comparison.
8796 SCC_P is 1 if this is for an scc. That means that %D will have been
8797 used instead of %C, so the bits will be in different places.
8799 Return -1 if OP isn't a valid comparison for some reason. */
8802 ccr_bit (rtx op, int scc_p)
8804 enum rtx_code code = GET_CODE (op);
8805 enum machine_mode cc_mode;
8810 if (!COMPARISON_P (op))
8815 if (GET_CODE (reg) != REG
8816 || ! CR_REGNO_P (REGNO (reg)))
8819 cc_mode = GET_MODE (reg);
8820 cc_regnum = REGNO (reg);
8821 base_bit = 4 * (cc_regnum - CR0_REGNO);
8823 validate_condition_mode (code, cc_mode);
8825 /* When generating a sCOND operation, only positive conditions are
8827 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8828 && code != GTU && code != LTU)
8834 return scc_p ? base_bit + 3 : base_bit + 2;
8836 return base_bit + 2;
8837 case GT: case GTU: case UNLE:
8838 return base_bit + 1;
8839 case LT: case LTU: case UNGE:
8841 case ORDERED: case UNORDERED:
8842 return base_bit + 3;
8845 /* If scc, we will have done a cror to put the bit in the
8846 unordered position. So test that bit. For integer, this is ! LT
8847 unless this is an scc insn. */
8848 return scc_p ? base_bit + 3 : base_bit;
8851 return scc_p ? base_bit + 3 : base_bit + 1;
8858 /* Return the GOT register. */
8861 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8863 /* The second flow pass currently (June 1999) can't update
8864 regs_ever_live without disturbing other parts of the compiler, so
8865 update it here to make the prolog/epilogue code happy. */
8866 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8867 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8869 current_function_uses_pic_offset_table = 1;
8871 return pic_offset_table_rtx;
8874 /* Function to init struct machine_function.
8875 This will be called, via a pointer variable,
8876 from push_function_context. */
8878 static struct machine_function *
8879 rs6000_init_machine_status (void)
8881 return ggc_alloc_cleared (sizeof (machine_function));
8884 /* These macros test for integers and extract the low-order bits. */
8886 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8887 && GET_MODE (X) == VOIDmode)
8889 #define INT_LOWPART(X) \
8890 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8896 unsigned long val = INT_LOWPART (op);
8898 /* If the high bit is zero, the value is the first 1 bit we find
8900 if ((val & 0x80000000) == 0)
8902 if ((val & 0xffffffff) == 0)
8906 while (((val <<= 1) & 0x80000000) == 0)
8911 /* If the high bit is set and the low bit is not, or the mask is all
8912 1's, the value is zero. */
8913 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8916 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8919 while (((val >>= 1) & 1) != 0)
8929 unsigned long val = INT_LOWPART (op);
8931 /* If the low bit is zero, the value is the first 1 bit we find from
8935 if ((val & 0xffffffff) == 0)
8939 while (((val >>= 1) & 1) == 0)
8945 /* If the low bit is set and the high bit is not, or the mask is all
8946 1's, the value is 31. */
8947 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8950 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8953 while (((val <<= 1) & 0x80000000) != 0)
8959 /* Locate some local-dynamic symbol still in use by this function
8960 so that we can print its name in some tls_ld pattern. */
8963 rs6000_get_some_local_dynamic_name (void)
8967 if (cfun->machine->some_ld_name)
8968 return cfun->machine->some_ld_name;
8970 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8972 && for_each_rtx (&PATTERN (insn),
8973 rs6000_get_some_local_dynamic_name_1, 0))
8974 return cfun->machine->some_ld_name;
8979 /* Helper function for rs6000_get_some_local_dynamic_name. */
8982 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8986 if (GET_CODE (x) == SYMBOL_REF)
8988 const char *str = XSTR (x, 0);
8989 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8991 cfun->machine->some_ld_name = str;
8999 /* Print an operand. Recognize special options, documented below. */
9002 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9003 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9005 #define SMALL_DATA_RELOC "sda21"
9006 #define SMALL_DATA_REG 0
9010 print_operand (FILE *file, rtx x, int code)
9014 unsigned HOST_WIDE_INT uval;
9019 /* Write out an instruction after the call which may be replaced
9020 with glue code by the loader. This depends on the AIX version. */
9021 asm_fprintf (file, RS6000_CALL_GLUE);
9024 /* %a is output_address. */
9027 /* If X is a constant integer whose low-order 5 bits are zero,
9028 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9029 in the AIX assembler where "sri" with a zero shift count
9030 writes a trash instruction. */
9031 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9038 /* If constant, low-order 16 bits of constant, unsigned.
9039 Otherwise, write normally. */
9041 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9043 print_operand (file, x, 0);
9047 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9048 for 64-bit mask direction. */
9049 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9052 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9056 /* X is a CR register. Print the number of the GT bit of the CR. */
9057 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9058 output_operand_lossage ("invalid %%E value");
9060 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9064 /* Like 'J' but get to the GT bit. */
9065 if (GET_CODE (x) != REG)
9068 /* Bit 1 is GT bit. */
9069 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
9071 /* If we want bit 31, write a shift count of zero, not 32. */
9072 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9076 /* X is a CR register. Print the number of the EQ bit of the CR */
9077 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9078 output_operand_lossage ("invalid %%E value");
9080 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9084 /* X is a CR register. Print the shift count needed to move it
9085 to the high-order four bits. */
9086 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9087 output_operand_lossage ("invalid %%f value");
9089 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9093 /* Similar, but print the count for the rotate in the opposite
9095 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9096 output_operand_lossage ("invalid %%F value");
9098 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9102 /* X is a constant integer. If it is negative, print "m",
9103 otherwise print "z". This is to make an aze or ame insn. */
9104 if (GET_CODE (x) != CONST_INT)
9105 output_operand_lossage ("invalid %%G value");
9106 else if (INTVAL (x) >= 0)
9113 /* If constant, output low-order five bits. Otherwise, write
9116 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9118 print_operand (file, x, 0);
9122 /* If constant, output low-order six bits. Otherwise, write
9125 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9127 print_operand (file, x, 0);
9131 /* Print `i' if this is a constant, else nothing. */
9137 /* Write the bit number in CCR for jump. */
9140 output_operand_lossage ("invalid %%j code");
9142 fprintf (file, "%d", i);
9146 /* Similar, but add one for shift count in rlinm for scc and pass
9147 scc flag to `ccr_bit'. */
9150 output_operand_lossage ("invalid %%J code");
9152 /* If we want bit 31, write a shift count of zero, not 32. */
9153 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9157 /* X must be a constant. Write the 1's complement of the
9160 output_operand_lossage ("invalid %%k value");
9162 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9166 /* X must be a symbolic constant on ELF. Write an
9167 expression suitable for an 'addi' that adds in the low 16
9169 if (GET_CODE (x) != CONST)
9171 print_operand_address (file, x);
9176 if (GET_CODE (XEXP (x, 0)) != PLUS
9177 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9178 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9179 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9180 output_operand_lossage ("invalid %%K value");
9181 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9183 /* For GNU as, there must be a non-alphanumeric character
9184 between 'l' and the number. The '-' is added by
9185 print_operand() already. */
9186 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9188 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9192 /* %l is output_asm_label. */
9195 /* Write second word of DImode or DFmode reference. Works on register
9196 or non-indexed memory only. */
9197 if (GET_CODE (x) == REG)
9198 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9199 else if (GET_CODE (x) == MEM)
9201 /* Handle possible auto-increment. Since it is pre-increment and
9202 we have already done it, we can just use an offset of word. */
9203 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9204 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9205 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9208 output_address (XEXP (adjust_address_nv (x, SImode,
9212 if (small_data_operand (x, GET_MODE (x)))
9213 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9214 reg_names[SMALL_DATA_REG]);
9219 /* MB value for a mask operand. */
9220 if (! mask_operand (x, SImode))
9221 output_operand_lossage ("invalid %%m value");
9223 fprintf (file, "%d", extract_MB (x));
9227 /* ME value for a mask operand. */
9228 if (! mask_operand (x, SImode))
9229 output_operand_lossage ("invalid %%M value");
9231 fprintf (file, "%d", extract_ME (x));
9234 /* %n outputs the negative of its operand. */
9237 /* Write the number of elements in the vector times 4. */
9238 if (GET_CODE (x) != PARALLEL)
9239 output_operand_lossage ("invalid %%N value");
9241 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9245 /* Similar, but subtract 1 first. */
9246 if (GET_CODE (x) != PARALLEL)
9247 output_operand_lossage ("invalid %%O value");
9249 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9253 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9255 || INT_LOWPART (x) < 0
9256 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9257 output_operand_lossage ("invalid %%p value");
9259 fprintf (file, "%d", i);
9263 /* The operand must be an indirect memory reference. The result
9264 is the register name. */
9265 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9266 || REGNO (XEXP (x, 0)) >= 32)
9267 output_operand_lossage ("invalid %%P value");
9269 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9273 /* This outputs the logical code corresponding to a boolean
9274 expression. The expression may have one or both operands
9275 negated (if one, only the first one). For condition register
9276 logical operations, it will also treat the negated
9277 CR codes as NOTs, but not handle NOTs of them. */
9279 const char *const *t = 0;
9281 enum rtx_code code = GET_CODE (x);
9282 static const char * const tbl[3][3] = {
9283 { "and", "andc", "nor" },
9284 { "or", "orc", "nand" },
9285 { "xor", "eqv", "xor" } };
9289 else if (code == IOR)
9291 else if (code == XOR)
9294 output_operand_lossage ("invalid %%q value");
9296 if (GET_CODE (XEXP (x, 0)) != NOT)
9300 if (GET_CODE (XEXP (x, 1)) == NOT)
9318 /* X is a CR register. Print the mask for `mtcrf'. */
9319 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9320 output_operand_lossage ("invalid %%R value");
9322 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9326 /* Low 5 bits of 32 - value */
9328 output_operand_lossage ("invalid %%s value");
9330 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9334 /* PowerPC64 mask position. All 0's is excluded.
9335 CONST_INT 32-bit mask is considered sign-extended so any
9336 transition must occur within the CONST_INT, not on the boundary. */
9337 if (! mask64_operand (x, DImode))
9338 output_operand_lossage ("invalid %%S value");
9340 uval = INT_LOWPART (x);
9342 if (uval & 1) /* Clear Left */
9344 #if HOST_BITS_PER_WIDE_INT > 64
9345 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9349 else /* Clear Right */
9352 #if HOST_BITS_PER_WIDE_INT > 64
9353 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9361 fprintf (file, "%d", i);
9365 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9366 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9369 /* Bit 3 is OV bit. */
9370 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9372 /* If we want bit 31, write a shift count of zero, not 32. */
9373 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9377 /* Print the symbolic name of a branch target register. */
9378 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9379 && REGNO (x) != COUNT_REGISTER_REGNUM))
9380 output_operand_lossage ("invalid %%T value");
9381 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9382 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9384 fputs ("ctr", file);
9388 /* High-order 16 bits of constant for use in unsigned operand. */
9390 output_operand_lossage ("invalid %%u value");
9392 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9393 (INT_LOWPART (x) >> 16) & 0xffff);
9397 /* High-order 16 bits of constant for use in signed operand. */
9399 output_operand_lossage ("invalid %%v value");
9401 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9402 (INT_LOWPART (x) >> 16) & 0xffff);
9406 /* Print `u' if this has an auto-increment or auto-decrement. */
9407 if (GET_CODE (x) == MEM
9408 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9409 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9414 /* Print the trap code for this operand. */
9415 switch (GET_CODE (x))
9418 fputs ("eq", file); /* 4 */
9421 fputs ("ne", file); /* 24 */
9424 fputs ("lt", file); /* 16 */
9427 fputs ("le", file); /* 20 */
9430 fputs ("gt", file); /* 8 */
9433 fputs ("ge", file); /* 12 */
9436 fputs ("llt", file); /* 2 */
9439 fputs ("lle", file); /* 6 */
9442 fputs ("lgt", file); /* 1 */
9445 fputs ("lge", file); /* 5 */
9453 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9456 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9457 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9459 print_operand (file, x, 0);
9463 /* MB value for a PowerPC64 rldic operand. */
9464 val = (GET_CODE (x) == CONST_INT
9465 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9470 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9471 if ((val <<= 1) < 0)
9474 #if HOST_BITS_PER_WIDE_INT == 32
9475 if (GET_CODE (x) == CONST_INT && i >= 0)
9476 i += 32; /* zero-extend high-part was all 0's */
9477 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9479 val = CONST_DOUBLE_LOW (x);
9486 for ( ; i < 64; i++)
9487 if ((val <<= 1) < 0)
9492 fprintf (file, "%d", i + 1);
9496 if (GET_CODE (x) == MEM
9497 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9502 /* Like 'L', for third word of TImode */
9503 if (GET_CODE (x) == REG)
9504 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9505 else if (GET_CODE (x) == MEM)
9507 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9508 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9509 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9511 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9512 if (small_data_operand (x, GET_MODE (x)))
9513 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9514 reg_names[SMALL_DATA_REG]);
9519 /* X is a SYMBOL_REF. Write out the name preceded by a
9520 period and without any trailing data in brackets. Used for function
9521 names. If we are configured for System V (or the embedded ABI) on
9522 the PowerPC, do not emit the period, since those systems do not use
9523 TOCs and the like. */
9524 if (GET_CODE (x) != SYMBOL_REF)
9527 if (XSTR (x, 0)[0] != '.')
9529 switch (DEFAULT_ABI)
9543 /* For macho, we need to check it see if we need a stub. */
9546 const char *name = XSTR (x, 0);
9548 if (machopic_classify_name (name) == MACHOPIC_UNDEFINED_FUNCTION)
9549 name = machopic_stub_name (name);
9551 assemble_name (file, name);
9553 else if (TARGET_AIX)
9554 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9556 assemble_name (file, XSTR (x, 0));
9560 /* Like 'L', for last word of TImode. */
9561 if (GET_CODE (x) == REG)
9562 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9563 else if (GET_CODE (x) == MEM)
9565 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9566 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9567 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9569 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9570 if (small_data_operand (x, GET_MODE (x)))
9571 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9572 reg_names[SMALL_DATA_REG]);
9576 /* Print AltiVec or SPE memory operand. */
9581 if (GET_CODE (x) != MEM)
9589 if (GET_CODE (tmp) == REG)
9591 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9594 /* Handle [reg+UIMM]. */
9595 else if (GET_CODE (tmp) == PLUS &&
9596 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9600 if (GET_CODE (XEXP (tmp, 0)) != REG)
9603 x = INTVAL (XEXP (tmp, 1));
9604 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9608 /* Fall through. Must be [reg+reg]. */
9610 if (GET_CODE (tmp) == REG)
9611 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9612 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9614 if (REGNO (XEXP (tmp, 0)) == 0)
9615 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9616 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9618 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9619 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9627 if (GET_CODE (x) == REG)
9628 fprintf (file, "%s", reg_names[REGNO (x)]);
9629 else if (GET_CODE (x) == MEM)
9631 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9632 know the width from the mode. */
9633 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9634 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9635 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9636 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9637 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9638 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9640 output_address (XEXP (x, 0));
9643 output_addr_const (file, x);
9647 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9651 output_operand_lossage ("invalid %%xn code");
9655 /* Print the address of an operand. */
9658 print_operand_address (FILE *file, rtx x)
9660 if (GET_CODE (x) == REG)
9661 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9662 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9663 || GET_CODE (x) == LABEL_REF)
9665 output_addr_const (file, x);
9666 if (small_data_operand (x, GET_MODE (x)))
9667 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9668 reg_names[SMALL_DATA_REG]);
9669 else if (TARGET_TOC)
9672 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9674 if (REGNO (XEXP (x, 0)) == 0)
9675 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9676 reg_names[ REGNO (XEXP (x, 0)) ]);
9678 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9679 reg_names[ REGNO (XEXP (x, 1)) ]);
9681 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9682 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9683 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9685 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9686 && CONSTANT_P (XEXP (x, 1)))
9688 output_addr_const (file, XEXP (x, 1));
9689 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9693 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9694 && CONSTANT_P (XEXP (x, 1)))
9696 fprintf (file, "lo16(");
9697 output_addr_const (file, XEXP (x, 1));
9698 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9701 else if (legitimate_constant_pool_address_p (x))
9703 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9705 rtx contains_minus = XEXP (x, 1);
9709 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9710 turn it into (sym) for output_addr_const. */
9711 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9712 contains_minus = XEXP (contains_minus, 0);
9714 minus = XEXP (contains_minus, 0);
9715 symref = XEXP (minus, 0);
9716 XEXP (contains_minus, 0) = symref;
9721 name = XSTR (symref, 0);
9722 newname = alloca (strlen (name) + sizeof ("@toc"));
9723 strcpy (newname, name);
9724 strcat (newname, "@toc");
9725 XSTR (symref, 0) = newname;
9727 output_addr_const (file, XEXP (x, 1));
9729 XSTR (symref, 0) = name;
9730 XEXP (contains_minus, 0) = minus;
9733 output_addr_const (file, XEXP (x, 1));
9735 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9741 /* Target hook for assembling integer objects. The PowerPC version has
9742 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9743 is defined. It also needs to handle DI-mode objects on 64-bit
9747 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9749 #ifdef RELOCATABLE_NEEDS_FIXUP
9750 /* Special handling for SI values. */
9751 if (size == 4 && aligned_p)
9753 extern int in_toc_section (void);
9754 static int recurse = 0;
9756 /* For -mrelocatable, we mark all addresses that need to be fixed up
9757 in the .fixup section. */
9758 if (TARGET_RELOCATABLE
9759 && !in_toc_section ()
9760 && !in_text_section ()
9761 && !in_unlikely_text_section ()
9763 && GET_CODE (x) != CONST_INT
9764 && GET_CODE (x) != CONST_DOUBLE
9770 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9772 ASM_OUTPUT_LABEL (asm_out_file, buf);
9773 fprintf (asm_out_file, "\t.long\t(");
9774 output_addr_const (asm_out_file, x);
9775 fprintf (asm_out_file, ")@fixup\n");
9776 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9777 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9778 fprintf (asm_out_file, "\t.long\t");
9779 assemble_name (asm_out_file, buf);
9780 fprintf (asm_out_file, "\n\t.previous\n");
9784 /* Remove initial .'s to turn a -mcall-aixdesc function
9785 address into the address of the descriptor, not the function
9787 else if (GET_CODE (x) == SYMBOL_REF
9788 && XSTR (x, 0)[0] == '.'
9789 && DEFAULT_ABI == ABI_AIX)
9791 const char *name = XSTR (x, 0);
9792 while (*name == '.')
9795 fprintf (asm_out_file, "\t.long\t%s\n", name);
9799 #endif /* RELOCATABLE_NEEDS_FIXUP */
9800 return default_assemble_integer (x, size, aligned_p);
9803 #ifdef HAVE_GAS_HIDDEN
9804 /* Emit an assembler directive to set symbol visibility for DECL to
9808 rs6000_assemble_visibility (tree decl, int vis)
9810 /* Functions need to have their entry point symbol visibility set as
9811 well as their descriptor symbol visibility. */
9812 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9814 static const char * const visibility_types[] = {
9815 NULL, "internal", "hidden", "protected"
9818 const char *name, *type;
9820 name = ((* targetm.strip_name_encoding)
9821 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9822 type = visibility_types[vis];
9824 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9825 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9828 default_assemble_visibility (decl, vis);
9833 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9835 /* Reversal of FP compares takes care -- an ordered compare
9836 becomes an unordered compare and vice versa. */
9837 if (mode == CCFPmode
9838 && (!flag_finite_math_only
9839 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9840 || code == UNEQ || code == LTGT))
9841 return reverse_condition_maybe_unordered (code);
9843 return reverse_condition (code);
9846 /* Generate a compare for CODE. Return a brand-new rtx that
9847 represents the result of the compare. */
9850 rs6000_generate_compare (enum rtx_code code)
9852 enum machine_mode comp_mode;
9855 if (rs6000_compare_fp_p)
9856 comp_mode = CCFPmode;
9857 else if (code == GTU || code == LTU
9858 || code == GEU || code == LEU)
9859 comp_mode = CCUNSmode;
9863 /* First, the compare. */
9864 compare_result = gen_reg_rtx (comp_mode);
9866 /* SPE FP compare instructions on the GPRs. Yuck! */
9867 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9868 && rs6000_compare_fp_p)
9870 rtx cmp, or1, or2, or_result, compare_result2;
9872 /* Note: The E500 comparison instructions set the GT bit (x +
9873 1), on success. This explains the mess. */
9877 case EQ: case UNEQ: case NE: case LTGT:
9878 cmp = flag_finite_math_only
9879 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9881 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9882 rs6000_compare_op1);
9884 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
9885 cmp = flag_finite_math_only
9886 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9888 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9889 rs6000_compare_op1);
9891 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
9892 cmp = flag_finite_math_only
9893 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9895 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9896 rs6000_compare_op1);
9902 /* Synthesize LE and GE from LT/GT || EQ. */
9903 if (code == LE || code == GE || code == LEU || code == GEU)
9909 case LE: code = LT; break;
9910 case GE: code = GT; break;
9911 case LEU: code = LT; break;
9912 case GEU: code = GT; break;
9916 or1 = gen_reg_rtx (SImode);
9917 or2 = gen_reg_rtx (SImode);
9918 or_result = gen_reg_rtx (CCEQmode);
9919 compare_result2 = gen_reg_rtx (CCFPmode);
9922 cmp = flag_finite_math_only
9923 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9925 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9926 rs6000_compare_op1);
9929 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
9930 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
9932 /* OR them together. */
9933 cmp = gen_rtx_SET (VOIDmode, or_result,
9934 gen_rtx_COMPARE (CCEQmode,
9935 gen_rtx_IOR (SImode, or1, or2),
9937 compare_result = or_result;
9942 if (code == NE || code == LTGT)
9951 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9952 gen_rtx_COMPARE (comp_mode,
9954 rs6000_compare_op1)));
9956 /* Some kinds of FP comparisons need an OR operation;
9957 under flag_finite_math_only we don't bother. */
9958 if (rs6000_compare_fp_p
9959 && ! flag_finite_math_only
9960 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9961 && (code == LE || code == GE
9962 || code == UNEQ || code == LTGT
9963 || code == UNGT || code == UNLT))
9965 enum rtx_code or1, or2;
9966 rtx or1_rtx, or2_rtx, compare2_rtx;
9967 rtx or_result = gen_reg_rtx (CCEQmode);
9971 case LE: or1 = LT; or2 = EQ; break;
9972 case GE: or1 = GT; or2 = EQ; break;
9973 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9974 case LTGT: or1 = LT; or2 = GT; break;
9975 case UNGT: or1 = UNORDERED; or2 = GT; break;
9976 case UNLT: or1 = UNORDERED; or2 = LT; break;
9979 validate_condition_mode (or1, comp_mode);
9980 validate_condition_mode (or2, comp_mode);
9981 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
9982 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
9983 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9984 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9986 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9988 compare_result = or_result;
9992 validate_condition_mode (code, GET_MODE (compare_result));
9994 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
9998 /* Emit the RTL for an sCOND pattern. */
10001 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10004 enum machine_mode op_mode;
10005 enum rtx_code cond_code;
10007 condition_rtx = rs6000_generate_compare (code);
10008 cond_code = GET_CODE (condition_rtx);
10010 if (TARGET_E500 && rs6000_compare_fp_p
10011 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10015 PUT_MODE (condition_rtx, SImode);
10016 t = XEXP (condition_rtx, 0);
10018 if (cond_code != NE && cond_code != EQ)
10021 if (cond_code == NE)
10022 emit_insn (gen_e500_flip_gt_bit (t, t));
10024 emit_insn (gen_move_from_CR_gt_bit (result, t));
10028 if (cond_code == NE
10029 || cond_code == GE || cond_code == LE
10030 || cond_code == GEU || cond_code == LEU
10031 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10033 rtx not_result = gen_reg_rtx (CCEQmode);
10034 rtx not_op, rev_cond_rtx;
10035 enum machine_mode cc_mode;
10037 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10039 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10040 SImode, XEXP (condition_rtx, 0), const0_rtx);
10041 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10042 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10043 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10046 op_mode = GET_MODE (rs6000_compare_op0);
10047 if (op_mode == VOIDmode)
10048 op_mode = GET_MODE (rs6000_compare_op1);
10050 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10052 PUT_MODE (condition_rtx, DImode);
10053 convert_move (result, condition_rtx, 0);
10057 PUT_MODE (condition_rtx, SImode);
10058 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10062 /* Emit a branch of kind CODE to location LOC. */
10065 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10067 rtx condition_rtx, loc_ref;
10069 condition_rtx = rs6000_generate_compare (code);
10070 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10071 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10072 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10073 loc_ref, pc_rtx)));
10076 /* Return the string to output a conditional branch to LABEL, which is
10077 the operand number of the label, or -1 if the branch is really a
10078 conditional return.
10080 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10081 condition code register and its mode specifies what kind of
10082 comparison we made.
10084 REVERSED is nonzero if we should reverse the sense of the comparison.
10086 INSN is the insn. */
10089 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10091 static char string[64];
10092 enum rtx_code code = GET_CODE (op);
10093 rtx cc_reg = XEXP (op, 0);
10094 enum machine_mode mode = GET_MODE (cc_reg);
10095 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10096 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10097 int really_reversed = reversed ^ need_longbranch;
10103 validate_condition_mode (code, mode);
10105 /* Work out which way this really branches. We could use
10106 reverse_condition_maybe_unordered here always but this
10107 makes the resulting assembler clearer. */
10108 if (really_reversed)
10110 /* Reversal of FP compares takes care -- an ordered compare
10111 becomes an unordered compare and vice versa. */
10112 if (mode == CCFPmode)
10113 code = reverse_condition_maybe_unordered (code);
10115 code = reverse_condition (code);
10118 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10120 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10123 /* Opposite of GT. */
10125 else if (code == NE)
10133 /* Not all of these are actually distinct opcodes, but
10134 we distinguish them for clarity of the resulting assembler. */
10135 case NE: case LTGT:
10136 ccode = "ne"; break;
10137 case EQ: case UNEQ:
10138 ccode = "eq"; break;
10140 ccode = "ge"; break;
10141 case GT: case GTU: case UNGT:
10142 ccode = "gt"; break;
10144 ccode = "le"; break;
10145 case LT: case LTU: case UNLT:
10146 ccode = "lt"; break;
10147 case UNORDERED: ccode = "un"; break;
10148 case ORDERED: ccode = "nu"; break;
10149 case UNGE: ccode = "nl"; break;
10150 case UNLE: ccode = "ng"; break;
10155 /* Maybe we have a guess as to how likely the branch is.
10156 The old mnemonics don't have a way to specify this information. */
10158 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10159 if (note != NULL_RTX)
10161 /* PROB is the difference from 50%. */
10162 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10164 /* Only hint for highly probable/improbable branches on newer
10165 cpus as static prediction overrides processor dynamic
10166 prediction. For older cpus we may as well always hint, but
10167 assume not taken for branches that are very close to 50% as a
10168 mispredicted taken branch is more expensive than a
10169 mispredicted not-taken branch. */
10170 if (rs6000_always_hint
10171 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10173 if (abs (prob) > REG_BR_PROB_BASE / 20
10174 && ((prob > 0) ^ need_longbranch))
10182 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10184 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10186 /* We need to escape any '%' characters in the reg_names string.
10187 Assume they'd only be the first character.... */
10188 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10190 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10194 /* If the branch distance was too far, we may have to use an
10195 unconditional branch to go the distance. */
10196 if (need_longbranch)
10197 s += sprintf (s, ",$+8\n\tb %s", label);
10199 s += sprintf (s, ",%s", label);
10205 /* Return the string to flip the GT bit on a CR. */
10207 output_e500_flip_gt_bit (rtx dst, rtx src)
10209 static char string[64];
10212 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10213 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10217 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10218 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10220 sprintf (string, "crnot %d,%d", a, b);
10224 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10225 operands of the last comparison is nonzero/true, FALSE_COND if it
10226 is zero/false. Return 0 if the hardware has no such operation. */
10229 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10231 enum rtx_code code = GET_CODE (op);
10232 rtx op0 = rs6000_compare_op0;
10233 rtx op1 = rs6000_compare_op1;
10234 REAL_VALUE_TYPE c1;
10235 enum machine_mode compare_mode = GET_MODE (op0);
10236 enum machine_mode result_mode = GET_MODE (dest);
10239 /* These modes should always match. */
10240 if (GET_MODE (op1) != compare_mode
10241 /* In the isel case however, we can use a compare immediate, so
10242 op1 may be a small constant. */
10243 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10245 if (GET_MODE (true_cond) != result_mode)
10247 if (GET_MODE (false_cond) != result_mode)
10250 /* First, work out if the hardware can do this at all, or
10251 if it's too slow.... */
10252 if (! rs6000_compare_fp_p)
10255 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10258 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10259 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10262 /* Eliminate half of the comparisons by switching operands, this
10263 makes the remaining code simpler. */
10264 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10265 || code == LTGT || code == LT || code == UNLE)
10267 code = reverse_condition_maybe_unordered (code);
10269 true_cond = false_cond;
10273 /* UNEQ and LTGT take four instructions for a comparison with zero,
10274 it'll probably be faster to use a branch here too. */
10275 if (code == UNEQ && HONOR_NANS (compare_mode))
10278 if (GET_CODE (op1) == CONST_DOUBLE)
10279 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10281 /* We're going to try to implement comparisons by performing
10282 a subtract, then comparing against zero. Unfortunately,
10283 Inf - Inf is NaN which is not zero, and so if we don't
10284 know that the operand is finite and the comparison
10285 would treat EQ different to UNORDERED, we can't do it. */
10286 if (HONOR_INFINITIES (compare_mode)
10287 && code != GT && code != UNGE
10288 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10289 /* Constructs of the form (a OP b ? a : b) are safe. */
10290 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10291 || (! rtx_equal_p (op0, true_cond)
10292 && ! rtx_equal_p (op1, true_cond))))
10294 /* At this point we know we can use fsel. */
10296 /* Reduce the comparison to a comparison against zero. */
10297 temp = gen_reg_rtx (compare_mode);
10298 emit_insn (gen_rtx_SET (VOIDmode, temp,
10299 gen_rtx_MINUS (compare_mode, op0, op1)));
10301 op1 = CONST0_RTX (compare_mode);
10303 /* If we don't care about NaNs we can reduce some of the comparisons
10304 down to faster ones. */
10305 if (! HONOR_NANS (compare_mode))
10311 true_cond = false_cond;
10324 /* Now, reduce everything down to a GE. */
10331 temp = gen_reg_rtx (compare_mode);
10332 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10337 temp = gen_reg_rtx (compare_mode);
10338 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10343 temp = gen_reg_rtx (compare_mode);
10344 emit_insn (gen_rtx_SET (VOIDmode, temp,
10345 gen_rtx_NEG (compare_mode,
10346 gen_rtx_ABS (compare_mode, op0))));
10351 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10352 temp = gen_reg_rtx (result_mode);
10353 emit_insn (gen_rtx_SET (VOIDmode, temp,
10354 gen_rtx_IF_THEN_ELSE (result_mode,
10355 gen_rtx_GE (VOIDmode,
10357 true_cond, false_cond)));
10358 false_cond = true_cond;
10361 temp = gen_reg_rtx (compare_mode);
10362 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10367 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10368 temp = gen_reg_rtx (result_mode);
10369 emit_insn (gen_rtx_SET (VOIDmode, temp,
10370 gen_rtx_IF_THEN_ELSE (result_mode,
10371 gen_rtx_GE (VOIDmode,
10373 true_cond, false_cond)));
10374 true_cond = false_cond;
10377 temp = gen_reg_rtx (compare_mode);
10378 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10386 emit_insn (gen_rtx_SET (VOIDmode, dest,
10387 gen_rtx_IF_THEN_ELSE (result_mode,
10388 gen_rtx_GE (VOIDmode,
10390 true_cond, false_cond)));
10394 /* Same as above, but for ints (isel). */
10397 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10399 rtx condition_rtx, cr;
10401 /* All isel implementations thus far are 32-bits. */
10402 if (GET_MODE (rs6000_compare_op0) != SImode)
10405 /* We still have to do the compare, because isel doesn't do a
10406 compare, it just looks at the CRx bits set by a previous compare
10408 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10409 cr = XEXP (condition_rtx, 0);
10411 if (GET_MODE (cr) == CCmode)
10412 emit_insn (gen_isel_signed (dest, condition_rtx,
10413 true_cond, false_cond, cr));
10415 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10416 true_cond, false_cond, cr));
10422 output_isel (rtx *operands)
10424 enum rtx_code code;
10426 code = GET_CODE (operands[1]);
10427 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10429 PUT_CODE (operands[1], reverse_condition (code));
10430 return "isel %0,%3,%2,%j1";
10433 return "isel %0,%2,%3,%j1";
10437 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10439 enum machine_mode mode = GET_MODE (op0);
10443 if (code == SMAX || code == SMIN)
10448 if (code == SMAX || code == UMAX)
10449 target = emit_conditional_move (dest, c, op0, op1, mode,
10450 op0, op1, mode, 0);
10452 target = emit_conditional_move (dest, c, op0, op1, mode,
10453 op1, op0, mode, 0);
10454 if (target == NULL_RTX)
10456 if (target != dest)
10457 emit_move_insn (dest, target);
10460 /* Emit instructions to move SRC to DST. Called by splitters for
10461 multi-register moves. It will emit at most one instruction for
10462 each register that is accessed; that is, it won't emit li/lis pairs
10463 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10467 rs6000_split_multireg_move (rtx dst, rtx src)
10469 /* The register number of the first register being moved. */
10471 /* The mode that is to be moved. */
10472 enum machine_mode mode;
10473 /* The mode that the move is being done in, and its size. */
10474 enum machine_mode reg_mode;
10476 /* The number of registers that will be moved. */
10479 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10480 mode = GET_MODE (dst);
10481 nregs = HARD_REGNO_NREGS (reg, mode);
10482 if (FP_REGNO_P (reg))
10484 else if (ALTIVEC_REGNO_P (reg))
10485 reg_mode = V16QImode;
10487 reg_mode = word_mode;
10488 reg_mode_size = GET_MODE_SIZE (reg_mode);
10490 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10493 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10495 /* Move register range backwards, if we might have destructive
10498 for (i = nregs - 1; i >= 0; i--)
10499 emit_insn (gen_rtx_SET (VOIDmode,
10500 simplify_gen_subreg (reg_mode, dst, mode,
10501 i * reg_mode_size),
10502 simplify_gen_subreg (reg_mode, src, mode,
10503 i * reg_mode_size)));
10509 bool used_update = false;
10511 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10515 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10516 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10519 breg = XEXP (XEXP (src, 0), 0);
10520 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10521 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10522 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10523 emit_insn (TARGET_32BIT
10524 ? gen_addsi3 (breg, breg, delta_rtx)
10525 : gen_adddi3 (breg, breg, delta_rtx));
10526 src = gen_rtx_MEM (mode, breg);
10529 /* We have now address involving an base register only.
10530 If we use one of the registers to address memory,
10531 we have change that register last. */
10533 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10534 ? XEXP (XEXP (src, 0), 0)
10540 if (REGNO (breg) >= REGNO (dst)
10541 && REGNO (breg) < REGNO (dst) + nregs)
10542 j = REGNO (breg) - REGNO (dst);
10545 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10549 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10550 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10553 breg = XEXP (XEXP (dst, 0), 0);
10554 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10555 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10556 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10558 /* We have to update the breg before doing the store.
10559 Use store with update, if available. */
10563 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10564 emit_insn (TARGET_32BIT
10565 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10566 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10567 used_update = true;
10570 emit_insn (TARGET_32BIT
10571 ? gen_addsi3 (breg, breg, delta_rtx)
10572 : gen_adddi3 (breg, breg, delta_rtx));
10573 dst = gen_rtx_MEM (mode, breg);
10577 for (i = 0; i < nregs; i++)
10579 /* Calculate index to next subword. */
10584 /* If compiler already emited move of first word by
10585 store with update, no need to do anything. */
10586 if (j == 0 && used_update)
10589 emit_insn (gen_rtx_SET (VOIDmode,
10590 simplify_gen_subreg (reg_mode, dst, mode,
10591 j * reg_mode_size),
10592 simplify_gen_subreg (reg_mode, src, mode,
10593 j * reg_mode_size)));
10599 /* This page contains routines that are used to determine what the
10600 function prologue and epilogue code will do and write them out. */
10602 /* Return the first fixed-point register that is required to be
10603 saved. 32 if none. */
10606 first_reg_to_save (void)
10610 /* Find lowest numbered live register. */
10611 for (first_reg = 13; first_reg <= 31; first_reg++)
10612 if (regs_ever_live[first_reg]
10613 && (! call_used_regs[first_reg]
10614 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10615 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10616 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10621 && current_function_uses_pic_offset_table
10622 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10623 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10629 /* Similar, for FP regs. */
10632 first_fp_reg_to_save (void)
10636 /* Find lowest numbered live register. */
10637 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10638 if (regs_ever_live[first_reg])
10644 /* Similar, for AltiVec regs. */
10647 first_altivec_reg_to_save (void)
10651 /* Stack frame remains as is unless we are in AltiVec ABI. */
10652 if (! TARGET_ALTIVEC_ABI)
10653 return LAST_ALTIVEC_REGNO + 1;
10655 /* Find lowest numbered live register. */
10656 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10657 if (regs_ever_live[i])
10663 /* Return a 32-bit mask of the AltiVec registers we need to set in
10664 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10665 the 32-bit word is 0. */
10667 static unsigned int
10668 compute_vrsave_mask (void)
10670 unsigned int i, mask = 0;
10672 /* First, find out if we use _any_ altivec registers. */
10673 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10674 if (regs_ever_live[i])
10675 mask |= ALTIVEC_REG_BIT (i);
10680 /* Next, remove the argument registers from the set. These must
10681 be in the VRSAVE mask set by the caller, so we don't need to add
10682 them in again. More importantly, the mask we compute here is
10683 used to generate CLOBBERs in the set_vrsave insn, and we do not
10684 wish the argument registers to die. */
10685 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10686 mask &= ~ALTIVEC_REG_BIT (i);
10688 /* Similarly, remove the return value from the set. */
10691 diddle_return_value (is_altivec_return_reg, &yes);
10693 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10700 is_altivec_return_reg (rtx reg, void *xyes)
10702 bool *yes = (bool *) xyes;
10703 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10708 /* Calculate the stack information for the current function. This is
10709 complicated by having two separate calling sequences, the AIX calling
10710 sequence and the V.4 calling sequence.
10712 AIX (and Darwin/Mac OS X) stack frames look like:
10714 SP----> +---------------------------------------+
10715 | back chain to caller | 0 0
10716 +---------------------------------------+
10717 | saved CR | 4 8 (8-11)
10718 +---------------------------------------+
10720 +---------------------------------------+
10721 | reserved for compilers | 12 24
10722 +---------------------------------------+
10723 | reserved for binders | 16 32
10724 +---------------------------------------+
10725 | saved TOC pointer | 20 40
10726 +---------------------------------------+
10727 | Parameter save area (P) | 24 48
10728 +---------------------------------------+
10729 | Alloca space (A) | 24+P etc.
10730 +---------------------------------------+
10731 | Local variable space (L) | 24+P+A
10732 +---------------------------------------+
10733 | Float/int conversion temporary (X) | 24+P+A+L
10734 +---------------------------------------+
10735 | Save area for AltiVec registers (W) | 24+P+A+L+X
10736 +---------------------------------------+
10737 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10738 +---------------------------------------+
10739 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10740 +---------------------------------------+
10741 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10742 +---------------------------------------+
10743 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10744 +---------------------------------------+
10745 old SP->| back chain to caller's caller |
10746 +---------------------------------------+
10748 The required alignment for AIX configurations is two words (i.e., 8
10752 V.4 stack frames look like:
10754 SP----> +---------------------------------------+
10755 | back chain to caller | 0
10756 +---------------------------------------+
10757 | caller's saved LR | 4
10758 +---------------------------------------+
10759 | Parameter save area (P) | 8
10760 +---------------------------------------+
10761 | Alloca space (A) | 8+P
10762 +---------------------------------------+
10763 | Varargs save area (V) | 8+P+A
10764 +---------------------------------------+
10765 | Local variable space (L) | 8+P+A+V
10766 +---------------------------------------+
10767 | Float/int conversion temporary (X) | 8+P+A+V+L
10768 +---------------------------------------+
10769 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10770 +---------------------------------------+
10771 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10772 +---------------------------------------+
10773 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10774 +---------------------------------------+
10775 | SPE: area for 64-bit GP registers |
10776 +---------------------------------------+
10777 | SPE alignment padding |
10778 +---------------------------------------+
10779 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10780 +---------------------------------------+
10781 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10782 +---------------------------------------+
10783 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10784 +---------------------------------------+
10785 old SP->| back chain to caller's caller |
10786 +---------------------------------------+
10788 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10789 given. (But note below and in sysv4.h that we require only 8 and
10790 may round up the size of our stack frame anyways. The historical
10791 reason is early versions of powerpc-linux which didn't properly
10792 align the stack at program startup. A happy side-effect is that
10793 -mno-eabi libraries can be used with -meabi programs.)
10795 The EABI configuration defaults to the V.4 layout. However,
10796 the stack alignment requirements may differ. If -mno-eabi is not
10797 given, the required stack alignment is 8 bytes; if -mno-eabi is
10798 given, the required alignment is 16 bytes. (But see V.4 comment
10801 #ifndef ABI_STACK_BOUNDARY
10802 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10805 static rs6000_stack_t *
10806 rs6000_stack_info (void)
10808 static rs6000_stack_t info, zero_info;
10809 rs6000_stack_t *info_ptr = &info;
10810 int reg_size = TARGET_32BIT ? 4 : 8;
10812 HOST_WIDE_INT non_fixed_size;
10814 /* Zero all fields portably. */
10819 /* Cache value so we don't rescan instruction chain over and over. */
10820 if (cfun->machine->insn_chain_scanned_p == 0)
10822 cfun->machine->insn_chain_scanned_p = 1;
10823 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10827 /* Select which calling sequence. */
10828 info_ptr->abi = DEFAULT_ABI;
10830 /* Calculate which registers need to be saved & save area size. */
10831 info_ptr->first_gp_reg_save = first_reg_to_save ();
10832 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10833 even if it currently looks like we won't. */
10834 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10835 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10836 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10837 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10838 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10840 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10842 /* For the SPE, we have an additional upper 32-bits on each GPR.
10843 Ideally we should save the entire 64-bits only when the upper
10844 half is used in SIMD instructions. Since we only record
10845 registers live (not the size they are used in), this proves
10846 difficult because we'd have to traverse the instruction chain at
10847 the right time, taking reload into account. This is a real pain,
10848 so we opt to save the GPRs in 64-bits always if but one register
10849 gets used in 64-bits. Otherwise, all the registers in the frame
10850 get saved in 32-bits.
10852 So... since when we save all GPRs (except the SP) in 64-bits, the
10853 traditional GP save area will be empty. */
10854 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10855 info_ptr->gp_size = 0;
10857 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10858 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10860 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10861 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10862 - info_ptr->first_altivec_reg_save);
10864 /* Does this function call anything? */
10865 info_ptr->calls_p = (! current_function_is_leaf
10866 || cfun->machine->ra_needs_full_frame);
10868 /* Determine if we need to save the link register. */
10869 if (rs6000_ra_ever_killed ()
10870 || (DEFAULT_ABI == ABI_AIX
10871 && current_function_profile
10872 && !TARGET_PROFILE_KERNEL)
10873 #ifdef TARGET_RELOCATABLE
10874 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10876 || (info_ptr->first_fp_reg_save != 64
10877 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10878 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10879 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10880 || (DEFAULT_ABI == ABI_DARWIN
10882 && current_function_uses_pic_offset_table)
10883 || info_ptr->calls_p)
10885 info_ptr->lr_save_p = 1;
10886 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10889 /* Determine if we need to save the condition code registers. */
10890 if (regs_ever_live[CR2_REGNO]
10891 || regs_ever_live[CR3_REGNO]
10892 || regs_ever_live[CR4_REGNO])
10894 info_ptr->cr_save_p = 1;
10895 if (DEFAULT_ABI == ABI_V4)
10896 info_ptr->cr_size = reg_size;
10899 /* If the current function calls __builtin_eh_return, then we need
10900 to allocate stack space for registers that will hold data for
10901 the exception handler. */
10902 if (current_function_calls_eh_return)
10905 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10908 /* SPE saves EH registers in 64-bits. */
10909 ehrd_size = i * (TARGET_SPE_ABI
10910 && info_ptr->spe_64bit_regs_used != 0
10911 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10916 /* Determine various sizes. */
10917 info_ptr->reg_size = reg_size;
10918 info_ptr->fixed_size = RS6000_SAVE_AREA;
10919 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10920 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10921 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10922 TARGET_ALTIVEC ? 16 : 8);
10924 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10925 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10927 info_ptr->spe_gp_size = 0;
10929 if (TARGET_ALTIVEC_ABI)
10930 info_ptr->vrsave_mask = compute_vrsave_mask ();
10932 info_ptr->vrsave_mask = 0;
10934 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10935 info_ptr->vrsave_size = 4;
10937 info_ptr->vrsave_size = 0;
10939 /* Calculate the offsets. */
10940 switch (DEFAULT_ABI)
10948 info_ptr->fp_save_offset = - info_ptr->fp_size;
10949 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10951 if (TARGET_ALTIVEC_ABI)
10953 info_ptr->vrsave_save_offset
10954 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10956 /* Align stack so vector save area is on a quadword boundary. */
10957 if (info_ptr->altivec_size != 0)
10958 info_ptr->altivec_padding_size
10959 = 16 - (-info_ptr->vrsave_save_offset % 16);
10961 info_ptr->altivec_padding_size = 0;
10963 info_ptr->altivec_save_offset
10964 = info_ptr->vrsave_save_offset
10965 - info_ptr->altivec_padding_size
10966 - info_ptr->altivec_size;
10968 /* Adjust for AltiVec case. */
10969 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10972 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10973 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10974 info_ptr->lr_save_offset = 2*reg_size;
10978 info_ptr->fp_save_offset = - info_ptr->fp_size;
10979 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10980 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10982 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10984 /* Align stack so SPE GPR save area is aligned on a
10985 double-word boundary. */
10986 if (info_ptr->spe_gp_size != 0)
10987 info_ptr->spe_padding_size
10988 = 8 - (-info_ptr->cr_save_offset % 8);
10990 info_ptr->spe_padding_size = 0;
10992 info_ptr->spe_gp_save_offset
10993 = info_ptr->cr_save_offset
10994 - info_ptr->spe_padding_size
10995 - info_ptr->spe_gp_size;
10997 /* Adjust for SPE case. */
10998 info_ptr->toc_save_offset
10999 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11001 else if (TARGET_ALTIVEC_ABI)
11003 info_ptr->vrsave_save_offset
11004 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11006 /* Align stack so vector save area is on a quadword boundary. */
11007 if (info_ptr->altivec_size != 0)
11008 info_ptr->altivec_padding_size
11009 = 16 - (-info_ptr->vrsave_save_offset % 16);
11011 info_ptr->altivec_padding_size = 0;
11013 info_ptr->altivec_save_offset
11014 = info_ptr->vrsave_save_offset
11015 - info_ptr->altivec_padding_size
11016 - info_ptr->altivec_size;
11018 /* Adjust for AltiVec case. */
11019 info_ptr->toc_save_offset
11020 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11023 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11024 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11025 info_ptr->lr_save_offset = reg_size;
11029 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11030 + info_ptr->gp_size
11031 + info_ptr->altivec_size
11032 + info_ptr->altivec_padding_size
11033 + info_ptr->spe_gp_size
11034 + info_ptr->spe_padding_size
11036 + info_ptr->cr_size
11037 + info_ptr->lr_size
11038 + info_ptr->vrsave_size
11039 + info_ptr->toc_size,
11040 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11043 non_fixed_size = (info_ptr->vars_size
11044 + info_ptr->parm_size
11045 + info_ptr->save_size
11046 + info_ptr->varargs_size);
11048 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11049 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11051 /* Determine if we need to allocate any stack frame:
11053 For AIX we need to push the stack if a frame pointer is needed
11054 (because the stack might be dynamically adjusted), if we are
11055 debugging, if we make calls, or if the sum of fp_save, gp_save,
11056 and local variables are more than the space needed to save all
11057 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11058 + 18*8 = 288 (GPR13 reserved).
11060 For V.4 we don't have the stack cushion that AIX uses, but assume
11061 that the debugger can handle stackless frames. */
11063 if (info_ptr->calls_p)
11064 info_ptr->push_p = 1;
11066 else if (DEFAULT_ABI == ABI_V4)
11067 info_ptr->push_p = non_fixed_size != 0;
11069 else if (frame_pointer_needed)
11070 info_ptr->push_p = 1;
11072 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11073 info_ptr->push_p = 1;
11076 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11078 /* Zero offsets if we're not saving those registers. */
11079 if (info_ptr->fp_size == 0)
11080 info_ptr->fp_save_offset = 0;
11082 if (info_ptr->gp_size == 0)
11083 info_ptr->gp_save_offset = 0;
11085 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11086 info_ptr->altivec_save_offset = 0;
11088 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11089 info_ptr->vrsave_save_offset = 0;
11091 if (! TARGET_SPE_ABI
11092 || info_ptr->spe_64bit_regs_used == 0
11093 || info_ptr->spe_gp_size == 0)
11094 info_ptr->spe_gp_save_offset = 0;
11096 if (! info_ptr->lr_save_p)
11097 info_ptr->lr_save_offset = 0;
11099 if (! info_ptr->cr_save_p)
11100 info_ptr->cr_save_offset = 0;
11102 if (! info_ptr->toc_save_p)
11103 info_ptr->toc_save_offset = 0;
11108 /* Return true if the current function uses any GPRs in 64-bit SIMD
11112 spe_func_has_64bit_regs_p (void)
11116 /* Functions that save and restore all the call-saved registers will
11117 need to save/restore the registers in 64-bits. */
11118 if (current_function_calls_eh_return
11119 || current_function_calls_setjmp
11120 || current_function_has_nonlocal_goto)
11123 insns = get_insns ();
11125 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11131 i = PATTERN (insn);
11132 if (GET_CODE (i) == SET
11133 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11142 debug_stack_info (rs6000_stack_t *info)
11144 const char *abi_string;
11147 info = rs6000_stack_info ();
11149 fprintf (stderr, "\nStack information for function %s:\n",
11150 ((current_function_decl && DECL_NAME (current_function_decl))
11151 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11156 default: abi_string = "Unknown"; break;
11157 case ABI_NONE: abi_string = "NONE"; break;
11158 case ABI_AIX: abi_string = "AIX"; break;
11159 case ABI_DARWIN: abi_string = "Darwin"; break;
11160 case ABI_V4: abi_string = "V.4"; break;
11163 fprintf (stderr, "\tABI = %5s\n", abi_string);
11165 if (TARGET_ALTIVEC_ABI)
11166 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11168 if (TARGET_SPE_ABI)
11169 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11171 if (info->first_gp_reg_save != 32)
11172 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11174 if (info->first_fp_reg_save != 64)
11175 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11177 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11178 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11179 info->first_altivec_reg_save);
11181 if (info->lr_save_p)
11182 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11184 if (info->cr_save_p)
11185 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11187 if (info->toc_save_p)
11188 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11190 if (info->vrsave_mask)
11191 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11194 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11197 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11199 if (info->gp_save_offset)
11200 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11202 if (info->fp_save_offset)
11203 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11205 if (info->altivec_save_offset)
11206 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11207 info->altivec_save_offset);
11209 if (info->spe_gp_save_offset)
11210 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11211 info->spe_gp_save_offset);
11213 if (info->vrsave_save_offset)
11214 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11215 info->vrsave_save_offset);
11217 if (info->lr_save_offset)
11218 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11220 if (info->cr_save_offset)
11221 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11223 if (info->toc_save_offset)
11224 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11226 if (info->varargs_save_offset)
11227 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11229 if (info->total_size)
11230 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11233 if (info->varargs_size)
11234 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11236 if (info->vars_size)
11237 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11240 if (info->parm_size)
11241 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11243 if (info->fixed_size)
11244 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11247 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11249 if (info->spe_gp_size)
11250 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11253 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11255 if (info->altivec_size)
11256 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11258 if (info->vrsave_size)
11259 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11261 if (info->altivec_padding_size)
11262 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11263 info->altivec_padding_size);
11265 if (info->spe_padding_size)
11266 fprintf (stderr, "\tspe_padding_size = %5d\n",
11267 info->spe_padding_size);
11270 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11273 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11275 if (info->toc_size)
11276 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11278 if (info->save_size)
11279 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11281 if (info->reg_size != 4)
11282 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11284 fprintf (stderr, "\n");
11288 rs6000_return_addr (int count, rtx frame)
11290 /* Currently we don't optimize very well between prolog and body
11291 code and for PIC code the code can be actually quite bad, so
11292 don't try to be too clever here. */
11293 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11295 cfun->machine->ra_needs_full_frame = 1;
11302 plus_constant (copy_to_reg
11303 (gen_rtx_MEM (Pmode,
11304 memory_address (Pmode, frame))),
11305 RETURN_ADDRESS_OFFSET)));
11308 cfun->machine->ra_need_lr = 1;
11309 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11312 /* Say whether a function is a candidate for sibcall handling or not.
11313 We do not allow indirect calls to be optimized into sibling calls.
11314 Also, we can't do it if there are any vector parameters; there's
11315 nowhere to put the VRsave code so it works; note that functions with
11316 vector parameters are required to have a prototype, so the argument
11317 type info must be available here. (The tail recursion case can work
11318 with vector parameters, but there's no way to distinguish here.) */
11320 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11325 if (TARGET_ALTIVEC_VRSAVE)
11327 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11328 type; type = TREE_CHAIN (type))
11330 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11334 if (DEFAULT_ABI == ABI_DARWIN
11335 || (*targetm.binds_local_p) (decl))
11337 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11339 if (!lookup_attribute ("longcall", attr_list)
11340 || lookup_attribute ("shortcall", attr_list))
11348 rs6000_ra_ever_killed (void)
11354 if (current_function_is_thunk)
11357 /* regs_ever_live has LR marked as used if any sibcalls are present,
11358 but this should not force saving and restoring in the
11359 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11360 clobbers LR, so that is inappropriate. */
11362 /* Also, the prologue can generate a store into LR that
11363 doesn't really count, like this:
11366 bcl to set PIC register
11370 When we're called from the epilogue, we need to avoid counting
11371 this as a store. */
11373 push_topmost_sequence ();
11374 top = get_insns ();
11375 pop_topmost_sequence ();
11376 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11378 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11382 if (FIND_REG_INC_NOTE (insn, reg))
11384 else if (GET_CODE (insn) == CALL_INSN
11385 && !SIBLING_CALL_P (insn))
11387 else if (set_of (reg, insn) != NULL_RTX
11388 && !prologue_epilogue_contains (insn))
11395 /* Add a REG_MAYBE_DEAD note to the insn. */
11397 rs6000_maybe_dead (rtx insn)
11399 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11404 /* Emit instructions needed to load the TOC register.
11405 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11406 a constant pool; or for SVR4 -fpic. */
11409 rs6000_emit_load_toc_table (int fromprolog)
11412 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11414 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11416 rtx temp = (fromprolog
11417 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11418 : gen_reg_rtx (Pmode));
11419 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11421 rs6000_maybe_dead (insn);
11422 insn = emit_move_insn (dest, temp);
11424 rs6000_maybe_dead (insn);
11426 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11429 rtx tempLR = (fromprolog
11430 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11431 : gen_reg_rtx (Pmode));
11432 rtx temp0 = (fromprolog
11433 ? gen_rtx_REG (Pmode, 0)
11434 : gen_reg_rtx (Pmode));
11437 /* possibly create the toc section */
11438 if (! toc_initialized)
11441 function_section (current_function_decl);
11448 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11449 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11451 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11452 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11454 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11456 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11457 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11464 static int reload_toc_labelno = 0;
11466 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11468 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11469 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11471 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11472 emit_move_insn (dest, tempLR);
11473 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11475 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11477 rs6000_maybe_dead (insn);
11479 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11481 /* This is for AIX code running in non-PIC ELF32. */
11484 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11485 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11487 insn = emit_insn (gen_elf_high (dest, realsym));
11489 rs6000_maybe_dead (insn);
11490 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11492 rs6000_maybe_dead (insn);
11494 else if (DEFAULT_ABI == ABI_AIX)
11497 insn = emit_insn (gen_load_toc_aix_si (dest));
11499 insn = emit_insn (gen_load_toc_aix_di (dest));
11501 rs6000_maybe_dead (insn);
11507 /* Emit instructions to restore the link register after determining where
11508 its value has been stored. */
11511 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11513 rs6000_stack_t *info = rs6000_stack_info ();
11516 operands[0] = source;
11517 operands[1] = scratch;
11519 if (info->lr_save_p)
11521 rtx frame_rtx = stack_pointer_rtx;
11522 HOST_WIDE_INT sp_offset = 0;
11525 if (frame_pointer_needed
11526 || current_function_calls_alloca
11527 || info->total_size > 32767)
11529 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11530 frame_rtx = operands[1];
11532 else if (info->push_p)
11533 sp_offset = info->total_size;
11535 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11536 tmp = gen_rtx_MEM (Pmode, tmp);
11537 emit_move_insn (tmp, operands[0]);
11540 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11543 static GTY(()) int set = -1;
11546 get_TOC_alias_set (void)
11549 set = new_alias_set ();
11553 /* This returns nonzero if the current function uses the TOC. This is
11554 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11555 is generated by the ABI_V4 load_toc_* patterns. */
11562 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11565 rtx pat = PATTERN (insn);
11568 if (GET_CODE (pat) == PARALLEL)
11569 for (i = 0; i < XVECLEN (pat, 0); i++)
11571 rtx sub = XVECEXP (pat, 0, i);
11572 if (GET_CODE (sub) == USE)
11574 sub = XEXP (sub, 0);
11575 if (GET_CODE (sub) == UNSPEC
11576 && XINT (sub, 1) == UNSPEC_TOC)
11586 create_TOC_reference (rtx symbol)
11588 return gen_rtx_PLUS (Pmode,
11589 gen_rtx_REG (Pmode, TOC_REGISTER),
11590 gen_rtx_CONST (Pmode,
11591 gen_rtx_MINUS (Pmode, symbol,
11592 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11595 /* If _Unwind_* has been called from within the same module,
11596 toc register is not guaranteed to be saved to 40(1) on function
11597 entry. Save it there in that case. */
11600 rs6000_aix_emit_builtin_unwind_init (void)
11603 rtx stack_top = gen_reg_rtx (Pmode);
11604 rtx opcode_addr = gen_reg_rtx (Pmode);
11605 rtx opcode = gen_reg_rtx (SImode);
11606 rtx tocompare = gen_reg_rtx (SImode);
11607 rtx no_toc_save_needed = gen_label_rtx ();
11609 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11610 emit_move_insn (stack_top, mem);
11612 mem = gen_rtx_MEM (Pmode,
11613 gen_rtx_PLUS (Pmode, stack_top,
11614 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11615 emit_move_insn (opcode_addr, mem);
11616 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11617 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11618 : 0xE8410028, SImode));
11620 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11621 SImode, NULL_RTX, NULL_RTX,
11622 no_toc_save_needed);
11624 mem = gen_rtx_MEM (Pmode,
11625 gen_rtx_PLUS (Pmode, stack_top,
11626 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11627 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11628 emit_label (no_toc_save_needed);
11631 /* This ties together stack memory (MEM with an alias set of
11632 rs6000_sr_alias_set) and the change to the stack pointer. */
11635 rs6000_emit_stack_tie (void)
11637 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11639 set_mem_alias_set (mem, rs6000_sr_alias_set);
11640 emit_insn (gen_stack_tie (mem));
11643 /* Emit the correct code for allocating stack space, as insns.
11644 If COPY_R12, make sure a copy of the old frame is left in r12.
11645 The generated code may use hard register 0 as a temporary. */
11648 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11651 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11652 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11653 rtx todec = GEN_INT (-size);
11655 if (current_function_limit_stack)
11657 if (REG_P (stack_limit_rtx)
11658 && REGNO (stack_limit_rtx) > 1
11659 && REGNO (stack_limit_rtx) <= 31)
11661 emit_insn (TARGET_32BIT
11662 ? gen_addsi3 (tmp_reg,
11665 : gen_adddi3 (tmp_reg,
11669 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11672 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11674 && DEFAULT_ABI == ABI_V4)
11676 rtx toload = gen_rtx_CONST (VOIDmode,
11677 gen_rtx_PLUS (Pmode,
11681 emit_insn (gen_elf_high (tmp_reg, toload));
11682 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11683 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11687 warning ("stack limit expression is not supported");
11690 if (copy_r12 || ! TARGET_UPDATE)
11691 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11697 /* Need a note here so that try_split doesn't get confused. */
11698 if (get_last_insn() == NULL_RTX)
11699 emit_note (NOTE_INSN_DELETED);
11700 insn = emit_move_insn (tmp_reg, todec);
11701 try_split (PATTERN (insn), insn, 0);
11705 insn = emit_insn (TARGET_32BIT
11706 ? gen_movsi_update (stack_reg, stack_reg,
11708 : gen_movdi_update (stack_reg, stack_reg,
11709 todec, stack_reg));
11713 insn = emit_insn (TARGET_32BIT
11714 ? gen_addsi3 (stack_reg, stack_reg, todec)
11715 : gen_adddi3 (stack_reg, stack_reg, todec));
11716 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11717 gen_rtx_REG (Pmode, 12));
11720 RTX_FRAME_RELATED_P (insn) = 1;
11722 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11723 gen_rtx_SET (VOIDmode, stack_reg,
11724 gen_rtx_PLUS (Pmode, stack_reg,
11729 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11730 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11731 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11732 deduce these equivalences by itself so it wasn't necessary to hold
11733 its hand so much. */
11736 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11737 rtx reg2, rtx rreg)
11741 /* copy_rtx will not make unique copies of registers, so we need to
11742 ensure we don't have unwanted sharing here. */
11744 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11747 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11749 real = copy_rtx (PATTERN (insn));
11751 if (reg2 != NULL_RTX)
11752 real = replace_rtx (real, reg2, rreg);
11754 real = replace_rtx (real, reg,
11755 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11756 STACK_POINTER_REGNUM),
11759 /* We expect that 'real' is either a SET or a PARALLEL containing
11760 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11761 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11763 if (GET_CODE (real) == SET)
11767 temp = simplify_rtx (SET_SRC (set));
11769 SET_SRC (set) = temp;
11770 temp = simplify_rtx (SET_DEST (set));
11772 SET_DEST (set) = temp;
11773 if (GET_CODE (SET_DEST (set)) == MEM)
11775 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11777 XEXP (SET_DEST (set), 0) = temp;
11780 else if (GET_CODE (real) == PARALLEL)
11783 for (i = 0; i < XVECLEN (real, 0); i++)
11784 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11786 rtx set = XVECEXP (real, 0, i);
11788 temp = simplify_rtx (SET_SRC (set));
11790 SET_SRC (set) = temp;
11791 temp = simplify_rtx (SET_DEST (set));
11793 SET_DEST (set) = temp;
11794 if (GET_CODE (SET_DEST (set)) == MEM)
11796 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11798 XEXP (SET_DEST (set), 0) = temp;
11800 RTX_FRAME_RELATED_P (set) = 1;
11807 real = spe_synthesize_frame_save (real);
11809 RTX_FRAME_RELATED_P (insn) = 1;
11810 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11815 /* Given an SPE frame note, return a PARALLEL of SETs with the
11816 original note, plus a synthetic register save. */
11819 spe_synthesize_frame_save (rtx real)
11821 rtx synth, offset, reg, real2;
11823 if (GET_CODE (real) != SET
11824 || GET_MODE (SET_SRC (real)) != V2SImode)
11827 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11828 frame related note. The parallel contains a set of the register
11829 being saved, and another set to a synthetic register (n+1200).
11830 This is so we can differentiate between 64-bit and 32-bit saves.
11831 Words cannot describe this nastiness. */
11833 if (GET_CODE (SET_DEST (real)) != MEM
11834 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11835 || GET_CODE (SET_SRC (real)) != REG)
11839 (set (mem (plus (reg x) (const y)))
11842 (set (mem (plus (reg x) (const y+4)))
11846 real2 = copy_rtx (real);
11847 PUT_MODE (SET_DEST (real2), SImode);
11848 reg = SET_SRC (real2);
11849 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11850 synth = copy_rtx (real2);
11852 if (BYTES_BIG_ENDIAN)
11854 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11855 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11858 reg = SET_SRC (synth);
11860 synth = replace_rtx (synth, reg,
11861 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11863 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11864 synth = replace_rtx (synth, offset,
11865 GEN_INT (INTVAL (offset)
11866 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11868 RTX_FRAME_RELATED_P (synth) = 1;
11869 RTX_FRAME_RELATED_P (real2) = 1;
11870 if (BYTES_BIG_ENDIAN)
11871 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11873 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11878 /* Returns an insn that has a vrsave set operation with the
11879 appropriate CLOBBERs. */
11882 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11885 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11886 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11889 = gen_rtx_SET (VOIDmode,
11891 gen_rtx_UNSPEC_VOLATILE (SImode,
11892 gen_rtvec (2, reg, vrsave),
11897 /* We need to clobber the registers in the mask so the scheduler
11898 does not move sets to VRSAVE before sets of AltiVec registers.
11900 However, if the function receives nonlocal gotos, reload will set
11901 all call saved registers live. We will end up with:
11903 (set (reg 999) (mem))
11904 (parallel [ (set (reg vrsave) (unspec blah))
11905 (clobber (reg 999))])
11907 The clobber will cause the store into reg 999 to be dead, and
11908 flow will attempt to delete an epilogue insn. In this case, we
11909 need an unspec use/set of the register. */
11911 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11912 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11914 if (!epiloguep || call_used_regs [i])
11915 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11916 gen_rtx_REG (V4SImode, i));
11919 rtx reg = gen_rtx_REG (V4SImode, i);
11922 = gen_rtx_SET (VOIDmode,
11924 gen_rtx_UNSPEC (V4SImode,
11925 gen_rtvec (1, reg), 27));
11929 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11931 for (i = 0; i < nclobs; ++i)
11932 XVECEXP (insn, 0, i) = clobs[i];
11937 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11938 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11941 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11942 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11944 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11945 rtx replacea, replaceb;
11947 int_rtx = GEN_INT (offset);
11949 /* Some cases that need register indexed addressing. */
11950 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11952 && SPE_VECTOR_MODE (mode)
11953 && !SPE_CONST_OFFSET_OK (offset)))
11955 /* Whomever calls us must make sure r11 is available in the
11956 flow path of instructions in the prologue. */
11957 offset_rtx = gen_rtx_REG (Pmode, 11);
11958 emit_move_insn (offset_rtx, int_rtx);
11960 replacea = offset_rtx;
11961 replaceb = int_rtx;
11965 offset_rtx = int_rtx;
11966 replacea = NULL_RTX;
11967 replaceb = NULL_RTX;
11970 reg = gen_rtx_REG (mode, regno);
11971 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11972 mem = gen_rtx_MEM (mode, addr);
11973 set_mem_alias_set (mem, rs6000_sr_alias_set);
11975 insn = emit_move_insn (mem, reg);
11977 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11980 /* Emit an offset memory reference suitable for a frame store, while
11981 converting to a valid addressing mode. */
11984 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11986 rtx int_rtx, offset_rtx;
11988 int_rtx = GEN_INT (offset);
11990 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11992 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11993 emit_move_insn (offset_rtx, int_rtx);
11996 offset_rtx = int_rtx;
11998 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12001 /* Emit function prologue as insns. */
12004 rs6000_emit_prologue (void)
12006 rs6000_stack_t *info = rs6000_stack_info ();
12007 enum machine_mode reg_mode = Pmode;
12008 int reg_size = TARGET_32BIT ? 4 : 8;
12009 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12010 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12011 rtx frame_reg_rtx = sp_reg_rtx;
12012 rtx cr_save_rtx = NULL_RTX;
12014 int saving_FPRs_inline;
12015 int using_store_multiple;
12016 HOST_WIDE_INT sp_offset = 0;
12018 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12020 reg_mode = V2SImode;
12024 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12025 && (!TARGET_SPE_ABI
12026 || info->spe_64bit_regs_used == 0)
12027 && info->first_gp_reg_save < 31);
12028 saving_FPRs_inline = (info->first_fp_reg_save == 64
12029 || FP_SAVE_INLINE (info->first_fp_reg_save)
12030 || current_function_calls_eh_return
12031 || cfun->machine->ra_need_lr);
12033 /* For V.4, update stack before we do any saving and set back pointer. */
12035 && (DEFAULT_ABI == ABI_V4
12036 || current_function_calls_eh_return))
12038 if (info->total_size < 32767)
12039 sp_offset = info->total_size;
12041 frame_reg_rtx = frame_ptr_rtx;
12042 rs6000_emit_allocate_stack (info->total_size,
12043 (frame_reg_rtx != sp_reg_rtx
12044 && (info->cr_save_p
12046 || info->first_fp_reg_save < 64
12047 || info->first_gp_reg_save < 32
12049 if (frame_reg_rtx != sp_reg_rtx)
12050 rs6000_emit_stack_tie ();
12053 /* Save AltiVec registers if needed. */
12054 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12058 /* There should be a non inline version of this, for when we
12059 are saving lots of vector registers. */
12060 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12061 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12063 rtx areg, savereg, mem;
12066 offset = info->altivec_save_offset + sp_offset
12067 + 16 * (i - info->first_altivec_reg_save);
12069 savereg = gen_rtx_REG (V4SImode, i);
12071 areg = gen_rtx_REG (Pmode, 0);
12072 emit_move_insn (areg, GEN_INT (offset));
12074 /* AltiVec addressing mode is [reg+reg]. */
12075 mem = gen_rtx_MEM (V4SImode,
12076 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12078 set_mem_alias_set (mem, rs6000_sr_alias_set);
12080 insn = emit_move_insn (mem, savereg);
12082 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12083 areg, GEN_INT (offset));
12087 /* VRSAVE is a bit vector representing which AltiVec registers
12088 are used. The OS uses this to determine which vector
12089 registers to save on a context switch. We need to save
12090 VRSAVE on the stack frame, add whatever AltiVec registers we
12091 used in this function, and do the corresponding magic in the
12094 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12095 && info->vrsave_mask != 0)
12097 rtx reg, mem, vrsave;
12100 /* Get VRSAVE onto a GPR. */
12101 reg = gen_rtx_REG (SImode, 12);
12102 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12104 emit_insn (gen_get_vrsave_internal (reg));
12106 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12109 offset = info->vrsave_save_offset + sp_offset;
12111 = gen_rtx_MEM (SImode,
12112 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12113 set_mem_alias_set (mem, rs6000_sr_alias_set);
12114 insn = emit_move_insn (mem, reg);
12116 /* Include the registers in the mask. */
12117 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12119 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12122 /* If we use the link register, get it into r0. */
12123 if (info->lr_save_p)
12124 emit_move_insn (gen_rtx_REG (Pmode, 0),
12125 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12127 /* If we need to save CR, put it into r12. */
12128 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12130 cr_save_rtx = gen_rtx_REG (SImode, 12);
12131 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12134 /* Do any required saving of fpr's. If only one or two to save, do
12135 it ourselves. Otherwise, call function. */
12136 if (saving_FPRs_inline)
12139 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12140 if ((regs_ever_live[info->first_fp_reg_save+i]
12141 && ! call_used_regs[info->first_fp_reg_save+i]))
12142 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12143 info->first_fp_reg_save + i,
12144 info->fp_save_offset + sp_offset + 8 * i,
12147 else if (info->first_fp_reg_save != 64)
12151 const char *alloc_rname;
12153 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12155 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12156 gen_rtx_REG (Pmode,
12157 LINK_REGISTER_REGNUM));
12158 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12159 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12160 alloc_rname = ggc_strdup (rname);
12161 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12162 gen_rtx_SYMBOL_REF (Pmode,
12164 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12166 rtx addr, reg, mem;
12167 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12168 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12169 GEN_INT (info->fp_save_offset
12170 + sp_offset + 8*i));
12171 mem = gen_rtx_MEM (DFmode, addr);
12172 set_mem_alias_set (mem, rs6000_sr_alias_set);
12174 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12176 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12177 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12178 NULL_RTX, NULL_RTX);
12181 /* Save GPRs. This is done as a PARALLEL if we are using
12182 the store-multiple instructions. */
12183 if (using_store_multiple)
12187 p = rtvec_alloc (32 - info->first_gp_reg_save);
12188 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12190 rtx addr, reg, mem;
12191 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12192 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12193 GEN_INT (info->gp_save_offset
12196 mem = gen_rtx_MEM (reg_mode, addr);
12197 set_mem_alias_set (mem, rs6000_sr_alias_set);
12199 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12201 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12202 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12203 NULL_RTX, NULL_RTX);
12208 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12209 if ((regs_ever_live[info->first_gp_reg_save+i]
12210 && ! call_used_regs[info->first_gp_reg_save+i])
12211 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12212 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12213 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12215 rtx addr, reg, mem;
12216 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12218 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12220 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12223 if (!SPE_CONST_OFFSET_OK (offset))
12225 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12226 emit_move_insn (b, GEN_INT (offset));
12229 b = GEN_INT (offset);
12231 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12232 mem = gen_rtx_MEM (V2SImode, addr);
12233 set_mem_alias_set (mem, rs6000_sr_alias_set);
12234 insn = emit_move_insn (mem, reg);
12236 if (GET_CODE (b) == CONST_INT)
12237 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12238 NULL_RTX, NULL_RTX);
12240 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12241 b, GEN_INT (offset));
12245 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12246 GEN_INT (info->gp_save_offset
12249 mem = gen_rtx_MEM (reg_mode, addr);
12250 set_mem_alias_set (mem, rs6000_sr_alias_set);
12252 insn = emit_move_insn (mem, reg);
12253 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12254 NULL_RTX, NULL_RTX);
12259 /* ??? There's no need to emit actual instructions here, but it's the
12260 easiest way to get the frame unwind information emitted. */
12261 if (current_function_calls_eh_return)
12263 unsigned int i, regno;
12265 /* In AIX ABI we need to pretend we save r2 here. */
12268 rtx addr, reg, mem;
12270 reg = gen_rtx_REG (reg_mode, 2);
12271 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12272 GEN_INT (sp_offset + 5 * reg_size));
12273 mem = gen_rtx_MEM (reg_mode, addr);
12274 set_mem_alias_set (mem, rs6000_sr_alias_set);
12276 insn = emit_move_insn (mem, reg);
12277 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12278 NULL_RTX, NULL_RTX);
12279 PATTERN (insn) = gen_blockage ();
12284 regno = EH_RETURN_DATA_REGNO (i);
12285 if (regno == INVALID_REGNUM)
12288 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12289 info->ehrd_offset + sp_offset
12290 + reg_size * (int) i,
12295 /* Save lr if we used it. */
12296 if (info->lr_save_p)
12298 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12299 GEN_INT (info->lr_save_offset + sp_offset));
12300 rtx reg = gen_rtx_REG (Pmode, 0);
12301 rtx mem = gen_rtx_MEM (Pmode, addr);
12302 /* This should not be of rs6000_sr_alias_set, because of
12303 __builtin_return_address. */
12305 insn = emit_move_insn (mem, reg);
12306 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12307 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12310 /* Save CR if we use any that must be preserved. */
12311 if (info->cr_save_p)
12313 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12314 GEN_INT (info->cr_save_offset + sp_offset));
12315 rtx mem = gen_rtx_MEM (SImode, addr);
12317 set_mem_alias_set (mem, rs6000_sr_alias_set);
12319 /* If r12 was used to hold the original sp, copy cr into r0 now
12321 if (REGNO (frame_reg_rtx) == 12)
12323 cr_save_rtx = gen_rtx_REG (SImode, 0);
12324 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12326 insn = emit_move_insn (mem, cr_save_rtx);
12328 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12329 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12330 But that's OK. All we have to do is specify that _one_ condition
12331 code register is saved in this stack slot. The thrower's epilogue
12332 will then restore all the call-saved registers.
12333 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12334 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12335 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12338 /* Update stack and set back pointer unless this is V.4,
12339 for which it was done previously. */
12341 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12342 rs6000_emit_allocate_stack (info->total_size, FALSE);
12344 /* Set frame pointer, if needed. */
12345 if (frame_pointer_needed)
12347 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12349 RTX_FRAME_RELATED_P (insn) = 1;
12352 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12353 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12354 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12355 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12357 /* If emit_load_toc_table will use the link register, we need to save
12358 it. We use R12 for this purpose because emit_load_toc_table
12359 can use register 0. This allows us to use a plain 'blr' to return
12360 from the procedure more often. */
12361 int save_LR_around_toc_setup = (TARGET_ELF
12362 && DEFAULT_ABI != ABI_AIX
12364 && ! info->lr_save_p
12365 && EXIT_BLOCK_PTR->pred != NULL);
12366 if (save_LR_around_toc_setup)
12368 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12369 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12370 rs6000_emit_load_toc_table (TRUE);
12371 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12374 rs6000_emit_load_toc_table (TRUE);
12378 if (DEFAULT_ABI == ABI_DARWIN
12379 && flag_pic && current_function_uses_pic_offset_table)
12381 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12382 const char *picbase = machopic_function_base_name ();
12383 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12385 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12387 rs6000_maybe_dead (
12388 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12389 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12394 /* Write function prologue. */
12397 rs6000_output_function_prologue (FILE *file,
12398 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12400 rs6000_stack_t *info = rs6000_stack_info ();
12402 if (TARGET_DEBUG_STACK)
12403 debug_stack_info (info);
12405 /* Write .extern for any function we will call to save and restore
12407 if (info->first_fp_reg_save < 64
12408 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12409 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12410 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12411 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12412 RESTORE_FP_SUFFIX);
12414 /* Write .extern for AIX common mode routines, if needed. */
12415 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12417 fputs ("\t.extern __mulh\n", file);
12418 fputs ("\t.extern __mull\n", file);
12419 fputs ("\t.extern __divss\n", file);
12420 fputs ("\t.extern __divus\n", file);
12421 fputs ("\t.extern __quoss\n", file);
12422 fputs ("\t.extern __quous\n", file);
12423 common_mode_defined = 1;
12426 if (! HAVE_prologue)
12430 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12431 the "toplevel" insn chain. */
12432 emit_note (NOTE_INSN_DELETED);
12433 rs6000_emit_prologue ();
12434 emit_note (NOTE_INSN_DELETED);
12436 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12440 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12442 INSN_ADDRESSES_NEW (insn, addr);
12447 if (TARGET_DEBUG_STACK)
12448 debug_rtx_list (get_insns (), 100);
12449 final (get_insns (), file, FALSE, FALSE);
12453 rs6000_pic_labelno++;
12456 /* Emit function epilogue as insns.
12458 At present, dwarf2out_frame_debug_expr doesn't understand
12459 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12460 anywhere in the epilogue. Most of the insns below would in any case
12461 need special notes to explain where r11 is in relation to the stack. */
12464 rs6000_emit_epilogue (int sibcall)
12466 rs6000_stack_t *info;
12467 int restoring_FPRs_inline;
12468 int using_load_multiple;
12469 int using_mfcr_multiple;
12470 int use_backchain_to_restore_sp;
12472 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12473 rtx frame_reg_rtx = sp_reg_rtx;
12474 enum machine_mode reg_mode = Pmode;
12475 int reg_size = TARGET_32BIT ? 4 : 8;
12478 info = rs6000_stack_info ();
12480 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12482 reg_mode = V2SImode;
12486 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12487 && (!TARGET_SPE_ABI
12488 || info->spe_64bit_regs_used == 0)
12489 && info->first_gp_reg_save < 31);
12490 restoring_FPRs_inline = (sibcall
12491 || current_function_calls_eh_return
12492 || info->first_fp_reg_save == 64
12493 || FP_SAVE_INLINE (info->first_fp_reg_save));
12494 use_backchain_to_restore_sp = (frame_pointer_needed
12495 || current_function_calls_alloca
12496 || info->total_size > 32767);
12497 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12498 || rs6000_cpu == PROCESSOR_PPC603
12499 || rs6000_cpu == PROCESSOR_PPC750
12502 /* If we have a frame pointer, a call to alloca, or a large stack
12503 frame, restore the old stack pointer using the backchain. Otherwise,
12504 we know what size to update it with. */
12505 if (use_backchain_to_restore_sp)
12507 /* Under V.4, don't reset the stack pointer until after we're done
12508 loading the saved registers. */
12509 if (DEFAULT_ABI == ABI_V4)
12510 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12512 emit_move_insn (frame_reg_rtx,
12513 gen_rtx_MEM (Pmode, sp_reg_rtx));
12516 else if (info->push_p)
12518 if (DEFAULT_ABI == ABI_V4
12519 || current_function_calls_eh_return)
12520 sp_offset = info->total_size;
12523 emit_insn (TARGET_32BIT
12524 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12525 GEN_INT (info->total_size))
12526 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12527 GEN_INT (info->total_size)));
12531 /* Restore AltiVec registers if needed. */
12532 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12536 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12537 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12539 rtx addr, areg, mem;
12541 areg = gen_rtx_REG (Pmode, 0);
12543 (areg, GEN_INT (info->altivec_save_offset
12545 + 16 * (i - info->first_altivec_reg_save)));
12547 /* AltiVec addressing mode is [reg+reg]. */
12548 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12549 mem = gen_rtx_MEM (V4SImode, addr);
12550 set_mem_alias_set (mem, rs6000_sr_alias_set);
12552 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12556 /* Restore VRSAVE if needed. */
12557 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12558 && info->vrsave_mask != 0)
12560 rtx addr, mem, reg;
12562 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12563 GEN_INT (info->vrsave_save_offset + sp_offset));
12564 mem = gen_rtx_MEM (SImode, addr);
12565 set_mem_alias_set (mem, rs6000_sr_alias_set);
12566 reg = gen_rtx_REG (SImode, 12);
12567 emit_move_insn (reg, mem);
12569 emit_insn (generate_set_vrsave (reg, info, 1));
12572 /* Get the old lr if we saved it. */
12573 if (info->lr_save_p)
12575 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12576 info->lr_save_offset + sp_offset);
12578 set_mem_alias_set (mem, rs6000_sr_alias_set);
12580 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12583 /* Get the old cr if we saved it. */
12584 if (info->cr_save_p)
12586 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12587 GEN_INT (info->cr_save_offset + sp_offset));
12588 rtx mem = gen_rtx_MEM (SImode, addr);
12590 set_mem_alias_set (mem, rs6000_sr_alias_set);
12592 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12595 /* Set LR here to try to overlap restores below. */
12596 if (info->lr_save_p)
12597 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12598 gen_rtx_REG (Pmode, 0));
12600 /* Load exception handler data registers, if needed. */
12601 if (current_function_calls_eh_return)
12603 unsigned int i, regno;
12607 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12608 GEN_INT (sp_offset + 5 * reg_size));
12609 rtx mem = gen_rtx_MEM (reg_mode, addr);
12611 set_mem_alias_set (mem, rs6000_sr_alias_set);
12613 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12620 regno = EH_RETURN_DATA_REGNO (i);
12621 if (regno == INVALID_REGNUM)
12624 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12625 info->ehrd_offset + sp_offset
12626 + reg_size * (int) i);
12627 set_mem_alias_set (mem, rs6000_sr_alias_set);
12629 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12633 /* Restore GPRs. This is done as a PARALLEL if we are using
12634 the load-multiple instructions. */
12635 if (using_load_multiple)
12638 p = rtvec_alloc (32 - info->first_gp_reg_save);
12639 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12641 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12642 GEN_INT (info->gp_save_offset
12645 rtx mem = gen_rtx_MEM (reg_mode, addr);
12647 set_mem_alias_set (mem, rs6000_sr_alias_set);
12650 gen_rtx_SET (VOIDmode,
12651 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12654 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12657 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12658 if ((regs_ever_live[info->first_gp_reg_save+i]
12659 && ! call_used_regs[info->first_gp_reg_save+i])
12660 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12661 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12662 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12664 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12665 GEN_INT (info->gp_save_offset
12668 rtx mem = gen_rtx_MEM (reg_mode, addr);
12670 /* Restore 64-bit quantities for SPE. */
12671 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12673 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12676 if (!SPE_CONST_OFFSET_OK (offset))
12678 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12679 emit_move_insn (b, GEN_INT (offset));
12682 b = GEN_INT (offset);
12684 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12685 mem = gen_rtx_MEM (V2SImode, addr);
12688 set_mem_alias_set (mem, rs6000_sr_alias_set);
12690 emit_move_insn (gen_rtx_REG (reg_mode,
12691 info->first_gp_reg_save + i), mem);
12694 /* Restore fpr's if we need to do it without calling a function. */
12695 if (restoring_FPRs_inline)
12696 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12697 if ((regs_ever_live[info->first_fp_reg_save+i]
12698 && ! call_used_regs[info->first_fp_reg_save+i]))
12701 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12702 GEN_INT (info->fp_save_offset
12705 mem = gen_rtx_MEM (DFmode, addr);
12706 set_mem_alias_set (mem, rs6000_sr_alias_set);
12708 emit_move_insn (gen_rtx_REG (DFmode,
12709 info->first_fp_reg_save + i),
12713 /* If we saved cr, restore it here. Just those that were used. */
12714 if (info->cr_save_p)
12716 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12719 if (using_mfcr_multiple)
12721 for (i = 0; i < 8; i++)
12722 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12728 if (using_mfcr_multiple && count > 1)
12733 p = rtvec_alloc (count);
12736 for (i = 0; i < 8; i++)
12737 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12739 rtvec r = rtvec_alloc (2);
12740 RTVEC_ELT (r, 0) = r12_rtx;
12741 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12742 RTVEC_ELT (p, ndx) =
12743 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12744 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12747 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12752 for (i = 0; i < 8; i++)
12753 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12755 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12761 /* If this is V.4, unwind the stack pointer after all of the loads
12762 have been done. We need to emit a block here so that sched
12763 doesn't decide to move the sp change before the register restores
12764 (which may not have any obvious dependency on the stack). This
12765 doesn't hurt performance, because there is no scheduling that can
12766 be done after this point. */
12767 if (DEFAULT_ABI == ABI_V4
12768 || current_function_calls_eh_return)
12770 if (frame_reg_rtx != sp_reg_rtx)
12771 rs6000_emit_stack_tie ();
12773 if (use_backchain_to_restore_sp)
12775 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12777 else if (sp_offset != 0)
12779 emit_insn (TARGET_32BIT
12780 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12781 GEN_INT (sp_offset))
12782 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12783 GEN_INT (sp_offset)));
12787 if (current_function_calls_eh_return)
12789 rtx sa = EH_RETURN_STACKADJ_RTX;
12790 emit_insn (TARGET_32BIT
12791 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12792 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12798 if (! restoring_FPRs_inline)
12799 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12801 p = rtvec_alloc (2);
12803 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12804 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12805 gen_rtx_REG (Pmode,
12806 LINK_REGISTER_REGNUM));
12808 /* If we have to restore more than two FP registers, branch to the
12809 restore function. It will return to our caller. */
12810 if (! restoring_FPRs_inline)
12814 const char *alloc_rname;
12816 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12817 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12818 alloc_rname = ggc_strdup (rname);
12819 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12820 gen_rtx_SYMBOL_REF (Pmode,
12823 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12826 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12827 GEN_INT (info->fp_save_offset + 8*i));
12828 mem = gen_rtx_MEM (DFmode, addr);
12829 set_mem_alias_set (mem, rs6000_sr_alias_set);
12831 RTVEC_ELT (p, i+3) =
12832 gen_rtx_SET (VOIDmode,
12833 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12838 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12842 /* Write function epilogue. */
12845 rs6000_output_function_epilogue (FILE *file,
12846 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12848 rs6000_stack_t *info = rs6000_stack_info ();
12850 if (! HAVE_epilogue)
12852 rtx insn = get_last_insn ();
12853 /* If the last insn was a BARRIER, we don't have to write anything except
12854 the trace table. */
12855 if (GET_CODE (insn) == NOTE)
12856 insn = prev_nonnote_insn (insn);
12857 if (insn == 0 || GET_CODE (insn) != BARRIER)
12859 /* This is slightly ugly, but at least we don't have two
12860 copies of the epilogue-emitting code. */
12863 /* A NOTE_INSN_DELETED is supposed to be at the start
12864 and end of the "toplevel" insn chain. */
12865 emit_note (NOTE_INSN_DELETED);
12866 rs6000_emit_epilogue (FALSE);
12867 emit_note (NOTE_INSN_DELETED);
12869 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12873 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12875 INSN_ADDRESSES_NEW (insn, addr);
12880 if (TARGET_DEBUG_STACK)
12881 debug_rtx_list (get_insns (), 100);
12882 final (get_insns (), file, FALSE, FALSE);
12888 macho_branch_islands ();
12889 /* Mach-O doesn't support labels at the end of objects, so if
12890 it looks like we might want one, insert a NOP. */
12892 rtx insn = get_last_insn ();
12895 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12896 insn = PREV_INSN (insn);
12900 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12901 fputs ("\tnop\n", file);
12905 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12908 We don't output a traceback table if -finhibit-size-directive was
12909 used. The documentation for -finhibit-size-directive reads
12910 ``don't output a @code{.size} assembler directive, or anything
12911 else that would cause trouble if the function is split in the
12912 middle, and the two halves are placed at locations far apart in
12913 memory.'' The traceback table has this property, since it
12914 includes the offset from the start of the function to the
12915 traceback table itself.
12917 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12918 different traceback table. */
12919 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12920 && rs6000_traceback != traceback_none)
12922 const char *fname = NULL;
12923 const char *language_string = lang_hooks.name;
12924 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12926 int optional_tbtab;
12928 if (rs6000_traceback == traceback_full)
12929 optional_tbtab = 1;
12930 else if (rs6000_traceback == traceback_part)
12931 optional_tbtab = 0;
12933 optional_tbtab = !optimize_size && !TARGET_ELF;
12935 if (optional_tbtab)
12937 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12938 while (*fname == '.') /* V.4 encodes . in the name */
12941 /* Need label immediately before tbtab, so we can compute
12942 its offset from the function start. */
12943 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12944 ASM_OUTPUT_LABEL (file, fname);
12947 /* The .tbtab pseudo-op can only be used for the first eight
12948 expressions, since it can't handle the possibly variable
12949 length fields that follow. However, if you omit the optional
12950 fields, the assembler outputs zeros for all optional fields
12951 anyways, giving each variable length field is minimum length
12952 (as defined in sys/debug.h). Thus we can not use the .tbtab
12953 pseudo-op at all. */
12955 /* An all-zero word flags the start of the tbtab, for debuggers
12956 that have to find it by searching forward from the entry
12957 point or from the current pc. */
12958 fputs ("\t.long 0\n", file);
12960 /* Tbtab format type. Use format type 0. */
12961 fputs ("\t.byte 0,", file);
12963 /* Language type. Unfortunately, there does not seem to be any
12964 official way to discover the language being compiled, so we
12965 use language_string.
12966 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12967 Java is 13. Objective-C is 14. */
12968 if (! strcmp (language_string, "GNU C"))
12970 else if (! strcmp (language_string, "GNU F77"))
12972 else if (! strcmp (language_string, "GNU Pascal"))
12974 else if (! strcmp (language_string, "GNU Ada"))
12976 else if (! strcmp (language_string, "GNU C++"))
12978 else if (! strcmp (language_string, "GNU Java"))
12980 else if (! strcmp (language_string, "GNU Objective-C"))
12984 fprintf (file, "%d,", i);
12986 /* 8 single bit fields: global linkage (not set for C extern linkage,
12987 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12988 from start of procedure stored in tbtab, internal function, function
12989 has controlled storage, function has no toc, function uses fp,
12990 function logs/aborts fp operations. */
12991 /* Assume that fp operations are used if any fp reg must be saved. */
12992 fprintf (file, "%d,",
12993 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12995 /* 6 bitfields: function is interrupt handler, name present in
12996 proc table, function calls alloca, on condition directives
12997 (controls stack walks, 3 bits), saves condition reg, saves
12999 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13000 set up as a frame pointer, even when there is no alloca call. */
13001 fprintf (file, "%d,",
13002 ((optional_tbtab << 6)
13003 | ((optional_tbtab & frame_pointer_needed) << 5)
13004 | (info->cr_save_p << 1)
13005 | (info->lr_save_p)));
13007 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13009 fprintf (file, "%d,",
13010 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13012 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13013 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13015 if (optional_tbtab)
13017 /* Compute the parameter info from the function decl argument
13020 int next_parm_info_bit = 31;
13022 for (decl = DECL_ARGUMENTS (current_function_decl);
13023 decl; decl = TREE_CHAIN (decl))
13025 rtx parameter = DECL_INCOMING_RTL (decl);
13026 enum machine_mode mode = GET_MODE (parameter);
13028 if (GET_CODE (parameter) == REG)
13030 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13036 if (mode == SFmode)
13038 else if (mode == DFmode || mode == TFmode)
13043 /* If only one bit will fit, don't or in this entry. */
13044 if (next_parm_info_bit > 0)
13045 parm_info |= (bits << (next_parm_info_bit - 1));
13046 next_parm_info_bit -= 2;
13050 fixed_parms += ((GET_MODE_SIZE (mode)
13051 + (UNITS_PER_WORD - 1))
13053 next_parm_info_bit -= 1;
13059 /* Number of fixed point parameters. */
13060 /* This is actually the number of words of fixed point parameters; thus
13061 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13062 fprintf (file, "%d,", fixed_parms);
13064 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13066 /* This is actually the number of fp registers that hold parameters;
13067 and thus the maximum value is 13. */
13068 /* Set parameters on stack bit if parameters are not in their original
13069 registers, regardless of whether they are on the stack? Xlc
13070 seems to set the bit when not optimizing. */
13071 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13073 if (! optional_tbtab)
13076 /* Optional fields follow. Some are variable length. */
13078 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13079 11 double float. */
13080 /* There is an entry for each parameter in a register, in the order that
13081 they occur in the parameter list. Any intervening arguments on the
13082 stack are ignored. If the list overflows a long (max possible length
13083 34 bits) then completely leave off all elements that don't fit. */
13084 /* Only emit this long if there was at least one parameter. */
13085 if (fixed_parms || float_parms)
13086 fprintf (file, "\t.long %d\n", parm_info);
13088 /* Offset from start of code to tb table. */
13089 fputs ("\t.long ", file);
13090 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13092 RS6000_OUTPUT_BASENAME (file, fname);
13094 assemble_name (file, fname);
13096 fputs ("-.", file);
13098 RS6000_OUTPUT_BASENAME (file, fname);
13100 assemble_name (file, fname);
13104 /* Interrupt handler mask. */
13105 /* Omit this long, since we never set the interrupt handler bit
13108 /* Number of CTL (controlled storage) anchors. */
13109 /* Omit this long, since the has_ctl bit is never set above. */
13111 /* Displacement into stack of each CTL anchor. */
13112 /* Omit this list of longs, because there are no CTL anchors. */
13114 /* Length of function name. */
13117 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13119 /* Function name. */
13120 assemble_string (fname, strlen (fname));
13122 /* Register for alloca automatic storage; this is always reg 31.
13123 Only emit this if the alloca bit was set above. */
13124 if (frame_pointer_needed)
13125 fputs ("\t.byte 31\n", file);
13127 fputs ("\t.align 2\n", file);
13131 /* A C compound statement that outputs the assembler code for a thunk
13132 function, used to implement C++ virtual function calls with
13133 multiple inheritance. The thunk acts as a wrapper around a virtual
13134 function, adjusting the implicit object parameter before handing
13135 control off to the real function.
13137 First, emit code to add the integer DELTA to the location that
13138 contains the incoming first argument. Assume that this argument
13139 contains a pointer, and is the one used to pass the `this' pointer
13140 in C++. This is the incoming argument *before* the function
13141 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13142 values of all other incoming arguments.
13144 After the addition, emit code to jump to FUNCTION, which is a
13145 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13146 not touch the return address. Hence returning from FUNCTION will
13147 return to whoever called the current `thunk'.
13149 The effect must be as if FUNCTION had been called directly with the
13150 adjusted first argument. This macro is responsible for emitting
13151 all of the code for a thunk function; output_function_prologue()
13152 and output_function_epilogue() are not invoked.
13154 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13155 been extracted from it.) It might possibly be useful on some
13156 targets, but probably not.
13158 If you do not define this macro, the target-independent code in the
13159 C++ frontend will generate a less efficient heavyweight thunk that
13160 calls FUNCTION instead of jumping to it. The generic approach does
13161 not support varargs. */
13164 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13165 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13168 rtx this, insn, funexp;
13170 reload_completed = 1;
13171 epilogue_completed = 1;
13172 no_new_pseudos = 1;
13174 /* Mark the end of the (empty) prologue. */
13175 emit_note (NOTE_INSN_PROLOGUE_END);
13177 /* Find the "this" pointer. If the function returns a structure,
13178 the structure return pointer is in r3. */
13179 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13180 this = gen_rtx_REG (Pmode, 4);
13182 this = gen_rtx_REG (Pmode, 3);
13184 /* Apply the constant offset, if required. */
13187 rtx delta_rtx = GEN_INT (delta);
13188 emit_insn (TARGET_32BIT
13189 ? gen_addsi3 (this, this, delta_rtx)
13190 : gen_adddi3 (this, this, delta_rtx));
13193 /* Apply the offset from the vtable, if required. */
13196 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13197 rtx tmp = gen_rtx_REG (Pmode, 12);
13199 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13200 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13202 emit_insn (TARGET_32BIT
13203 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13204 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13205 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13209 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13211 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13213 emit_insn (TARGET_32BIT
13214 ? gen_addsi3 (this, this, tmp)
13215 : gen_adddi3 (this, this, tmp));
13218 /* Generate a tail call to the target function. */
13219 if (!TREE_USED (function))
13221 assemble_external (function);
13222 TREE_USED (function) = 1;
13224 funexp = XEXP (DECL_RTL (function), 0);
13225 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13228 if (MACHOPIC_INDIRECT)
13229 funexp = machopic_indirect_call_target (funexp);
13232 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13233 generate sibcall RTL explicitly to avoid constraint abort. */
13234 insn = emit_call_insn (
13235 gen_rtx_PARALLEL (VOIDmode,
13237 gen_rtx_CALL (VOIDmode,
13238 funexp, const0_rtx),
13239 gen_rtx_USE (VOIDmode, const0_rtx),
13240 gen_rtx_USE (VOIDmode,
13241 gen_rtx_REG (SImode,
13242 LINK_REGISTER_REGNUM)),
13243 gen_rtx_RETURN (VOIDmode))));
13244 SIBLING_CALL_P (insn) = 1;
13247 /* Run just enough of rest_of_compilation to get the insns emitted.
13248 There's not really enough bulk here to make other passes such as
13249 instruction scheduling worth while. Note that use_thunk calls
13250 assemble_start_function and assemble_end_function. */
13251 insn = get_insns ();
13252 insn_locators_initialize ();
13253 shorten_branches (insn);
13254 final_start_function (insn, file, 1);
13255 final (insn, file, 1, 0);
13256 final_end_function ();
13258 reload_completed = 0;
13259 epilogue_completed = 0;
13260 no_new_pseudos = 0;
13263 /* A quick summary of the various types of 'constant-pool tables'
13266 Target Flags Name One table per
13267 AIX (none) AIX TOC object file
13268 AIX -mfull-toc AIX TOC object file
13269 AIX -mminimal-toc AIX minimal TOC translation unit
13270 SVR4/EABI (none) SVR4 SDATA object file
13271 SVR4/EABI -fpic SVR4 pic object file
13272 SVR4/EABI -fPIC SVR4 PIC translation unit
13273 SVR4/EABI -mrelocatable EABI TOC function
13274 SVR4/EABI -maix AIX TOC object file
13275 SVR4/EABI -maix -mminimal-toc
13276 AIX minimal TOC translation unit
13278 Name Reg. Set by entries contains:
13279 made by addrs? fp? sum?
13281 AIX TOC 2 crt0 as Y option option
13282 AIX minimal TOC 30 prolog gcc Y Y option
13283 SVR4 SDATA 13 crt0 gcc N Y N
13284 SVR4 pic 30 prolog ld Y not yet N
13285 SVR4 PIC 30 prolog gcc Y option option
13286 EABI TOC 30 prolog gcc Y option option
13290 /* Hash functions for the hash table. */
13293 rs6000_hash_constant (rtx k)
13295 enum rtx_code code = GET_CODE (k);
13296 enum machine_mode mode = GET_MODE (k);
13297 unsigned result = (code << 3) ^ mode;
13298 const char *format;
13301 format = GET_RTX_FORMAT (code);
13302 flen = strlen (format);
13308 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13311 if (mode != VOIDmode)
13312 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13324 for (; fidx < flen; fidx++)
13325 switch (format[fidx])
13330 const char *str = XSTR (k, fidx);
13331 len = strlen (str);
13332 result = result * 613 + len;
13333 for (i = 0; i < len; i++)
13334 result = result * 613 + (unsigned) str[i];
13339 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13343 result = result * 613 + (unsigned) XINT (k, fidx);
13346 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13347 result = result * 613 + (unsigned) XWINT (k, fidx);
13351 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13352 result = result * 613 + (unsigned) (XWINT (k, fidx)
13366 toc_hash_function (const void *hash_entry)
13368 const struct toc_hash_struct *thc =
13369 (const struct toc_hash_struct *) hash_entry;
13370 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13373 /* Compare H1 and H2 for equivalence. */
13376 toc_hash_eq (const void *h1, const void *h2)
13378 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13379 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13381 if (((const struct toc_hash_struct *) h1)->key_mode
13382 != ((const struct toc_hash_struct *) h2)->key_mode)
13385 return rtx_equal_p (r1, r2);
13388 /* These are the names given by the C++ front-end to vtables, and
13389 vtable-like objects. Ideally, this logic should not be here;
13390 instead, there should be some programmatic way of inquiring as
13391 to whether or not an object is a vtable. */
13393 #define VTABLE_NAME_P(NAME) \
13394 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13395 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13396 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13397 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
13398 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13401 rs6000_output_symbol_ref (FILE *file, rtx x)
13403 /* Currently C++ toc references to vtables can be emitted before it
13404 is decided whether the vtable is public or private. If this is
13405 the case, then the linker will eventually complain that there is
13406 a reference to an unknown section. Thus, for vtables only,
13407 we emit the TOC reference to reference the symbol and not the
13409 const char *name = XSTR (x, 0);
13411 if (VTABLE_NAME_P (name))
13413 RS6000_OUTPUT_BASENAME (file, name);
13416 assemble_name (file, name);
13419 /* Output a TOC entry. We derive the entry name from what is being
13423 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13426 const char *name = buf;
13427 const char *real_name;
13434 /* When the linker won't eliminate them, don't output duplicate
13435 TOC entries (this happens on AIX if there is any kind of TOC,
13436 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13438 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13440 struct toc_hash_struct *h;
13443 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13444 time because GGC is not initialized at that point. */
13445 if (toc_hash_table == NULL)
13446 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13447 toc_hash_eq, NULL);
13449 h = ggc_alloc (sizeof (*h));
13451 h->key_mode = mode;
13452 h->labelno = labelno;
13454 found = htab_find_slot (toc_hash_table, h, 1);
13455 if (*found == NULL)
13457 else /* This is indeed a duplicate.
13458 Set this label equal to that label. */
13460 fputs ("\t.set ", file);
13461 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13462 fprintf (file, "%d,", labelno);
13463 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13464 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13470 /* If we're going to put a double constant in the TOC, make sure it's
13471 aligned properly when strict alignment is on. */
13472 if (GET_CODE (x) == CONST_DOUBLE
13473 && STRICT_ALIGNMENT
13474 && GET_MODE_BITSIZE (mode) >= 64
13475 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13476 ASM_OUTPUT_ALIGN (file, 3);
13479 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13481 /* Handle FP constants specially. Note that if we have a minimal
13482 TOC, things we put here aren't actually in the TOC, so we can allow
13484 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13486 REAL_VALUE_TYPE rv;
13489 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13490 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13494 if (TARGET_MINIMAL_TOC)
13495 fputs (DOUBLE_INT_ASM_OP, file);
13497 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13498 k[0] & 0xffffffff, k[1] & 0xffffffff,
13499 k[2] & 0xffffffff, k[3] & 0xffffffff);
13500 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13501 k[0] & 0xffffffff, k[1] & 0xffffffff,
13502 k[2] & 0xffffffff, k[3] & 0xffffffff);
13507 if (TARGET_MINIMAL_TOC)
13508 fputs ("\t.long ", file);
13510 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13511 k[0] & 0xffffffff, k[1] & 0xffffffff,
13512 k[2] & 0xffffffff, k[3] & 0xffffffff);
13513 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13514 k[0] & 0xffffffff, k[1] & 0xffffffff,
13515 k[2] & 0xffffffff, k[3] & 0xffffffff);
13519 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13521 REAL_VALUE_TYPE rv;
13524 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13525 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13529 if (TARGET_MINIMAL_TOC)
13530 fputs (DOUBLE_INT_ASM_OP, file);
13532 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13533 k[0] & 0xffffffff, k[1] & 0xffffffff);
13534 fprintf (file, "0x%lx%08lx\n",
13535 k[0] & 0xffffffff, k[1] & 0xffffffff);
13540 if (TARGET_MINIMAL_TOC)
13541 fputs ("\t.long ", file);
13543 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13544 k[0] & 0xffffffff, k[1] & 0xffffffff);
13545 fprintf (file, "0x%lx,0x%lx\n",
13546 k[0] & 0xffffffff, k[1] & 0xffffffff);
13550 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13552 REAL_VALUE_TYPE rv;
13555 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13556 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13560 if (TARGET_MINIMAL_TOC)
13561 fputs (DOUBLE_INT_ASM_OP, file);
13563 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13564 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13569 if (TARGET_MINIMAL_TOC)
13570 fputs ("\t.long ", file);
13572 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13573 fprintf (file, "0x%lx\n", l & 0xffffffff);
13577 else if (GET_MODE (x) == VOIDmode
13578 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13580 unsigned HOST_WIDE_INT low;
13581 HOST_WIDE_INT high;
13583 if (GET_CODE (x) == CONST_DOUBLE)
13585 low = CONST_DOUBLE_LOW (x);
13586 high = CONST_DOUBLE_HIGH (x);
13589 #if HOST_BITS_PER_WIDE_INT == 32
13592 high = (low & 0x80000000) ? ~0 : 0;
13596 low = INTVAL (x) & 0xffffffff;
13597 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13601 /* TOC entries are always Pmode-sized, but since this
13602 is a bigendian machine then if we're putting smaller
13603 integer constants in the TOC we have to pad them.
13604 (This is still a win over putting the constants in
13605 a separate constant pool, because then we'd have
13606 to have both a TOC entry _and_ the actual constant.)
13608 For a 32-bit target, CONST_INT values are loaded and shifted
13609 entirely within `low' and can be stored in one TOC entry. */
13611 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13612 abort ();/* It would be easy to make this work, but it doesn't now. */
13614 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13616 #if HOST_BITS_PER_WIDE_INT == 32
13617 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13618 POINTER_SIZE, &low, &high, 0);
13621 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13622 high = (HOST_WIDE_INT) low >> 32;
13629 if (TARGET_MINIMAL_TOC)
13630 fputs (DOUBLE_INT_ASM_OP, file);
13632 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13633 (long) high & 0xffffffff, (long) low & 0xffffffff);
13634 fprintf (file, "0x%lx%08lx\n",
13635 (long) high & 0xffffffff, (long) low & 0xffffffff);
13640 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13642 if (TARGET_MINIMAL_TOC)
13643 fputs ("\t.long ", file);
13645 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13646 (long) high & 0xffffffff, (long) low & 0xffffffff);
13647 fprintf (file, "0x%lx,0x%lx\n",
13648 (long) high & 0xffffffff, (long) low & 0xffffffff);
13652 if (TARGET_MINIMAL_TOC)
13653 fputs ("\t.long ", file);
13655 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13656 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13662 if (GET_CODE (x) == CONST)
13664 if (GET_CODE (XEXP (x, 0)) != PLUS)
13667 base = XEXP (XEXP (x, 0), 0);
13668 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13671 if (GET_CODE (base) == SYMBOL_REF)
13672 name = XSTR (base, 0);
13673 else if (GET_CODE (base) == LABEL_REF)
13674 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13675 else if (GET_CODE (base) == CODE_LABEL)
13676 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13680 real_name = (*targetm.strip_name_encoding) (name);
13681 if (TARGET_MINIMAL_TOC)
13682 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13685 fprintf (file, "\t.tc %s", real_name);
13688 fprintf (file, ".N%d", - offset);
13690 fprintf (file, ".P%d", offset);
13692 fputs ("[TC],", file);
13695 /* Currently C++ toc references to vtables can be emitted before it
13696 is decided whether the vtable is public or private. If this is
13697 the case, then the linker will eventually complain that there is
13698 a TOC reference to an unknown section. Thus, for vtables only,
13699 we emit the TOC reference to reference the symbol and not the
13701 if (VTABLE_NAME_P (name))
13703 RS6000_OUTPUT_BASENAME (file, name);
13705 fprintf (file, "%d", offset);
13706 else if (offset > 0)
13707 fprintf (file, "+%d", offset);
13710 output_addr_const (file, x);
13714 /* Output an assembler pseudo-op to write an ASCII string of N characters
13715 starting at P to FILE.
13717 On the RS/6000, we have to do this using the .byte operation and
13718 write out special characters outside the quoted string.
13719 Also, the assembler is broken; very long strings are truncated,
13720 so we must artificially break them up early. */
13723 output_ascii (FILE *file, const char *p, int n)
13726 int i, count_string;
13727 const char *for_string = "\t.byte \"";
13728 const char *for_decimal = "\t.byte ";
13729 const char *to_close = NULL;
13732 for (i = 0; i < n; i++)
13735 if (c >= ' ' && c < 0177)
13738 fputs (for_string, file);
13741 /* Write two quotes to get one. */
13749 for_decimal = "\"\n\t.byte ";
13753 if (count_string >= 512)
13755 fputs (to_close, file);
13757 for_string = "\t.byte \"";
13758 for_decimal = "\t.byte ";
13766 fputs (for_decimal, file);
13767 fprintf (file, "%d", c);
13769 for_string = "\n\t.byte \"";
13770 for_decimal = ", ";
13776 /* Now close the string if we have written one. Then end the line. */
13778 fputs (to_close, file);
13781 /* Generate a unique section name for FILENAME for a section type
13782 represented by SECTION_DESC. Output goes into BUF.
13784 SECTION_DESC can be any string, as long as it is different for each
13785 possible section type.
13787 We name the section in the same manner as xlc. The name begins with an
13788 underscore followed by the filename (after stripping any leading directory
13789 names) with the last period replaced by the string SECTION_DESC. If
13790 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13794 rs6000_gen_section_name (char **buf, const char *filename,
13795 const char *section_desc)
13797 const char *q, *after_last_slash, *last_period = 0;
13801 after_last_slash = filename;
13802 for (q = filename; *q; q++)
13805 after_last_slash = q + 1;
13806 else if (*q == '.')
13810 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13811 *buf = (char *) xmalloc (len);
13816 for (q = after_last_slash; *q; q++)
13818 if (q == last_period)
13820 strcpy (p, section_desc);
13821 p += strlen (section_desc);
13825 else if (ISALNUM (*q))
13829 if (last_period == 0)
13830 strcpy (p, section_desc);
13835 /* Emit profile function. */
13838 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13840 if (TARGET_PROFILE_KERNEL)
13843 if (DEFAULT_ABI == ABI_AIX)
13845 #ifndef NO_PROFILE_COUNTERS
13846 # define NO_PROFILE_COUNTERS 0
13848 if (NO_PROFILE_COUNTERS)
13849 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13853 const char *label_name;
13856 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13857 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13858 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13860 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13864 else if (DEFAULT_ABI == ABI_DARWIN)
13866 const char *mcount_name = RS6000_MCOUNT;
13867 int caller_addr_regno = LINK_REGISTER_REGNUM;
13869 /* Be conservative and always set this, at least for now. */
13870 current_function_uses_pic_offset_table = 1;
13873 /* For PIC code, set up a stub and collect the caller's address
13874 from r0, which is where the prologue puts it. */
13875 if (MACHOPIC_INDIRECT)
13877 mcount_name = machopic_stub_name (mcount_name);
13878 if (current_function_uses_pic_offset_table)
13879 caller_addr_regno = 0;
13882 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13884 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13888 /* Write function profiler code. */
13891 output_function_profiler (FILE *file, int labelno)
13896 switch (DEFAULT_ABI)
13905 warning ("no profiling of 64-bit code for this ABI");
13908 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13909 fprintf (file, "\tmflr %s\n", reg_names[0]);
13912 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13913 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13914 reg_names[0], save_lr, reg_names[1]);
13915 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13916 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13917 assemble_name (file, buf);
13918 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13920 else if (flag_pic > 1)
13922 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13923 reg_names[0], save_lr, reg_names[1]);
13924 /* Now, we need to get the address of the label. */
13925 fputs ("\tbl 1f\n\t.long ", file);
13926 assemble_name (file, buf);
13927 fputs ("-.\n1:", file);
13928 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13929 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13930 reg_names[0], reg_names[11]);
13931 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13932 reg_names[0], reg_names[0], reg_names[11]);
13936 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13937 assemble_name (file, buf);
13938 fputs ("@ha\n", file);
13939 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13940 reg_names[0], save_lr, reg_names[1]);
13941 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13942 assemble_name (file, buf);
13943 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13946 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13947 fprintf (file, "\tbl %s%s\n",
13948 RS6000_MCOUNT, flag_pic ? "@plt" : "");
13953 if (!TARGET_PROFILE_KERNEL)
13955 /* Don't do anything, done in output_profile_hook (). */
13962 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13963 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13965 if (current_function_needs_context)
13967 asm_fprintf (file, "\tstd %s,24(%s)\n",
13968 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13969 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13970 asm_fprintf (file, "\tld %s,24(%s)\n",
13971 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13974 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13982 rs6000_use_dfa_pipeline_interface (void)
13987 /* Power4 load update and store update instructions are cracked into a
13988 load or store and an integer insn which are executed in the same cycle.
13989 Branches have their own dispatch slot which does not count against the
13990 GCC issue rate, but it changes the program flow so there are no other
13991 instructions to issue in this cycle. */
13994 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13995 int verbose ATTRIBUTE_UNUSED,
13996 rtx insn, int more)
13998 if (GET_CODE (PATTERN (insn)) == USE
13999 || GET_CODE (PATTERN (insn)) == CLOBBER)
14002 if (rs6000_sched_groups)
14004 if (is_microcoded_insn (insn))
14006 else if (is_cracked_insn (insn))
14007 return more > 2 ? more - 2 : 0;
14013 /* Adjust the cost of a scheduling dependency. Return the new cost of
14014 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14017 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14020 if (! recog_memoized (insn))
14023 if (REG_NOTE_KIND (link) != 0)
14026 if (REG_NOTE_KIND (link) == 0)
14028 /* Data dependency; DEP_INSN writes a register that INSN reads
14029 some cycles later. */
14030 switch (get_attr_type (insn))
14033 /* Tell the first scheduling pass about the latency between
14034 a mtctr and bctr (and mtlr and br/blr). The first
14035 scheduling pass will not know about this latency since
14036 the mtctr instruction, which has the latency associated
14037 to it, will be generated by reload. */
14038 return TARGET_POWER ? 5 : 4;
14040 /* Leave some extra cycles between a compare and its
14041 dependent branch, to inhibit expensive mispredicts. */
14042 if ((rs6000_cpu_attr == CPU_PPC603
14043 || rs6000_cpu_attr == CPU_PPC604
14044 || rs6000_cpu_attr == CPU_PPC604E
14045 || rs6000_cpu_attr == CPU_PPC620
14046 || rs6000_cpu_attr == CPU_PPC630
14047 || rs6000_cpu_attr == CPU_PPC750
14048 || rs6000_cpu_attr == CPU_PPC7400
14049 || rs6000_cpu_attr == CPU_PPC7450
14050 || rs6000_cpu_attr == CPU_POWER4
14051 || rs6000_cpu_attr == CPU_POWER5)
14052 && recog_memoized (dep_insn)
14053 && (INSN_CODE (dep_insn) >= 0)
14054 && (get_attr_type (dep_insn) == TYPE_CMP
14055 || get_attr_type (dep_insn) == TYPE_COMPARE
14056 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14057 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14058 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14059 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14060 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14061 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14066 /* Fall out to return default cost. */
14072 /* The function returns a true if INSN is microcoded.
14073 Return false otherwise. */
14076 is_microcoded_insn (rtx insn)
14078 if (!insn || !INSN_P (insn)
14079 || GET_CODE (PATTERN (insn)) == USE
14080 || GET_CODE (PATTERN (insn)) == CLOBBER)
14083 if (rs6000_sched_groups)
14085 enum attr_type type = get_attr_type (insn);
14086 if (type == TYPE_LOAD_EXT_U
14087 || type == TYPE_LOAD_EXT_UX
14088 || type == TYPE_LOAD_UX
14089 || type == TYPE_STORE_UX
14090 || type == TYPE_MFCR)
14097 /* The function returns a nonzero value if INSN can be scheduled only
14098 as the first insn in a dispatch group ("dispatch-slot restricted").
14099 In this case, the returned value indicates how many dispatch slots
14100 the insn occupies (at the beginning of the group).
14101 Return 0 otherwise. */
14104 is_dispatch_slot_restricted (rtx insn)
14106 enum attr_type type;
14108 if (!rs6000_sched_groups)
14112 || insn == NULL_RTX
14113 || GET_CODE (insn) == NOTE
14114 || GET_CODE (PATTERN (insn)) == USE
14115 || GET_CODE (PATTERN (insn)) == CLOBBER)
14118 type = get_attr_type (insn);
14125 case TYPE_DELAYED_CR:
14126 case TYPE_CR_LOGICAL:
14134 if (rs6000_cpu == PROCESSOR_POWER5
14135 && is_cracked_insn (insn))
14141 /* The function returns true if INSN is cracked into 2 instructions
14142 by the processor (and therefore occupies 2 issue slots). */
14145 is_cracked_insn (rtx insn)
14147 if (!insn || !INSN_P (insn)
14148 || GET_CODE (PATTERN (insn)) == USE
14149 || GET_CODE (PATTERN (insn)) == CLOBBER)
14152 if (rs6000_sched_groups)
14154 enum attr_type type = get_attr_type (insn);
14155 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14156 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14157 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14158 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14159 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14160 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14161 || type == TYPE_IDIV || type == TYPE_LDIV
14162 || type == TYPE_INSERT_WORD)
14169 /* The function returns true if INSN can be issued only from
14170 the branch slot. */
14173 is_branch_slot_insn (rtx insn)
14175 if (!insn || !INSN_P (insn)
14176 || GET_CODE (PATTERN (insn)) == USE
14177 || GET_CODE (PATTERN (insn)) == CLOBBER)
14180 if (rs6000_sched_groups)
14182 enum attr_type type = get_attr_type (insn);
14183 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14191 /* A C statement (sans semicolon) to update the integer scheduling
14192 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14193 INSN earlier, reduce the priority to execute INSN later. Do not
14194 define this macro if you do not need to adjust the scheduling
14195 priorities of insns. */
14198 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14200 /* On machines (like the 750) which have asymmetric integer units,
14201 where one integer unit can do multiply and divides and the other
14202 can't, reduce the priority of multiply/divide so it is scheduled
14203 before other integer operations. */
14206 if (! INSN_P (insn))
14209 if (GET_CODE (PATTERN (insn)) == USE)
14212 switch (rs6000_cpu_attr) {
14214 switch (get_attr_type (insn))
14221 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14222 priority, priority);
14223 if (priority >= 0 && priority < 0x01000000)
14230 if (is_dispatch_slot_restricted (insn)
14231 && reload_completed
14232 && current_sched_info->sched_max_insns_priority
14233 && rs6000_sched_restricted_insns_priority)
14236 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14237 if (rs6000_sched_restricted_insns_priority == 1)
14238 /* Attach highest priority to insn. This means that in
14239 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14240 precede 'priority' (critical path) considerations. */
14241 return current_sched_info->sched_max_insns_priority;
14242 else if (rs6000_sched_restricted_insns_priority == 2)
14243 /* Increase priority of insn by a minimal amount. This means that in
14244 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14245 precede dispatch-slot restriction considerations. */
14246 return (priority + 1);
14252 /* Return how many instructions the machine can issue per cycle. */
14255 rs6000_issue_rate (void)
14257 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14258 if (!reload_completed)
14261 switch (rs6000_cpu_attr) {
14262 case CPU_RIOS1: /* ? */
14264 case CPU_PPC601: /* ? */
14287 /* Return how many instructions to look ahead for better insn
14291 rs6000_use_sched_lookahead (void)
14293 if (rs6000_cpu_attr == CPU_PPC8540)
14298 /* Determine is PAT refers to memory. */
14301 is_mem_ref (rtx pat)
14307 if (GET_CODE (pat) == MEM)
14310 /* Recursively process the pattern. */
14311 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14313 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14316 ret |= is_mem_ref (XEXP (pat, i));
14317 else if (fmt[i] == 'E')
14318 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14319 ret |= is_mem_ref (XVECEXP (pat, i, j));
14325 /* Determine if PAT is a PATTERN of a load insn. */
14328 is_load_insn1 (rtx pat)
14330 if (!pat || pat == NULL_RTX)
14333 if (GET_CODE (pat) == SET)
14334 return is_mem_ref (SET_SRC (pat));
14336 if (GET_CODE (pat) == PARALLEL)
14340 for (i = 0; i < XVECLEN (pat, 0); i++)
14341 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14348 /* Determine if INSN loads from memory. */
14351 is_load_insn (rtx insn)
14353 if (!insn || !INSN_P (insn))
14356 if (GET_CODE (insn) == CALL_INSN)
14359 return is_load_insn1 (PATTERN (insn));
14362 /* Determine if PAT is a PATTERN of a store insn. */
14365 is_store_insn1 (rtx pat)
14367 if (!pat || pat == NULL_RTX)
14370 if (GET_CODE (pat) == SET)
14371 return is_mem_ref (SET_DEST (pat));
14373 if (GET_CODE (pat) == PARALLEL)
14377 for (i = 0; i < XVECLEN (pat, 0); i++)
14378 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14385 /* Determine if INSN stores to memory. */
14388 is_store_insn (rtx insn)
14390 if (!insn || !INSN_P (insn))
14393 return is_store_insn1 (PATTERN (insn));
14396 /* Returns whether the dependence between INSN and NEXT is considered
14397 costly by the given target. */
14400 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14402 /* If the flag is not enbled - no dependence is considered costly;
14403 allow all dependent insns in the same group.
14404 This is the most aggressive option. */
14405 if (rs6000_sched_costly_dep == no_dep_costly)
14408 /* If the flag is set to 1 - a dependence is always considered costly;
14409 do not allow dependent instructions in the same group.
14410 This is the most conservative option. */
14411 if (rs6000_sched_costly_dep == all_deps_costly)
14414 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14415 && is_load_insn (next)
14416 && is_store_insn (insn))
14417 /* Prevent load after store in the same group. */
14420 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14421 && is_load_insn (next)
14422 && is_store_insn (insn)
14423 && (!link || (int) REG_NOTE_KIND (link) == 0))
14424 /* Prevent load after store in the same group if it is a true dependence. */
14427 /* The flag is set to X; dependences with latency >= X are considered costly,
14428 and will not be scheduled in the same group. */
14429 if (rs6000_sched_costly_dep <= max_dep_latency
14430 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14436 /* Return the next insn after INSN that is found before TAIL is reached,
14437 skipping any "non-active" insns - insns that will not actually occupy
14438 an issue slot. Return NULL_RTX if such an insn is not found. */
14441 get_next_active_insn (rtx insn, rtx tail)
14445 if (!insn || insn == tail)
14448 next_insn = NEXT_INSN (insn);
14451 && next_insn != tail
14452 && (GET_CODE(next_insn) == NOTE
14453 || GET_CODE (PATTERN (next_insn)) == USE
14454 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14456 next_insn = NEXT_INSN (next_insn);
14459 if (!next_insn || next_insn == tail)
14465 /* Return whether the presence of INSN causes a dispatch group termination
14466 of group WHICH_GROUP.
14468 If WHICH_GROUP == current_group, this function will return true if INSN
14469 causes the termination of the current group (i.e, the dispatch group to
14470 which INSN belongs). This means that INSN will be the last insn in the
14471 group it belongs to.
14473 If WHICH_GROUP == previous_group, this function will return true if INSN
14474 causes the termination of the previous group (i.e, the dispatch group that
14475 precedes the group to which INSN belongs). This means that INSN will be
14476 the first insn in the group it belongs to). */
14479 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14481 enum attr_type type;
14486 type = get_attr_type (insn);
14488 if (is_microcoded_insn (insn))
14491 if (which_group == current_group)
14493 if (is_branch_slot_insn (insn))
14497 else if (which_group == previous_group)
14499 if (is_dispatch_slot_restricted (insn))
14507 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14508 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14511 is_costly_group (rtx *group_insns, rtx next_insn)
14516 int issue_rate = rs6000_issue_rate ();
14518 for (i = 0; i < issue_rate; i++)
14520 rtx insn = group_insns[i];
14523 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14525 rtx next = XEXP (link, 0);
14526 if (next == next_insn)
14528 cost = insn_cost (insn, link, next_insn);
14529 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14538 /* Utility of the function redefine_groups.
14539 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14540 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14541 to keep it "far" (in a separate group) from GROUP_INSNS, following
14542 one of the following schemes, depending on the value of the flag
14543 -minsert_sched_nops = X:
14544 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14545 in order to force NEXT_INSN into a separate group.
14546 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14547 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14548 insertion (has a group just ended, how many vacant issue slots remain in the
14549 last group, and how many dispatch groups were encountered so far). */
14552 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14553 bool *group_end, int can_issue_more, int *group_count)
14557 int issue_rate = rs6000_issue_rate ();
14558 bool end = *group_end;
14561 if (next_insn == NULL_RTX)
14562 return can_issue_more;
14564 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14565 return can_issue_more;
14567 force = is_costly_group (group_insns, next_insn);
14569 return can_issue_more;
14571 if (sched_verbose > 6)
14572 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14573 *group_count ,can_issue_more);
14575 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14578 can_issue_more = 0;
14580 /* Since only a branch can be issued in the last issue_slot, it is
14581 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14582 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14583 in this case the last nop will start a new group and the branch will be
14584 forced to the new group. */
14585 if (can_issue_more && !is_branch_slot_insn (next_insn))
14588 while (can_issue_more > 0)
14591 emit_insn_before (nop, next_insn);
14599 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14601 int n_nops = rs6000_sched_insert_nops;
14603 /* Nops can't be issued from the branch slot, so the effective
14604 issue_rate for nops is 'issue_rate - 1'. */
14605 if (can_issue_more == 0)
14606 can_issue_more = issue_rate;
14608 if (can_issue_more == 0)
14610 can_issue_more = issue_rate - 1;
14613 for (i = 0; i < issue_rate; i++)
14615 group_insns[i] = 0;
14622 emit_insn_before (nop, next_insn);
14623 if (can_issue_more == issue_rate - 1) /* new group begins */
14626 if (can_issue_more == 0)
14628 can_issue_more = issue_rate - 1;
14631 for (i = 0; i < issue_rate; i++)
14633 group_insns[i] = 0;
14639 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14642 *group_end = /* Is next_insn going to start a new group? */
14644 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14645 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14646 || (can_issue_more < issue_rate &&
14647 insn_terminates_group_p (next_insn, previous_group)));
14648 if (*group_end && end)
14651 if (sched_verbose > 6)
14652 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14653 *group_count, can_issue_more);
14654 return can_issue_more;
14657 return can_issue_more;
14660 /* This function tries to synch the dispatch groups that the compiler "sees"
14661 with the dispatch groups that the processor dispatcher is expected to
14662 form in practice. It tries to achieve this synchronization by forcing the
14663 estimated processor grouping on the compiler (as opposed to the function
14664 'pad_goups' which tries to force the scheduler's grouping on the processor).
14666 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14667 examines the (estimated) dispatch groups that will be formed by the processor
14668 dispatcher. It marks these group boundaries to reflect the estimated
14669 processor grouping, overriding the grouping that the scheduler had marked.
14670 Depending on the value of the flag '-minsert-sched-nops' this function can
14671 force certain insns into separate groups or force a certain distance between
14672 them by inserting nops, for example, if there exists a "costly dependence"
14675 The function estimates the group boundaries that the processor will form as
14676 folllows: It keeps track of how many vacant issue slots are available after
14677 each insn. A subsequent insn will start a new group if one of the following
14679 - no more vacant issue slots remain in the current dispatch group.
14680 - only the last issue slot, which is the branch slot, is vacant, but the next
14681 insn is not a branch.
14682 - only the last 2 or less issue slots, including the branch slot, are vacant,
14683 which means that a cracked insn (which occupies two issue slots) can't be
14684 issued in this group.
14685 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14686 start a new group. */
14689 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14691 rtx insn, next_insn;
14693 int can_issue_more;
14696 int group_count = 0;
14700 issue_rate = rs6000_issue_rate ();
14701 group_insns = alloca (issue_rate * sizeof (rtx));
14702 for (i = 0; i < issue_rate; i++)
14704 group_insns[i] = 0;
14706 can_issue_more = issue_rate;
14708 insn = get_next_active_insn (prev_head_insn, tail);
14711 while (insn != NULL_RTX)
14713 slot = (issue_rate - can_issue_more);
14714 group_insns[slot] = insn;
14716 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14717 if (insn_terminates_group_p (insn, current_group))
14718 can_issue_more = 0;
14720 next_insn = get_next_active_insn (insn, tail);
14721 if (next_insn == NULL_RTX)
14722 return group_count + 1;
14724 group_end = /* Is next_insn going to start a new group? */
14725 (can_issue_more == 0
14726 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14727 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14728 || (can_issue_more < issue_rate &&
14729 insn_terminates_group_p (next_insn, previous_group)));
14731 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14732 next_insn, &group_end, can_issue_more, &group_count);
14737 can_issue_more = 0;
14738 for (i = 0; i < issue_rate; i++)
14740 group_insns[i] = 0;
14744 if (GET_MODE (next_insn) == TImode && can_issue_more)
14745 PUT_MODE(next_insn, VOIDmode);
14746 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14747 PUT_MODE (next_insn, TImode);
14750 if (can_issue_more == 0)
14751 can_issue_more = issue_rate;
14754 return group_count;
14757 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14758 dispatch group boundaries that the scheduler had marked. Pad with nops
14759 any dispatch groups which have vacant issue slots, in order to force the
14760 scheduler's grouping on the processor dispatcher. The function
14761 returns the number of dispatch groups found. */
14764 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14766 rtx insn, next_insn;
14769 int can_issue_more;
14771 int group_count = 0;
14773 /* Initialize issue_rate. */
14774 issue_rate = rs6000_issue_rate ();
14775 can_issue_more = issue_rate;
14777 insn = get_next_active_insn (prev_head_insn, tail);
14778 next_insn = get_next_active_insn (insn, tail);
14780 while (insn != NULL_RTX)
14783 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14785 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14787 if (next_insn == NULL_RTX)
14792 /* If the scheduler had marked group termination at this location
14793 (between insn and next_indn), and neither insn nor next_insn will
14794 force group termination, pad the group with nops to force group
14797 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14798 && !insn_terminates_group_p (insn, current_group)
14799 && !insn_terminates_group_p (next_insn, previous_group))
14801 if (!is_branch_slot_insn(next_insn))
14804 while (can_issue_more)
14807 emit_insn_before (nop, next_insn);
14812 can_issue_more = issue_rate;
14817 next_insn = get_next_active_insn (insn, tail);
14820 return group_count;
14823 /* The following function is called at the end of scheduling BB.
14824 After reload, it inserts nops at insn group bundling. */
14827 rs6000_sched_finish (FILE *dump, int sched_verbose)
14832 fprintf (dump, "=== Finishing schedule.\n");
14834 if (reload_completed && rs6000_sched_groups)
14836 if (rs6000_sched_insert_nops == sched_finish_none)
14839 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14840 n_groups = pad_groups (dump, sched_verbose,
14841 current_sched_info->prev_head,
14842 current_sched_info->next_tail);
14844 n_groups = redefine_groups (dump, sched_verbose,
14845 current_sched_info->prev_head,
14846 current_sched_info->next_tail);
14848 if (sched_verbose >= 6)
14850 fprintf (dump, "ngroups = %d\n", n_groups);
14851 print_rtl (dump, current_sched_info->prev_head);
14852 fprintf (dump, "Done finish_sched\n");
14857 /* Length in units of the trampoline for entering a nested function. */
14860 rs6000_trampoline_size (void)
14864 switch (DEFAULT_ABI)
14870 ret = (TARGET_32BIT) ? 12 : 24;
14875 ret = (TARGET_32BIT) ? 40 : 48;
14882 /* Emit RTL insns to initialize the variable parts of a trampoline.
14883 FNADDR is an RTX for the address of the function's pure code.
14884 CXT is an RTX for the static chain value for the function. */
14887 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14889 enum machine_mode pmode = Pmode;
14890 int regsize = (TARGET_32BIT) ? 4 : 8;
14891 rtx ctx_reg = force_reg (pmode, cxt);
14893 switch (DEFAULT_ABI)
14898 /* Macros to shorten the code expansions below. */
14899 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14900 #define MEM_PLUS(addr,offset) \
14901 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14903 /* Under AIX, just build the 3 word function descriptor */
14906 rtx fn_reg = gen_reg_rtx (pmode);
14907 rtx toc_reg = gen_reg_rtx (pmode);
14908 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14909 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14910 emit_move_insn (MEM_DEREF (addr), fn_reg);
14911 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14912 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14916 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14919 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14920 FALSE, VOIDmode, 4,
14922 GEN_INT (rs6000_trampoline_size ()), SImode,
14932 /* Table of valid machine attributes. */
14934 const struct attribute_spec rs6000_attribute_table[] =
14936 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14937 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
14938 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14939 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14940 { NULL, 0, 0, false, false, false, NULL }
14943 /* Handle the "altivec" attribute. The attribute may have
14944 arguments as follows:
14946 __attribute__((altivec(vector__)))
14947 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
14948 __attribute__((altivec(bool__))) (always followed by 'unsigned')
14950 and may appear more than once (e.g., 'vector bool char') in a
14951 given declaration. */
14954 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
14955 int flags ATTRIBUTE_UNUSED,
14956 bool *no_add_attrs)
14958 tree type = *node, result = NULL_TREE;
14959 enum machine_mode mode;
14962 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
14963 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
14964 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
14967 while (POINTER_TYPE_P (type)
14968 || TREE_CODE (type) == FUNCTION_TYPE
14969 || TREE_CODE (type) == METHOD_TYPE
14970 || TREE_CODE (type) == ARRAY_TYPE)
14971 type = TREE_TYPE (type);
14973 mode = TYPE_MODE (type);
14975 if (rs6000_warn_altivec_long
14976 && (type == long_unsigned_type_node || type == long_integer_type_node))
14977 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
14979 switch (altivec_type)
14982 unsigned_p = TYPE_UNSIGNED (type);
14986 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
14989 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
14992 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
14994 case SFmode: result = V4SF_type_node; break;
14995 /* If the user says 'vector int bool', we may be handed the 'bool'
14996 attribute _before_ the 'vector' attribute, and so select the proper
14997 type in the 'b' case below. */
14998 case V4SImode: case V8HImode: case V16QImode: result = type;
15005 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
15006 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
15007 case QImode: case V16QImode: result = bool_V16QI_type_node;
15014 case V8HImode: result = pixel_V8HI_type_node;
15020 *no_add_attrs = true; /* No need to hang on to the attribute. */
15023 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
15025 *node = reconstruct_complex_type (*node, result);
15030 /* AltiVec defines four built-in scalar types that serve as vector
15031 elements; we must teach the compiler how to mangle them. */
15033 static const char *
15034 rs6000_mangle_fundamental_type (tree type)
15036 if (type == bool_char_type_node) return "U6__boolc";
15037 if (type == bool_short_type_node) return "U6__bools";
15038 if (type == pixel_type_node) return "u7__pixel";
15039 if (type == bool_int_type_node) return "U6__booli";
15041 /* For all other types, use normal C++ mangling. */
15045 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15046 struct attribute_spec.handler. */
15049 rs6000_handle_longcall_attribute (tree *node, tree name,
15050 tree args ATTRIBUTE_UNUSED,
15051 int flags ATTRIBUTE_UNUSED,
15052 bool *no_add_attrs)
15054 if (TREE_CODE (*node) != FUNCTION_TYPE
15055 && TREE_CODE (*node) != FIELD_DECL
15056 && TREE_CODE (*node) != TYPE_DECL)
15058 warning ("`%s' attribute only applies to functions",
15059 IDENTIFIER_POINTER (name));
15060 *no_add_attrs = true;
15066 /* Set longcall attributes on all functions declared when
15067 rs6000_default_long_calls is true. */
15069 rs6000_set_default_type_attributes (tree type)
15071 if (rs6000_default_long_calls
15072 && (TREE_CODE (type) == FUNCTION_TYPE
15073 || TREE_CODE (type) == METHOD_TYPE))
15074 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15076 TYPE_ATTRIBUTES (type));
15079 /* Return a reference suitable for calling a function with the
15080 longcall attribute. */
15083 rs6000_longcall_ref (rtx call_ref)
15085 const char *call_name;
15088 if (GET_CODE (call_ref) != SYMBOL_REF)
15091 /* System V adds '.' to the internal name, so skip them. */
15092 call_name = XSTR (call_ref, 0);
15093 if (*call_name == '.')
15095 while (*call_name == '.')
15098 node = get_identifier (call_name);
15099 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15102 return force_reg (Pmode, call_ref);
15105 #ifdef USING_ELFOS_H
15107 /* A C statement or statements to switch to the appropriate section
15108 for output of RTX in mode MODE. You can assume that RTX is some
15109 kind of constant in RTL. The argument MODE is redundant except in
15110 the case of a `const_int' rtx. Select the section by calling
15111 `text_section' or one of the alternatives for other sections.
15113 Do not define this macro if you put all constants in the read-only
15117 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15118 unsigned HOST_WIDE_INT align)
15120 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15123 default_elf_select_rtx_section (mode, x, align);
15126 /* A C statement or statements to switch to the appropriate
15127 section for output of DECL. DECL is either a `VAR_DECL' node
15128 or a constant of some sort. RELOC indicates whether forming
15129 the initial value of DECL requires link-time relocations. */
15132 rs6000_elf_select_section (tree decl, int reloc,
15133 unsigned HOST_WIDE_INT align)
15135 /* Pretend that we're always building for a shared library when
15136 ABI_AIX, because otherwise we end up with dynamic relocations
15137 in read-only sections. This happens for function pointers,
15138 references to vtables in typeinfo, and probably other cases. */
15139 default_elf_select_section_1 (decl, reloc, align,
15140 flag_pic || DEFAULT_ABI == ABI_AIX);
15143 /* A C statement to build up a unique section name, expressed as a
15144 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15145 RELOC indicates whether the initial value of EXP requires
15146 link-time relocations. If you do not define this macro, GCC will use
15147 the symbol name prefixed by `.' as the section name. Note - this
15148 macro can now be called for uninitialized data items as well as
15149 initialized data and functions. */
15152 rs6000_elf_unique_section (tree decl, int reloc)
15154 /* As above, pretend that we're always building for a shared library
15155 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15156 default_unique_section_1 (decl, reloc,
15157 flag_pic || DEFAULT_ABI == ABI_AIX);
15160 /* For a SYMBOL_REF, set generic flags and then perform some
15161 target-specific processing.
15163 When the AIX ABI is requested on a non-AIX system, replace the
15164 function name with the real name (with a leading .) rather than the
15165 function descriptor name. This saves a lot of overriding code to
15166 read the prefixes. */
15169 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15171 default_encode_section_info (decl, rtl, first);
15174 && TREE_CODE (decl) == FUNCTION_DECL
15176 && DEFAULT_ABI == ABI_AIX)
15178 rtx sym_ref = XEXP (rtl, 0);
15179 size_t len = strlen (XSTR (sym_ref, 0));
15180 char *str = alloca (len + 2);
15182 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15183 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15188 rs6000_elf_in_small_data_p (tree decl)
15190 if (rs6000_sdata == SDATA_NONE)
15193 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15195 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15196 if (strcmp (section, ".sdata") == 0
15197 || strcmp (section, ".sdata2") == 0
15198 || strcmp (section, ".sbss") == 0
15199 || strcmp (section, ".sbss2") == 0
15200 || strcmp (section, ".PPC.EMB.sdata0") == 0
15201 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15206 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15209 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15210 /* If it's not public, and we're not going to reference it there,
15211 there's no need to put it in the small data section. */
15212 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15219 #endif /* USING_ELFOS_H */
15222 /* Return a REG that occurs in ADDR with coefficient 1.
15223 ADDR can be effectively incremented by incrementing REG.
15225 r0 is special and we must not select it as an address
15226 register by this routine since our caller will try to
15227 increment the returned register via an "la" instruction. */
15230 find_addr_reg (rtx addr)
15232 while (GET_CODE (addr) == PLUS)
15234 if (GET_CODE (XEXP (addr, 0)) == REG
15235 && REGNO (XEXP (addr, 0)) != 0)
15236 addr = XEXP (addr, 0);
15237 else if (GET_CODE (XEXP (addr, 1)) == REG
15238 && REGNO (XEXP (addr, 1)) != 0)
15239 addr = XEXP (addr, 1);
15240 else if (CONSTANT_P (XEXP (addr, 0)))
15241 addr = XEXP (addr, 1);
15242 else if (CONSTANT_P (XEXP (addr, 1)))
15243 addr = XEXP (addr, 0);
15247 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15253 rs6000_fatal_bad_address (rtx op)
15255 fatal_insn ("bad address", op);
15261 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15262 reference and a constant. */
15265 symbolic_operand (rtx op)
15267 switch (GET_CODE (op))
15274 return (GET_CODE (op) == SYMBOL_REF ||
15275 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15276 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15277 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15286 static tree branch_island_list = 0;
15288 /* Remember to generate a branch island for far calls to the given
15292 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15294 tree branch_island = build_tree_list (function_name, label_name);
15295 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15296 TREE_CHAIN (branch_island) = branch_island_list;
15297 branch_island_list = branch_island;
15300 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15301 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15302 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15303 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15305 /* Generate far-jump branch islands for everything on the
15306 branch_island_list. Invoked immediately after the last instruction
15307 of the epilogue has been emitted; the branch-islands must be
15308 appended to, and contiguous with, the function body. Mach-O stubs
15309 are generated in machopic_output_stub(). */
15312 macho_branch_islands (void)
15315 tree branch_island;
15317 for (branch_island = branch_island_list;
15319 branch_island = TREE_CHAIN (branch_island))
15321 const char *label =
15322 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15324 darwin_strip_name_encoding (
15325 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15326 char name_buf[512];
15327 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15328 if (name[0] == '*' || name[0] == '&')
15329 strcpy (name_buf, name+1);
15333 strcpy (name_buf+1, name);
15335 strcpy (tmp_buf, "\n");
15336 strcat (tmp_buf, label);
15337 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15338 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15339 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15340 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15341 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15344 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15345 strcat (tmp_buf, label);
15346 strcat (tmp_buf, "_pic\n");
15347 strcat (tmp_buf, label);
15348 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15350 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15351 strcat (tmp_buf, name_buf);
15352 strcat (tmp_buf, " - ");
15353 strcat (tmp_buf, label);
15354 strcat (tmp_buf, "_pic)\n");
15356 strcat (tmp_buf, "\tmtlr r0\n");
15358 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15359 strcat (tmp_buf, name_buf);
15360 strcat (tmp_buf, " - ");
15361 strcat (tmp_buf, label);
15362 strcat (tmp_buf, "_pic)\n");
15364 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15368 strcat (tmp_buf, ":\nlis r12,hi16(");
15369 strcat (tmp_buf, name_buf);
15370 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15371 strcat (tmp_buf, name_buf);
15372 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15374 output_asm_insn (tmp_buf, 0);
15375 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15376 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15377 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15378 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15379 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15382 branch_island_list = 0;
15385 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15386 already there or not. */
15389 no_previous_def (tree function_name)
15391 tree branch_island;
15392 for (branch_island = branch_island_list;
15394 branch_island = TREE_CHAIN (branch_island))
15395 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15400 /* GET_PREV_LABEL gets the label name from the previous definition of
15404 get_prev_label (tree function_name)
15406 tree branch_island;
15407 for (branch_island = branch_island_list;
15409 branch_island = TREE_CHAIN (branch_island))
15410 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15411 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15415 /* INSN is either a function call or a millicode call. It may have an
15416 unconditional jump in its delay slot.
15418 CALL_DEST is the routine we are calling. */
15421 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15423 static char buf[256];
15424 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15425 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15428 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15430 if (no_previous_def (funname))
15432 int line_number = 0;
15433 rtx label_rtx = gen_label_rtx ();
15434 char *label_buf, temp_buf[256];
15435 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15436 CODE_LABEL_NUMBER (label_rtx));
15437 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15438 labelname = get_identifier (label_buf);
15439 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15441 line_number = NOTE_LINE_NUMBER (insn);
15442 add_compiler_branch_island (labelname, funname, line_number);
15445 labelname = get_prev_label (funname);
15447 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15448 instruction will reach 'foo', otherwise link as 'bl L42'".
15449 "L42" should be a 'branch island', that will do a far jump to
15450 'foo'. Branch islands are generated in
15451 macho_branch_islands(). */
15452 sprintf (buf, "jbsr %%z%d,%.246s",
15453 dest_operand_number, IDENTIFIER_POINTER (labelname));
15456 sprintf (buf, "bl %%z%d", dest_operand_number);
15460 #endif /* TARGET_MACHO */
15462 /* Generate PIC and indirect symbol stubs. */
15465 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15467 unsigned int length;
15468 char *symbol_name, *lazy_ptr_name;
15469 char *local_label_0;
15470 static int label = 0;
15472 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15473 symb = (*targetm.strip_name_encoding) (symb);
15476 length = strlen (symb);
15477 symbol_name = alloca (length + 32);
15478 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15480 lazy_ptr_name = alloca (length + 32);
15481 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15484 machopic_picsymbol_stub1_section ();
15486 machopic_symbol_stub1_section ();
15487 fprintf (file, "\t.align 2\n");
15489 fprintf (file, "%s:\n", stub);
15490 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15495 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15496 sprintf (local_label_0, "\"L%011d$spb\"", label);
15498 fprintf (file, "\tmflr r0\n");
15499 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15500 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15501 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15502 lazy_ptr_name, local_label_0);
15503 fprintf (file, "\tmtlr r0\n");
15504 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15505 lazy_ptr_name, local_label_0);
15506 fprintf (file, "\tmtctr r12\n");
15507 fprintf (file, "\tbctr\n");
15511 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15512 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15513 fprintf (file, "\tmtctr r12\n");
15514 fprintf (file, "\tbctr\n");
15517 machopic_lazy_symbol_ptr_section ();
15518 fprintf (file, "%s:\n", lazy_ptr_name);
15519 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15520 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15523 /* Legitimize PIC addresses. If the address is already
15524 position-independent, we return ORIG. Newly generated
15525 position-independent addresses go into a reg. This is REG if non
15526 zero, otherwise we allocate register(s) as necessary. */
15528 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15531 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15536 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15537 reg = gen_reg_rtx (Pmode);
15539 if (GET_CODE (orig) == CONST)
15541 if (GET_CODE (XEXP (orig, 0)) == PLUS
15542 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15545 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15547 /* Use a different reg for the intermediate value, as
15548 it will be marked UNCHANGING. */
15549 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15552 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15555 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15561 if (GET_CODE (offset) == CONST_INT)
15563 if (SMALL_INT (offset))
15564 return plus_constant (base, INTVAL (offset));
15565 else if (! reload_in_progress && ! reload_completed)
15566 offset = force_reg (Pmode, offset);
15569 rtx mem = force_const_mem (Pmode, orig);
15570 return machopic_legitimize_pic_address (mem, Pmode, reg);
15573 return gen_rtx_PLUS (Pmode, base, offset);
15576 /* Fall back on generic machopic code. */
15577 return machopic_legitimize_pic_address (orig, mode, reg);
15580 /* This is just a placeholder to make linking work without having to
15581 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15582 ever needed for Darwin (not too likely!) this would have to get a
15583 real definition. */
15590 #endif /* TARGET_MACHO */
15593 static unsigned int
15594 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15596 return default_section_type_flags_1 (decl, name, reloc,
15597 flag_pic || DEFAULT_ABI == ABI_AIX);
15600 /* Record an element in the table of global constructors. SYMBOL is
15601 a SYMBOL_REF of the function to be called; PRIORITY is a number
15602 between 0 and MAX_INIT_PRIORITY.
15604 This differs from default_named_section_asm_out_constructor in
15605 that we have special handling for -mrelocatable. */
15608 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15610 const char *section = ".ctors";
15613 if (priority != DEFAULT_INIT_PRIORITY)
15615 sprintf (buf, ".ctors.%.5u",
15616 /* Invert the numbering so the linker puts us in the proper
15617 order; constructors are run from right to left, and the
15618 linker sorts in increasing order. */
15619 MAX_INIT_PRIORITY - priority);
15623 named_section_flags (section, SECTION_WRITE);
15624 assemble_align (POINTER_SIZE);
15626 if (TARGET_RELOCATABLE)
15628 fputs ("\t.long (", asm_out_file);
15629 output_addr_const (asm_out_file, symbol);
15630 fputs (")@fixup\n", asm_out_file);
15633 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15637 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15639 const char *section = ".dtors";
15642 if (priority != DEFAULT_INIT_PRIORITY)
15644 sprintf (buf, ".dtors.%.5u",
15645 /* Invert the numbering so the linker puts us in the proper
15646 order; constructors are run from right to left, and the
15647 linker sorts in increasing order. */
15648 MAX_INIT_PRIORITY - priority);
15652 named_section_flags (section, SECTION_WRITE);
15653 assemble_align (POINTER_SIZE);
15655 if (TARGET_RELOCATABLE)
15657 fputs ("\t.long (", asm_out_file);
15658 output_addr_const (asm_out_file, symbol);
15659 fputs (")@fixup\n", asm_out_file);
15662 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15666 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15670 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15671 ASM_OUTPUT_LABEL (file, name);
15672 fputs (DOUBLE_INT_ASM_OP, file);
15674 assemble_name (file, name);
15675 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15676 assemble_name (file, name);
15677 fputs (",24\n\t.type\t.", file);
15678 assemble_name (file, name);
15679 fputs (",@function\n", file);
15680 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15682 fputs ("\t.globl\t.", file);
15683 assemble_name (file, name);
15686 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15688 ASM_OUTPUT_LABEL (file, name);
15692 if (TARGET_RELOCATABLE
15693 && (get_pool_size () != 0 || current_function_profile)
15698 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15700 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15701 fprintf (file, "\t.long ");
15702 assemble_name (file, buf);
15704 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15705 assemble_name (file, buf);
15709 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15710 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15712 if (DEFAULT_ABI == ABI_AIX)
15714 const char *desc_name, *orig_name;
15716 orig_name = (*targetm.strip_name_encoding) (name);
15717 desc_name = orig_name;
15718 while (*desc_name == '.')
15721 if (TREE_PUBLIC (decl))
15722 fprintf (file, "\t.globl %s\n", desc_name);
15724 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15725 fprintf (file, "%s:\n", desc_name);
15726 fprintf (file, "\t.long %s\n", orig_name);
15727 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15728 if (DEFAULT_ABI == ABI_AIX)
15729 fputs ("\t.long 0\n", file);
15730 fprintf (file, "\t.previous\n");
15732 ASM_OUTPUT_LABEL (file, name);
15738 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15740 fputs (GLOBAL_ASM_OP, stream);
15741 RS6000_OUTPUT_BASENAME (stream, name);
15742 putc ('\n', stream);
15746 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15749 static const char * const suffix[3] = { "PR", "RO", "RW" };
15751 if (flags & SECTION_CODE)
15753 else if (flags & SECTION_WRITE)
15758 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15759 (flags & SECTION_CODE) ? "." : "",
15760 name, suffix[smclass], flags & SECTION_ENTSIZE);
15764 rs6000_xcoff_select_section (tree decl, int reloc,
15765 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15767 if (decl_readonly_section_1 (decl, reloc, 1))
15769 if (TREE_PUBLIC (decl))
15770 read_only_data_section ();
15772 read_only_private_data_section ();
15776 if (TREE_PUBLIC (decl))
15779 private_data_section ();
15784 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15788 /* Use select_section for private and uninitialized data. */
15789 if (!TREE_PUBLIC (decl)
15790 || DECL_COMMON (decl)
15791 || DECL_INITIAL (decl) == NULL_TREE
15792 || DECL_INITIAL (decl) == error_mark_node
15793 || (flag_zero_initialized_in_bss
15794 && initializer_zerop (DECL_INITIAL (decl))))
15797 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15798 name = (*targetm.strip_name_encoding) (name);
15799 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15802 /* Select section for constant in constant pool.
15804 On RS/6000, all constants are in the private read-only data area.
15805 However, if this is being placed in the TOC it must be output as a
15809 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15810 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15812 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15815 read_only_private_data_section ();
15818 /* Remove any trailing [DS] or the like from the symbol name. */
15820 static const char *
15821 rs6000_xcoff_strip_name_encoding (const char *name)
15826 len = strlen (name);
15827 if (name[len - 1] == ']')
15828 return ggc_alloc_string (name, len - 4);
15833 /* Section attributes. AIX is always PIC. */
15835 static unsigned int
15836 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15838 unsigned int align;
15839 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15841 /* Align to at least UNIT size. */
15842 if (flags & SECTION_CODE)
15843 align = MIN_UNITS_PER_WORD;
15845 /* Increase alignment of large objects if not already stricter. */
15846 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15847 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15848 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15850 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15853 /* Output at beginning of assembler file.
15855 Initialize the section names for the RS/6000 at this point.
15857 Specify filename, including full path, to assembler.
15859 We want to go into the TOC section so at least one .toc will be emitted.
15860 Also, in order to output proper .bs/.es pairs, we need at least one static
15861 [RW] section emitted.
15863 Finally, declare mcount when profiling to make the assembler happy. */
15866 rs6000_xcoff_file_start (void)
15868 rs6000_gen_section_name (&xcoff_bss_section_name,
15869 main_input_filename, ".bss_");
15870 rs6000_gen_section_name (&xcoff_private_data_section_name,
15871 main_input_filename, ".rw_");
15872 rs6000_gen_section_name (&xcoff_read_only_section_name,
15873 main_input_filename, ".ro_");
15875 fputs ("\t.file\t", asm_out_file);
15876 output_quoted_string (asm_out_file, main_input_filename);
15877 fputc ('\n', asm_out_file);
15879 if (write_symbols != NO_DEBUG)
15880 private_data_section ();
15883 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15884 rs6000_file_start ();
15887 /* Output at end of assembler file.
15888 On the RS/6000, referencing data should automatically pull in text. */
15891 rs6000_xcoff_file_end (void)
15894 fputs ("_section_.text:\n", asm_out_file);
15896 fputs (TARGET_32BIT
15897 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15900 #endif /* TARGET_XCOFF */
15903 /* Cross-module name binding. Darwin does not support overriding
15904 functions at dynamic-link time. */
15907 rs6000_binds_local_p (tree decl)
15909 return default_binds_local_p_1 (decl, 0);
15913 /* Compute a (partial) cost for rtx X. Return true if the complete
15914 cost has been computed, and false if subexpressions should be
15915 scanned. In either case, *TOTAL contains the cost result. */
15918 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15923 /* On the RS/6000, if it is valid in the insn, it is free.
15924 So this always returns 0. */
15935 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15936 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15937 + 0x8000) >= 0x10000)
15938 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15939 ? COSTS_N_INSNS (2)
15940 : COSTS_N_INSNS (1));
15946 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15947 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15948 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15949 ? COSTS_N_INSNS (2)
15950 : COSTS_N_INSNS (1));
15956 *total = COSTS_N_INSNS (2);
15959 switch (rs6000_cpu)
15961 case PROCESSOR_RIOS1:
15962 case PROCESSOR_PPC405:
15963 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15964 ? COSTS_N_INSNS (5)
15965 : (INTVAL (XEXP (x, 1)) >= -256
15966 && INTVAL (XEXP (x, 1)) <= 255)
15967 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15970 case PROCESSOR_PPC440:
15971 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15972 ? COSTS_N_INSNS (3)
15973 : COSTS_N_INSNS (2));
15976 case PROCESSOR_RS64A:
15977 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15978 ? GET_MODE (XEXP (x, 1)) != DImode
15979 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15980 : (INTVAL (XEXP (x, 1)) >= -256
15981 && INTVAL (XEXP (x, 1)) <= 255)
15982 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15985 case PROCESSOR_RIOS2:
15986 case PROCESSOR_MPCCORE:
15987 case PROCESSOR_PPC604e:
15988 *total = COSTS_N_INSNS (2);
15991 case PROCESSOR_PPC601:
15992 *total = COSTS_N_INSNS (5);
15995 case PROCESSOR_PPC603:
15996 case PROCESSOR_PPC7400:
15997 case PROCESSOR_PPC750:
15998 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15999 ? COSTS_N_INSNS (5)
16000 : (INTVAL (XEXP (x, 1)) >= -256
16001 && INTVAL (XEXP (x, 1)) <= 255)
16002 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
16005 case PROCESSOR_PPC7450:
16006 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16007 ? COSTS_N_INSNS (4)
16008 : COSTS_N_INSNS (3));
16011 case PROCESSOR_PPC403:
16012 case PROCESSOR_PPC604:
16013 case PROCESSOR_PPC8540:
16014 *total = COSTS_N_INSNS (4);
16017 case PROCESSOR_PPC620:
16018 case PROCESSOR_PPC630:
16019 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16020 ? GET_MODE (XEXP (x, 1)) != DImode
16021 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
16022 : (INTVAL (XEXP (x, 1)) >= -256
16023 && INTVAL (XEXP (x, 1)) <= 255)
16024 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16027 case PROCESSOR_POWER4:
16028 case PROCESSOR_POWER5:
16029 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16030 ? GET_MODE (XEXP (x, 1)) != DImode
16031 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
16032 : COSTS_N_INSNS (2));
16041 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16042 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16044 *total = COSTS_N_INSNS (2);
16051 switch (rs6000_cpu)
16053 case PROCESSOR_RIOS1:
16054 *total = COSTS_N_INSNS (19);
16057 case PROCESSOR_RIOS2:
16058 *total = COSTS_N_INSNS (13);
16061 case PROCESSOR_RS64A:
16062 *total = (GET_MODE (XEXP (x, 1)) != DImode
16063 ? COSTS_N_INSNS (65)
16064 : COSTS_N_INSNS (67));
16067 case PROCESSOR_MPCCORE:
16068 *total = COSTS_N_INSNS (6);
16071 case PROCESSOR_PPC403:
16072 *total = COSTS_N_INSNS (33);
16075 case PROCESSOR_PPC405:
16076 *total = COSTS_N_INSNS (35);
16079 case PROCESSOR_PPC440:
16080 *total = COSTS_N_INSNS (34);
16083 case PROCESSOR_PPC601:
16084 *total = COSTS_N_INSNS (36);
16087 case PROCESSOR_PPC603:
16088 *total = COSTS_N_INSNS (37);
16091 case PROCESSOR_PPC604:
16092 case PROCESSOR_PPC604e:
16093 *total = COSTS_N_INSNS (20);
16096 case PROCESSOR_PPC620:
16097 case PROCESSOR_PPC630:
16098 *total = (GET_MODE (XEXP (x, 1)) != DImode
16099 ? COSTS_N_INSNS (21)
16100 : COSTS_N_INSNS (37));
16103 case PROCESSOR_PPC750:
16104 case PROCESSOR_PPC8540:
16105 case PROCESSOR_PPC7400:
16106 *total = COSTS_N_INSNS (19);
16109 case PROCESSOR_PPC7450:
16110 *total = COSTS_N_INSNS (23);
16113 case PROCESSOR_POWER4:
16114 case PROCESSOR_POWER5:
16115 *total = (GET_MODE (XEXP (x, 1)) != DImode
16116 ? COSTS_N_INSNS (18)
16117 : COSTS_N_INSNS (34));
16125 *total = COSTS_N_INSNS (4);
16129 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16138 /* A C expression returning the cost of moving data from a register of class
16139 CLASS1 to one of CLASS2. */
16142 rs6000_register_move_cost (enum machine_mode mode,
16143 enum reg_class from, enum reg_class to)
16145 /* Moves from/to GENERAL_REGS. */
16146 if (reg_classes_intersect_p (to, GENERAL_REGS)
16147 || reg_classes_intersect_p (from, GENERAL_REGS))
16149 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16152 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16153 return (rs6000_memory_move_cost (mode, from, 0)
16154 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16156 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16157 else if (from == CR_REGS)
16161 /* A move will cost one instruction per GPR moved. */
16162 return 2 * HARD_REGNO_NREGS (0, mode);
16165 /* Moving between two similar registers is just one instruction. */
16166 else if (reg_classes_intersect_p (to, from))
16167 return mode == TFmode ? 4 : 2;
16169 /* Everything else has to go through GENERAL_REGS. */
16171 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16172 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16175 /* A C expressions returning the cost of moving data of MODE from a register to
16179 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16180 int in ATTRIBUTE_UNUSED)
16182 if (reg_classes_intersect_p (class, GENERAL_REGS))
16183 return 4 * HARD_REGNO_NREGS (0, mode);
16184 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16185 return 4 * HARD_REGNO_NREGS (32, mode);
16186 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16187 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16189 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16192 /* Return an RTX representing where to find the function value of a
16193 function returning MODE. */
16195 rs6000_complex_function_value (enum machine_mode mode)
16197 unsigned int regno;
16199 enum machine_mode inner = GET_MODE_INNER (mode);
16200 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16202 if (FLOAT_MODE_P (mode))
16203 regno = FP_ARG_RETURN;
16206 regno = GP_ARG_RETURN;
16208 /* 32-bit is OK since it'll go in r3/r4. */
16209 if (TARGET_32BIT && inner_bytes >= 4)
16210 return gen_rtx_REG (mode, regno);
16213 if (inner_bytes >= 8)
16214 return gen_rtx_REG (mode, regno);
16216 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16218 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16219 GEN_INT (inner_bytes));
16220 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16223 /* Define how to find the value returned by a function.
16224 VALTYPE is the data type of the value (as a tree).
16225 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16226 otherwise, FUNC is 0.
16228 On the SPE, both FPs and vectors are returned in r3.
16230 On RS/6000 an integer value is in r3 and a floating-point value is in
16231 fp1, unless -msoft-float. */
16234 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16236 enum machine_mode mode;
16237 unsigned int regno;
16239 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16241 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16242 return gen_rtx_PARALLEL (DImode,
16244 gen_rtx_EXPR_LIST (VOIDmode,
16245 gen_rtx_REG (SImode, GP_ARG_RETURN),
16247 gen_rtx_EXPR_LIST (VOIDmode,
16248 gen_rtx_REG (SImode,
16249 GP_ARG_RETURN + 1),
16253 if ((INTEGRAL_TYPE_P (valtype)
16254 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16255 || POINTER_TYPE_P (valtype))
16256 mode = TARGET_32BIT ? SImode : DImode;
16258 mode = TYPE_MODE (valtype);
16260 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
16261 regno = FP_ARG_RETURN;
16262 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16263 && TARGET_HARD_FLOAT
16264 && targetm.calls.split_complex_arg)
16265 return rs6000_complex_function_value (mode);
16266 else if (TREE_CODE (valtype) == VECTOR_TYPE
16267 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16268 regno = ALTIVEC_ARG_RETURN;
16270 regno = GP_ARG_RETURN;
16272 return gen_rtx_REG (mode, regno);
16275 /* Define how to find the value returned by a library function
16276 assuming the value has mode MODE. */
16278 rs6000_libcall_value (enum machine_mode mode)
16280 unsigned int regno;
16282 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16283 && TARGET_HARD_FLOAT && TARGET_FPRS)
16284 regno = FP_ARG_RETURN;
16285 else if (ALTIVEC_VECTOR_MODE (mode)
16286 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16287 regno = ALTIVEC_ARG_RETURN;
16288 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16289 return rs6000_complex_function_value (mode);
16291 regno = GP_ARG_RETURN;
16293 return gen_rtx_REG (mode, regno);
16296 /* Define the offset between two registers, FROM to be eliminated and its
16297 replacement TO, at the start of a routine. */
16299 rs6000_initial_elimination_offset (int from, int to)
16301 rs6000_stack_t *info = rs6000_stack_info ();
16302 HOST_WIDE_INT offset;
16304 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16305 offset = info->push_p ? 0 : -info->total_size;
16306 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16307 offset = info->total_size;
16308 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16309 offset = info->push_p ? info->total_size : 0;
16310 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16318 /* Return true if TYPE is of type __ev64_opaque__. */
16321 is_ev64_opaque_type (tree type)
16324 && (type == opaque_V2SI_type_node
16325 || type == opaque_V2SF_type_node
16326 || type == opaque_p_V2SI_type_node));
16330 rs6000_dwarf_register_span (rtx reg)
16334 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16337 regno = REGNO (reg);
16339 /* The duality of the SPE register size wreaks all kinds of havoc.
16340 This is a way of distinguishing r0 in 32-bits from r0 in
16343 gen_rtx_PARALLEL (VOIDmode,
16346 gen_rtx_REG (SImode, regno + 1200),
16347 gen_rtx_REG (SImode, regno))
16349 gen_rtx_REG (SImode, regno),
16350 gen_rtx_REG (SImode, regno + 1200)));
16353 /* Map internal gcc register numbers to DWARF2 register numbers. */
16356 rs6000_dbx_register_number (unsigned int regno)
16358 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16360 if (regno == MQ_REGNO)
16362 if (regno == LINK_REGISTER_REGNUM)
16364 if (regno == COUNT_REGISTER_REGNUM)
16366 if (CR_REGNO_P (regno))
16367 return regno - CR0_REGNO + 86;
16368 if (regno == XER_REGNO)
16370 if (ALTIVEC_REGNO_P (regno))
16371 return regno - FIRST_ALTIVEC_REGNO + 1124;
16372 if (regno == VRSAVE_REGNO)
16374 if (regno == VSCR_REGNO)
16376 if (regno == SPE_ACC_REGNO)
16378 if (regno == SPEFSCR_REGNO)
16380 /* SPE high reg number. We get these values of regno from
16381 rs6000_dwarf_register_span. */
16382 if (regno >= 1200 && regno < 1232)
16388 #include "gt-rs6000.h"