1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
60 #ifndef TARGET_NO_PROTOTYPE
61 #define TARGET_NO_PROTOTYPE 0
64 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
65 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 enum rs6000_abi abi; /* which ABI to use */
83 int gp_save_offset; /* offset to save GP regs from initial SP */
84 int fp_save_offset; /* offset to save FP regs from initial SP */
85 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
86 int lr_save_offset; /* offset to save LR from initial SP */
87 int cr_save_offset; /* offset to save CR from initial SP */
88 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
89 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
90 int toc_save_offset; /* offset to save the TOC pointer */
91 int varargs_save_offset; /* offset to save the varargs registers */
92 int ehrd_offset; /* offset to EH return data */
93 int reg_size; /* register size (4 or 8) */
94 int varargs_size; /* size to hold V.4 args passed in regs */
95 HOST_WIDE_INT vars_size; /* variable save area size */
96 int parm_size; /* outgoing parameter size */
97 int save_size; /* save area size */
98 int fixed_size; /* fixed size of stack frame */
99 int gp_size; /* size of saved GP registers */
100 int fp_size; /* size of saved FP registers */
101 int altivec_size; /* size of saved AltiVec registers */
102 int cr_size; /* size to hold CR if not in save_size */
103 int lr_size; /* size to hold LR if not in save_size */
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
109 int toc_size; /* size to hold TOC if not in save_size */
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
114 /* Target cpu type */
116 enum processor_type rs6000_cpu;
117 struct rs6000_cpu_select rs6000_select[3] =
119 /* switch name, tune arch */
120 { (const char *)0, "--with-cpu=", 1, 1 },
121 { (const char *)0, "-mcpu=", 1, 1 },
122 { (const char *)0, "-mtune=", 1, 0 },
125 /* Always emit branch hint bits. */
126 static GTY(()) bool rs6000_always_hint;
128 /* Schedule instructions for group formation. */
129 static GTY(()) bool rs6000_sched_groups;
131 /* Support adjust_priority scheduler hook
132 and -mprioritize-restricted-insns= option. */
133 const char *rs6000_sched_restricted_insns_priority_str;
134 int rs6000_sched_restricted_insns_priority;
136 /* Support for -msched-costly-dep option. */
137 const char *rs6000_sched_costly_dep_str;
138 enum rs6000_dependence_cost rs6000_sched_costly_dep;
140 /* Support for -minsert-sched-nops option. */
141 const char *rs6000_sched_insert_nops_str;
142 enum rs6000_nop_insertion rs6000_sched_insert_nops;
144 /* Size of long double */
145 const char *rs6000_long_double_size_string;
146 int rs6000_long_double_type_size;
148 /* Whether -mabi=altivec has appeared */
149 int rs6000_altivec_abi;
151 /* Whether VRSAVE instructions should be generated. */
152 int rs6000_altivec_vrsave;
154 /* String from -mvrsave= option. */
155 const char *rs6000_altivec_vrsave_string;
157 /* Nonzero if we want SPE ABI extensions. */
160 /* Whether isel instructions should be generated. */
163 /* Whether SPE simd instructions should be generated. */
166 /* Nonzero if floating point operations are done in the GPRs. */
167 int rs6000_float_gprs = 0;
169 /* String from -mfloat-gprs=. */
170 const char *rs6000_float_gprs_string;
172 /* String from -misel=. */
173 const char *rs6000_isel_string;
175 /* String from -mspe=. */
176 const char *rs6000_spe_string;
178 /* Set to nonzero once AIX common-mode calls have been defined. */
179 static GTY(()) int common_mode_defined;
181 /* Save information from a "cmpxx" operation until the branch or scc is
183 rtx rs6000_compare_op0, rs6000_compare_op1;
184 int rs6000_compare_fp_p;
186 /* Label number of label created for -mrelocatable, to call to so we can
187 get the address of the GOT section */
188 int rs6000_pic_labelno;
191 /* Which abi to adhere to */
192 const char *rs6000_abi_name;
194 /* Semantics of the small data area */
195 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
197 /* Which small data model to use */
198 const char *rs6000_sdata_name = (char *)0;
200 /* Counter for labels which are to be placed in .fixup. */
201 int fixuplabelno = 0;
204 /* Bit size of immediate TLS offsets and string from which it is decoded. */
205 int rs6000_tls_size = 32;
206 const char *rs6000_tls_size_string;
208 /* ABI enumeration available for subtarget to use. */
209 enum rs6000_abi rs6000_current_abi;
211 /* ABI string from -mabi= option. */
212 const char *rs6000_abi_string;
215 const char *rs6000_debug_name;
216 int rs6000_debug_stack; /* debug stack applications */
217 int rs6000_debug_arg; /* debug argument handling */
219 /* Value is TRUE if register/mode pair is accepatable. */
220 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
223 static GTY(()) tree opaque_V2SI_type_node;
224 static GTY(()) tree opaque_V2SF_type_node;
225 static GTY(()) tree opaque_p_V2SI_type_node;
226 static GTY(()) tree V16QI_type_node;
227 static GTY(()) tree V2SI_type_node;
228 static GTY(()) tree V2SF_type_node;
229 static GTY(()) tree V4HI_type_node;
230 static GTY(()) tree V4SI_type_node;
231 static GTY(()) tree V4SF_type_node;
232 static GTY(()) tree V8HI_type_node;
233 static GTY(()) tree unsigned_V16QI_type_node;
234 static GTY(()) tree unsigned_V8HI_type_node;
235 static GTY(()) tree unsigned_V4SI_type_node;
236 static GTY(()) tree bool_char_type_node; /* __bool char */
237 static GTY(()) tree bool_short_type_node; /* __bool short */
238 static GTY(()) tree bool_int_type_node; /* __bool int */
239 static GTY(()) tree pixel_type_node; /* __pixel */
240 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
241 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
242 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
243 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
245 int rs6000_warn_altivec_long = 1; /* On by default. */
246 const char *rs6000_warn_altivec_long_switch;
248 const char *rs6000_traceback_name;
250 traceback_default = 0,
256 /* Flag to say the TOC is initialized */
258 char toc_label_name[10];
260 /* Alias set for saves and restores from the rs6000 stack. */
261 static GTY(()) int rs6000_sr_alias_set;
263 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
264 The only place that looks at this is rs6000_set_default_type_attributes;
265 everywhere else should rely on the presence or absence of a longcall
266 attribute on the function declaration. */
267 int rs6000_default_long_calls;
268 const char *rs6000_longcall_switch;
270 /* Control alignment for fields within structures. */
271 /* String from -malign-XXXXX. */
272 const char *rs6000_alignment_string;
273 int rs6000_alignment_flags;
275 struct builtin_description
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
286 static bool rs6000_function_ok_for_sibcall (tree, tree);
287 static int num_insns_constant_wide (HOST_WIDE_INT);
288 static void validate_condition_mode (enum rtx_code, enum machine_mode);
289 static rtx rs6000_generate_compare (enum rtx_code);
290 static void rs6000_maybe_dead (rtx);
291 static void rs6000_emit_stack_tie (void);
292 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
293 static rtx spe_synthesize_frame_save (rtx);
294 static bool spe_func_has_64bit_regs_p (void);
295 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
297 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
298 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
299 static unsigned rs6000_hash_constant (rtx);
300 static unsigned toc_hash_function (const void *);
301 static int toc_hash_eq (const void *, const void *);
302 static int constant_pool_expr_1 (rtx, int *, int *);
303 static bool constant_pool_expr_p (rtx);
304 static bool toc_relative_expr_p (rtx);
305 static bool legitimate_small_data_p (enum machine_mode, rtx);
306 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
307 static bool legitimate_indexed_address_p (rtx, int);
308 static bool legitimate_indirect_address_p (rtx, int);
309 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
310 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
311 static struct machine_function * rs6000_init_machine_status (void);
312 static bool rs6000_assemble_integer (rtx, unsigned int, int);
313 #ifdef HAVE_GAS_HIDDEN
314 static void rs6000_assemble_visibility (tree, int);
316 static int rs6000_ra_ever_killed (void);
317 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
318 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
319 static const char *rs6000_mangle_fundamental_type (tree);
320 extern const struct attribute_spec rs6000_attribute_table[];
321 static void rs6000_set_default_type_attributes (tree);
322 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
323 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
324 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
326 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
327 static bool rs6000_return_in_memory (tree, tree);
328 static void rs6000_file_start (void);
330 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
331 static void rs6000_elf_asm_out_constructor (rtx, int);
332 static void rs6000_elf_asm_out_destructor (rtx, int);
333 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
334 static void rs6000_elf_unique_section (tree, int);
335 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
336 unsigned HOST_WIDE_INT);
337 static void rs6000_elf_encode_section_info (tree, rtx, int)
339 static bool rs6000_elf_in_small_data_p (tree);
342 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
343 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
344 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
345 static void rs6000_xcoff_unique_section (tree, int);
346 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
347 unsigned HOST_WIDE_INT);
348 static const char * rs6000_xcoff_strip_name_encoding (const char *);
349 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
350 static void rs6000_xcoff_file_start (void);
351 static void rs6000_xcoff_file_end (void);
354 static bool rs6000_binds_local_p (tree);
356 static int rs6000_use_dfa_pipeline_interface (void);
357 static int rs6000_variable_issue (FILE *, int, rtx, int);
358 static bool rs6000_rtx_costs (rtx, int, int, int *);
359 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
360 static bool is_microcoded_insn (rtx);
361 static int is_dispatch_slot_restricted (rtx);
362 static bool is_cracked_insn (rtx);
363 static bool is_branch_slot_insn (rtx);
364 static int rs6000_adjust_priority (rtx, int);
365 static int rs6000_issue_rate (void);
366 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
367 static rtx get_next_active_insn (rtx, rtx);
368 static bool insn_terminates_group_p (rtx , enum group_termination);
369 static bool is_costly_group (rtx *, rtx);
370 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
371 static int redefine_groups (FILE *, int, rtx, rtx);
372 static int pad_groups (FILE *, int, rtx, rtx);
373 static void rs6000_sched_finish (FILE *, int);
374 static int rs6000_use_sched_lookahead (void);
376 static void rs6000_init_builtins (void);
377 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
378 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
379 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
380 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
381 static void altivec_init_builtins (void);
382 static void rs6000_common_init_builtins (void);
383 static void rs6000_init_libfuncs (void);
385 static void enable_mask_for_builtins (struct builtin_description *, int,
386 enum rs6000_builtins,
387 enum rs6000_builtins);
388 static tree build_opaque_vector_type (tree, int);
389 static void spe_init_builtins (void);
390 static rtx spe_expand_builtin (tree, rtx, bool *);
391 static rtx spe_expand_stv_builtin (enum insn_code, tree);
392 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
393 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
394 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
395 static rs6000_stack_t *rs6000_stack_info (void);
396 static void debug_stack_info (rs6000_stack_t *);
398 static rtx altivec_expand_builtin (tree, rtx, bool *);
399 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
400 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
401 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
402 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
403 static rtx altivec_expand_predicate_builtin (enum insn_code,
404 const char *, tree, rtx);
405 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
406 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
407 static void rs6000_parse_abi_options (void);
408 static void rs6000_parse_alignment_option (void);
409 static void rs6000_parse_tls_size_option (void);
410 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
411 static int first_altivec_reg_to_save (void);
412 static unsigned int compute_vrsave_mask (void);
413 static void is_altivec_return_reg (rtx, void *);
414 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
415 int easy_vector_constant (rtx, enum machine_mode);
416 static int easy_vector_same (rtx, enum machine_mode);
417 static int easy_vector_splat_const (int, enum machine_mode);
418 static bool is_ev64_opaque_type (tree);
419 static rtx rs6000_dwarf_register_span (rtx);
420 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
421 static rtx rs6000_tls_get_addr (void);
422 static rtx rs6000_got_sym (void);
423 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
424 static const char *rs6000_get_some_local_dynamic_name (void);
425 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
426 static rtx rs6000_complex_function_value (enum machine_mode);
427 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
428 enum machine_mode, tree);
429 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
430 enum machine_mode, tree, int);
431 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
432 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
433 enum machine_mode, tree,
436 static void macho_branch_islands (void);
437 static void add_compiler_branch_island (tree, tree, int);
438 static int no_previous_def (tree function_name);
439 static tree get_prev_label (tree function_name);
442 static tree rs6000_build_builtin_va_list (void);
443 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
445 /* Hash table stuff for keeping track of TOC entries. */
447 struct toc_hash_struct GTY(())
449 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
450 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
452 enum machine_mode key_mode;
456 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
458 /* Default register names. */
459 char rs6000_reg_names[][8] =
461 "0", "1", "2", "3", "4", "5", "6", "7",
462 "8", "9", "10", "11", "12", "13", "14", "15",
463 "16", "17", "18", "19", "20", "21", "22", "23",
464 "24", "25", "26", "27", "28", "29", "30", "31",
465 "0", "1", "2", "3", "4", "5", "6", "7",
466 "8", "9", "10", "11", "12", "13", "14", "15",
467 "16", "17", "18", "19", "20", "21", "22", "23",
468 "24", "25", "26", "27", "28", "29", "30", "31",
469 "mq", "lr", "ctr","ap",
470 "0", "1", "2", "3", "4", "5", "6", "7",
472 /* AltiVec registers. */
473 "0", "1", "2", "3", "4", "5", "6", "7",
474 "8", "9", "10", "11", "12", "13", "14", "15",
475 "16", "17", "18", "19", "20", "21", "22", "23",
476 "24", "25", "26", "27", "28", "29", "30", "31",
482 #ifdef TARGET_REGNAMES
483 static const char alt_reg_names[][8] =
485 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
486 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
487 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
488 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
489 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
490 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
491 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
492 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
493 "mq", "lr", "ctr", "ap",
494 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
496 /* AltiVec registers. */
497 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
498 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
499 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
500 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
507 #ifndef MASK_STRICT_ALIGN
508 #define MASK_STRICT_ALIGN 0
510 #ifndef TARGET_PROFILE_KERNEL
511 #define TARGET_PROFILE_KERNEL 0
514 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
515 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
517 /* Return 1 for a symbol ref for a thread-local storage symbol. */
518 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
519 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
521 /* Initialize the GCC target structure. */
522 #undef TARGET_ATTRIBUTE_TABLE
523 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
524 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
525 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
527 #undef TARGET_ASM_ALIGNED_DI_OP
528 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
530 /* Default unaligned ops are only provided for ELF. Find the ops needed
531 for non-ELF systems. */
532 #ifndef OBJECT_FORMAT_ELF
534 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
536 #undef TARGET_ASM_UNALIGNED_HI_OP
537 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
538 #undef TARGET_ASM_UNALIGNED_SI_OP
539 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
540 #undef TARGET_ASM_UNALIGNED_DI_OP
541 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
544 #undef TARGET_ASM_UNALIGNED_HI_OP
545 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
546 #undef TARGET_ASM_UNALIGNED_SI_OP
547 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
551 /* This hook deals with fixups for relocatable code and DI-mode objects
553 #undef TARGET_ASM_INTEGER
554 #define TARGET_ASM_INTEGER rs6000_assemble_integer
556 #ifdef HAVE_GAS_HIDDEN
557 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
558 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
561 #undef TARGET_HAVE_TLS
562 #define TARGET_HAVE_TLS HAVE_AS_TLS
564 #undef TARGET_CANNOT_FORCE_CONST_MEM
565 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
567 #undef TARGET_ASM_FUNCTION_PROLOGUE
568 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
569 #undef TARGET_ASM_FUNCTION_EPILOGUE
570 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
572 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
573 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
574 #undef TARGET_SCHED_VARIABLE_ISSUE
575 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
577 #undef TARGET_SCHED_ISSUE_RATE
578 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
579 #undef TARGET_SCHED_ADJUST_COST
580 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
581 #undef TARGET_SCHED_ADJUST_PRIORITY
582 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
583 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
584 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
585 #undef TARGET_SCHED_FINISH
586 #define TARGET_SCHED_FINISH rs6000_sched_finish
588 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
589 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
591 #undef TARGET_INIT_BUILTINS
592 #define TARGET_INIT_BUILTINS rs6000_init_builtins
594 #undef TARGET_EXPAND_BUILTIN
595 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
597 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
598 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
600 #undef TARGET_INIT_LIBFUNCS
601 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
604 #undef TARGET_BINDS_LOCAL_P
605 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
608 #undef TARGET_ASM_OUTPUT_MI_THUNK
609 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
611 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
612 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
614 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
615 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
617 #undef TARGET_RTX_COSTS
618 #define TARGET_RTX_COSTS rs6000_rtx_costs
619 #undef TARGET_ADDRESS_COST
620 #define TARGET_ADDRESS_COST hook_int_rtx_0
622 #undef TARGET_VECTOR_OPAQUE_P
623 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
625 #undef TARGET_DWARF_REGISTER_SPAN
626 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
628 /* On rs6000, function arguments are promoted, as are function return
630 #undef TARGET_PROMOTE_FUNCTION_ARGS
631 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
632 #undef TARGET_PROMOTE_FUNCTION_RETURN
633 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
635 #undef TARGET_RETURN_IN_MEMORY
636 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
638 #undef TARGET_SETUP_INCOMING_VARARGS
639 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
641 /* Always strict argument naming on rs6000. */
642 #undef TARGET_STRICT_ARGUMENT_NAMING
643 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
644 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
645 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
646 #undef TARGET_SPLIT_COMPLEX_ARG
647 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
649 #undef TARGET_BUILD_BUILTIN_VA_LIST
650 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
652 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
653 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
655 struct gcc_target targetm = TARGET_INITIALIZER;
658 /* Value is 1 if hard register REGNO can hold a value of machine-mode
661 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
663 /* The GPRs can hold any mode, but values bigger than one register
664 cannot go past R31. */
665 if (INT_REGNO_P (regno))
666 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
668 /* The float registers can only hold floating modes and DImode. */
669 if (FP_REGNO_P (regno))
671 (GET_MODE_CLASS (mode) == MODE_FLOAT
672 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
673 || (GET_MODE_CLASS (mode) == MODE_INT
674 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
676 /* The CR register can only hold CC modes. */
677 if (CR_REGNO_P (regno))
678 return GET_MODE_CLASS (mode) == MODE_CC;
680 if (XER_REGNO_P (regno))
681 return mode == PSImode;
683 /* AltiVec only in AldyVec registers. */
684 if (ALTIVEC_REGNO_P (regno))
685 return ALTIVEC_VECTOR_MODE (mode);
687 /* ...but GPRs can hold SIMD data on the SPE in one register. */
688 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
691 /* We cannot put TImode anywhere except general register and it must be
692 able to fit within the register set. */
694 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
697 /* Initialize rs6000_hard_regno_mode_ok_p table. */
699 rs6000_init_hard_regno_mode_ok (void)
703 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
704 for (m = 0; m < NUM_MACHINE_MODES; ++m)
705 if (rs6000_hard_regno_mode_ok (r, m))
706 rs6000_hard_regno_mode_ok_p[m][r] = true;
709 /* Override command line options. Mostly we process the processor
710 type and sometimes adjust other TARGET_ options. */
713 rs6000_override_options (const char *default_cpu)
716 struct rs6000_cpu_select *ptr;
719 /* Simplifications for entries below. */
722 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
723 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
726 /* This table occasionally claims that a processor does not support
727 a particular feature even though it does, but the feature is slower
728 than the alternative. Thus, it shouldn't be relied on as a
729 complete description of the processor's support.
731 Please keep this list in order, and don't forget to update the
732 documentation in invoke.texi when adding a new processor or
736 const char *const name; /* Canonical processor name. */
737 const enum processor_type processor; /* Processor type enum value. */
738 const int target_enable; /* Target flags to enable. */
739 } const processor_target_table[]
740 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
741 {"403", PROCESSOR_PPC403,
742 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
743 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
744 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
745 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
746 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
747 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
748 {"601", PROCESSOR_PPC601,
749 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
750 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
751 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
752 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
753 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
754 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
755 {"620", PROCESSOR_PPC620,
756 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
757 {"630", PROCESSOR_PPC630,
758 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
759 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
760 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
761 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
762 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
763 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
764 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
765 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
766 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
767 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
768 {"970", PROCESSOR_POWER4,
769 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
770 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
771 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
772 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
773 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
774 {"G5", PROCESSOR_POWER4,
775 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
776 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
777 {"power2", PROCESSOR_POWER,
778 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
779 {"power3", PROCESSOR_PPC630,
780 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
781 {"power4", PROCESSOR_POWER4,
782 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
783 {"power5", PROCESSOR_POWER5,
784 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
785 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
786 {"powerpc64", PROCESSOR_POWERPC64,
787 POWERPC_BASE_MASK | MASK_POWERPC64},
788 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
789 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
790 {"rios2", PROCESSOR_RIOS2,
791 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
792 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
793 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
794 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
797 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
799 /* Some OSs don't support saving the high part of 64-bit registers on
800 context switch. Other OSs don't support saving Altivec registers.
801 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
802 settings; if the user wants either, the user must explicitly specify
803 them and we won't interfere with the user's specification. */
806 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
807 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
808 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
812 rs6000_init_hard_regno_mode_ok ();
814 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
815 #ifdef OS_MISSING_POWERPC64
816 if (OS_MISSING_POWERPC64)
817 set_masks &= ~MASK_POWERPC64;
819 #ifdef OS_MISSING_ALTIVEC
820 if (OS_MISSING_ALTIVEC)
821 set_masks &= ~MASK_ALTIVEC;
824 /* Don't override these by the processor default if given explicitly. */
825 set_masks &= ~(target_flags_explicit
826 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
828 /* Identify the processor type. */
829 rs6000_select[0].string = default_cpu;
830 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
832 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
834 ptr = &rs6000_select[i];
835 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
837 for (j = 0; j < ptt_size; j++)
838 if (! strcmp (ptr->string, processor_target_table[j].name))
841 rs6000_cpu = processor_target_table[j].processor;
845 target_flags &= ~set_masks;
846 target_flags |= (processor_target_table[j].target_enable
853 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
860 /* If we are optimizing big endian systems for space, use the load/store
861 multiple and string instructions. */
862 if (BYTES_BIG_ENDIAN && optimize_size)
863 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
865 /* Don't allow -mmultiple or -mstring on little endian systems
866 unless the cpu is a 750, because the hardware doesn't support the
867 instructions used in little endian mode, and causes an alignment
868 trap. The 750 does not cause an alignment trap (except when the
869 target is unaligned). */
871 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
875 target_flags &= ~MASK_MULTIPLE;
876 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
877 warning ("-mmultiple is not supported on little endian systems");
882 target_flags &= ~MASK_STRING;
883 if ((target_flags_explicit & MASK_STRING) != 0)
884 warning ("-mstring is not supported on little endian systems");
888 /* Set debug flags */
889 if (rs6000_debug_name)
891 if (! strcmp (rs6000_debug_name, "all"))
892 rs6000_debug_stack = rs6000_debug_arg = 1;
893 else if (! strcmp (rs6000_debug_name, "stack"))
894 rs6000_debug_stack = 1;
895 else if (! strcmp (rs6000_debug_name, "arg"))
896 rs6000_debug_arg = 1;
898 error ("unknown -mdebug-%s switch", rs6000_debug_name);
901 if (rs6000_traceback_name)
903 if (! strncmp (rs6000_traceback_name, "full", 4))
904 rs6000_traceback = traceback_full;
905 else if (! strncmp (rs6000_traceback_name, "part", 4))
906 rs6000_traceback = traceback_part;
907 else if (! strncmp (rs6000_traceback_name, "no", 2))
908 rs6000_traceback = traceback_none;
910 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
911 rs6000_traceback_name);
914 /* Set size of long double */
915 rs6000_long_double_type_size = 64;
916 if (rs6000_long_double_size_string)
919 int size = strtol (rs6000_long_double_size_string, &tail, 10);
920 if (*tail != '\0' || (size != 64 && size != 128))
921 error ("Unknown switch -mlong-double-%s",
922 rs6000_long_double_size_string);
924 rs6000_long_double_type_size = size;
927 /* Set Altivec ABI as default for powerpc64 linux. */
928 if (TARGET_ELF && TARGET_64BIT)
930 rs6000_altivec_abi = 1;
931 rs6000_altivec_vrsave = 1;
934 /* Handle -mabi= options. */
935 rs6000_parse_abi_options ();
937 /* Handle -malign-XXXXX option. */
938 rs6000_parse_alignment_option ();
940 /* Handle generic -mFOO=YES/NO options. */
941 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
942 &rs6000_altivec_vrsave);
943 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
945 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
946 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
949 /* Handle -mtls-size option. */
950 rs6000_parse_tls_size_option ();
952 #ifdef SUBTARGET_OVERRIDE_OPTIONS
953 SUBTARGET_OVERRIDE_OPTIONS;
955 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
956 SUBSUBTARGET_OVERRIDE_OPTIONS;
962 error ("AltiVec and E500 instructions cannot coexist");
964 /* The e500 does not have string instructions, and we set
965 MASK_STRING above when optimizing for size. */
966 if ((target_flags & MASK_STRING) != 0)
967 target_flags = target_flags & ~MASK_STRING;
969 /* No SPE means 64-bit long doubles, even if an E500. */
970 if (rs6000_spe_string != 0
971 && !strcmp (rs6000_spe_string, "no"))
972 rs6000_long_double_type_size = 64;
974 else if (rs6000_select[1].string != NULL)
976 /* For the powerpc-eabispe configuration, we set all these by
977 default, so let's unset them if we manually set another
978 CPU that is not the E500. */
979 if (rs6000_abi_string == 0)
981 if (rs6000_spe_string == 0)
983 if (rs6000_float_gprs_string == 0)
984 rs6000_float_gprs = 0;
985 if (rs6000_isel_string == 0)
987 if (rs6000_long_double_size_string == 0)
988 rs6000_long_double_type_size = 64;
991 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
992 && rs6000_cpu != PROCESSOR_POWER5);
993 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
994 || rs6000_cpu == PROCESSOR_POWER5);
996 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
997 using TARGET_OPTIONS to handle a toggle switch, but we're out of
998 bits in target_flags so TARGET_SWITCHES cannot be used.
999 Assumption here is that rs6000_longcall_switch points into the
1000 text of the complete option, rather than being a copy, so we can
1001 scan back for the presence or absence of the no- modifier. */
1002 if (rs6000_longcall_switch)
1004 const char *base = rs6000_longcall_switch;
1005 while (base[-1] != 'm') base--;
1007 if (*rs6000_longcall_switch != '\0')
1008 error ("invalid option `%s'", base);
1009 rs6000_default_long_calls = (base[0] != 'n');
1012 /* Handle -m(no-)warn-altivec-long similarly. */
1013 if (rs6000_warn_altivec_long_switch)
1015 const char *base = rs6000_warn_altivec_long_switch;
1016 while (base[-1] != 'm') base--;
1018 if (*rs6000_warn_altivec_long_switch != '\0')
1019 error ("invalid option `%s'", base);
1020 rs6000_warn_altivec_long = (base[0] != 'n');
1023 /* Handle -mprioritize-restricted-insns option. */
1024 rs6000_sched_restricted_insns_priority
1025 = (rs6000_sched_groups ? 1 : 0);
1026 if (rs6000_sched_restricted_insns_priority_str)
1027 rs6000_sched_restricted_insns_priority =
1028 atoi (rs6000_sched_restricted_insns_priority_str);
1030 /* Handle -msched-costly-dep option. */
1031 rs6000_sched_costly_dep
1032 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1033 if (rs6000_sched_costly_dep_str)
1035 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1036 rs6000_sched_costly_dep = no_dep_costly;
1037 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1038 rs6000_sched_costly_dep = all_deps_costly;
1039 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1040 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1041 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1042 rs6000_sched_costly_dep = store_to_load_dep_costly;
1044 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1047 /* Handle -minsert-sched-nops option. */
1048 rs6000_sched_insert_nops
1049 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1050 if (rs6000_sched_insert_nops_str)
1052 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1053 rs6000_sched_insert_nops = sched_finish_none;
1054 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1055 rs6000_sched_insert_nops = sched_finish_pad_groups;
1056 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1057 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1059 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1062 #ifdef TARGET_REGNAMES
1063 /* If the user desires alternate register names, copy in the
1064 alternate names now. */
1065 if (TARGET_REGNAMES)
1066 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1069 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1070 If -maix-struct-return or -msvr4-struct-return was explicitly
1071 used, don't override with the ABI default. */
1072 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
1074 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
1075 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
1077 target_flags |= MASK_AIX_STRUCT_RET;
1080 if (TARGET_LONG_DOUBLE_128
1081 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1082 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1084 /* Allocate an alias set for register saves & restores from stack. */
1085 rs6000_sr_alias_set = new_alias_set ();
1088 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1090 /* We can only guarantee the availability of DI pseudo-ops when
1091 assembling for 64-bit targets. */
1094 targetm.asm_out.aligned_op.di = NULL;
1095 targetm.asm_out.unaligned_op.di = NULL;
1098 /* Set maximum branch target alignment at two instructions, eight bytes. */
1099 align_jumps_max_skip = 8;
1100 align_loops_max_skip = 8;
1102 /* Arrange to save and restore machine status around nested functions. */
1103 init_machine_status = rs6000_init_machine_status;
1105 /* We should always be splitting complex arguments, but we can't break
1106 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1107 if (DEFAULT_ABI != ABI_AIX)
1108 targetm.calls.split_complex_arg = NULL;
1111 /* Handle generic options of the form -mfoo=yes/no.
1112 NAME is the option name.
1113 VALUE is the option value.
1114 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1115 whether the option value is 'yes' or 'no' respectively. */
1117 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1121 else if (!strcmp (value, "yes"))
1123 else if (!strcmp (value, "no"))
1126 error ("unknown -m%s= option specified: '%s'", name, value);
1129 /* Handle -mabi= options. */
1131 rs6000_parse_abi_options (void)
1133 if (rs6000_abi_string == 0)
1135 else if (! strcmp (rs6000_abi_string, "altivec"))
1137 rs6000_altivec_abi = 1;
1140 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1141 rs6000_altivec_abi = 0;
1142 else if (! strcmp (rs6000_abi_string, "spe"))
1145 rs6000_altivec_abi = 0;
1146 if (!TARGET_SPE_ABI)
1147 error ("not configured for ABI: '%s'", rs6000_abi_string);
1150 else if (! strcmp (rs6000_abi_string, "no-spe"))
1153 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1156 /* Handle -malign-XXXXXX options. */
1158 rs6000_parse_alignment_option (void)
1160 if (rs6000_alignment_string == 0)
1162 else if (! strcmp (rs6000_alignment_string, "power"))
1163 rs6000_alignment_flags = MASK_ALIGN_POWER;
1164 else if (! strcmp (rs6000_alignment_string, "natural"))
1165 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1167 error ("unknown -malign-XXXXX option specified: '%s'",
1168 rs6000_alignment_string);
1171 /* Validate and record the size specified with the -mtls-size option. */
1174 rs6000_parse_tls_size_option (void)
1176 if (rs6000_tls_size_string == 0)
1178 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1179 rs6000_tls_size = 16;
1180 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1181 rs6000_tls_size = 32;
1182 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1183 rs6000_tls_size = 64;
1185 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1189 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1193 /* Do anything needed at the start of the asm file. */
1196 rs6000_file_start (void)
1200 const char *start = buffer;
1201 struct rs6000_cpu_select *ptr;
1202 const char *default_cpu = TARGET_CPU_DEFAULT;
1203 FILE *file = asm_out_file;
1205 default_file_start ();
1207 #ifdef TARGET_BI_ARCH
1208 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1212 if (flag_verbose_asm)
1214 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1215 rs6000_select[0].string = default_cpu;
1217 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1219 ptr = &rs6000_select[i];
1220 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1222 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1227 #ifdef USING_ELFOS_H
1228 switch (rs6000_sdata)
1230 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1231 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1232 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1233 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1236 if (rs6000_sdata && g_switch_value)
1238 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1249 /* Return nonzero if this function is known to have a null epilogue. */
1252 direct_return (void)
1254 if (reload_completed)
1256 rs6000_stack_t *info = rs6000_stack_info ();
1258 if (info->first_gp_reg_save == 32
1259 && info->first_fp_reg_save == 64
1260 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1261 && ! info->lr_save_p
1262 && ! info->cr_save_p
1263 && info->vrsave_mask == 0
1271 /* Returns 1 always. */
1274 any_operand (rtx op ATTRIBUTE_UNUSED,
1275 enum machine_mode mode ATTRIBUTE_UNUSED)
1280 /* Returns 1 if op is the count register. */
1282 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1284 if (GET_CODE (op) != REG)
1287 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1290 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1296 /* Returns 1 if op is an altivec register. */
1298 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1301 return (register_operand (op, mode)
1302 && (GET_CODE (op) != REG
1303 || REGNO (op) > FIRST_PSEUDO_REGISTER
1304 || ALTIVEC_REGNO_P (REGNO (op))));
1308 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1310 if (GET_CODE (op) != REG)
1313 if (XER_REGNO_P (REGNO (op)))
1319 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1320 by such constants completes more quickly. */
1323 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1325 return ( GET_CODE (op) == CONST_INT
1326 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1329 /* Return 1 if OP is a constant that can fit in a D field. */
1332 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1334 return (GET_CODE (op) == CONST_INT
1335 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1338 /* Similar for an unsigned D field. */
1341 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1343 return (GET_CODE (op) == CONST_INT
1344 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1347 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1350 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1352 return (GET_CODE (op) == CONST_INT
1353 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1356 /* Returns 1 if OP is a CONST_INT that is a positive value
1357 and an exact power of 2. */
1360 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1362 return (GET_CODE (op) == CONST_INT
1364 && exact_log2 (INTVAL (op)) >= 0);
1367 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1371 gpc_reg_operand (rtx op, enum machine_mode mode)
1373 return (register_operand (op, mode)
1374 && (GET_CODE (op) != REG
1375 || (REGNO (op) >= ARG_POINTER_REGNUM
1376 && !XER_REGNO_P (REGNO (op)))
1377 || REGNO (op) < MQ_REGNO));
1380 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1384 cc_reg_operand (rtx op, enum machine_mode mode)
1386 return (register_operand (op, mode)
1387 && (GET_CODE (op) != REG
1388 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1389 || CR_REGNO_P (REGNO (op))));
1392 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1393 CR field that isn't CR0. */
1396 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1398 return (register_operand (op, mode)
1399 && (GET_CODE (op) != REG
1400 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1401 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1404 /* Returns 1 if OP is either a constant integer valid for a D-field or
1405 a non-special register. If a register, it must be in the proper
1406 mode unless MODE is VOIDmode. */
1409 reg_or_short_operand (rtx op, enum machine_mode mode)
1411 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1414 /* Similar, except check if the negation of the constant would be
1415 valid for a D-field. Don't allow a constant zero, since all the
1416 patterns that call this predicate use "addic r1,r2,-constant" on
1417 a constant value to set a carry when r2 is greater or equal to
1418 "constant". That doesn't work for zero. */
1421 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1423 if (GET_CODE (op) == CONST_INT)
1424 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1426 return gpc_reg_operand (op, mode);
1429 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1430 a non-special register. If a register, it must be in the proper
1431 mode unless MODE is VOIDmode. */
1434 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1436 if (gpc_reg_operand (op, mode))
1438 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1445 /* Return 1 if the operand is either a register or an integer whose
1446 high-order 16 bits are zero. */
1449 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1451 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1454 /* Return 1 is the operand is either a non-special register or ANY
1455 constant integer. */
1458 reg_or_cint_operand (rtx op, enum machine_mode mode)
1460 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1463 /* Return 1 is the operand is either a non-special register or ANY
1464 32-bit signed constant integer. */
1467 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1469 return (gpc_reg_operand (op, mode)
1470 || (GET_CODE (op) == CONST_INT
1471 #if HOST_BITS_PER_WIDE_INT != 32
1472 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1473 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1478 /* Return 1 is the operand is either a non-special register or a 32-bit
1479 signed constant integer valid for 64-bit addition. */
1482 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1484 return (gpc_reg_operand (op, mode)
1485 || (GET_CODE (op) == CONST_INT
1486 #if HOST_BITS_PER_WIDE_INT == 32
1487 && INTVAL (op) < 0x7fff8000
1489 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1495 /* Return 1 is the operand is either a non-special register or a 32-bit
1496 signed constant integer valid for 64-bit subtraction. */
1499 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1501 return (gpc_reg_operand (op, mode)
1502 || (GET_CODE (op) == CONST_INT
1503 #if HOST_BITS_PER_WIDE_INT == 32
1504 && (- INTVAL (op)) < 0x7fff8000
1506 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1512 /* Return 1 is the operand is either a non-special register or ANY
1513 32-bit unsigned constant integer. */
1516 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1518 if (GET_CODE (op) == CONST_INT)
1520 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1522 if (GET_MODE_BITSIZE (mode) <= 32)
1525 if (INTVAL (op) < 0)
1529 return ((INTVAL (op) & GET_MODE_MASK (mode)
1530 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1532 else if (GET_CODE (op) == CONST_DOUBLE)
1534 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1538 return CONST_DOUBLE_HIGH (op) == 0;
1541 return gpc_reg_operand (op, mode);
1544 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1547 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1549 return (GET_CODE (op) == SYMBOL_REF
1550 || GET_CODE (op) == CONST
1551 || GET_CODE (op) == LABEL_REF);
1554 /* Return 1 if the operand is a simple references that can be loaded via
1555 the GOT (labels involving addition aren't allowed). */
1558 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1560 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1563 /* Return the number of instructions it takes to form a constant in an
1564 integer register. */
1567 num_insns_constant_wide (HOST_WIDE_INT value)
1569 /* signed constant loadable with {cal|addi} */
1570 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1573 /* constant loadable with {cau|addis} */
1574 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1577 #if HOST_BITS_PER_WIDE_INT == 64
1578 else if (TARGET_POWERPC64)
1580 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1581 HOST_WIDE_INT high = value >> 31;
1583 if (high == 0 || high == -1)
1589 return num_insns_constant_wide (high) + 1;
1591 return (num_insns_constant_wide (high)
1592 + num_insns_constant_wide (low) + 1);
1601 num_insns_constant (rtx op, enum machine_mode mode)
1603 if (GET_CODE (op) == CONST_INT)
1605 #if HOST_BITS_PER_WIDE_INT == 64
1606 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1607 && mask64_operand (op, mode))
1611 return num_insns_constant_wide (INTVAL (op));
1614 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1619 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1620 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1621 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1624 else if (GET_CODE (op) == CONST_DOUBLE)
1630 int endian = (WORDS_BIG_ENDIAN == 0);
1632 if (mode == VOIDmode || mode == DImode)
1634 high = CONST_DOUBLE_HIGH (op);
1635 low = CONST_DOUBLE_LOW (op);
1639 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1640 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1642 low = l[1 - endian];
1646 return (num_insns_constant_wide (low)
1647 + num_insns_constant_wide (high));
1651 if (high == 0 && low >= 0)
1652 return num_insns_constant_wide (low);
1654 else if (high == -1 && low < 0)
1655 return num_insns_constant_wide (low);
1657 else if (mask64_operand (op, mode))
1661 return num_insns_constant_wide (high) + 1;
1664 return (num_insns_constant_wide (high)
1665 + num_insns_constant_wide (low) + 1);
1673 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1674 register with one instruction per word. We only do this if we can
1675 safely read CONST_DOUBLE_{LOW,HIGH}. */
1678 easy_fp_constant (rtx op, enum machine_mode mode)
1680 if (GET_CODE (op) != CONST_DOUBLE
1681 || GET_MODE (op) != mode
1682 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1685 /* Consider all constants with -msoft-float to be easy. */
1686 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1690 /* If we are using V.4 style PIC, consider all constants to be hard. */
1691 if (flag_pic && DEFAULT_ABI == ABI_V4)
1694 #ifdef TARGET_RELOCATABLE
1695 /* Similarly if we are using -mrelocatable, consider all constants
1697 if (TARGET_RELOCATABLE)
1706 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1707 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1709 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1710 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1711 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1712 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1715 else if (mode == DFmode)
1720 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1721 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1723 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1724 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1727 else if (mode == SFmode)
1732 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1733 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1735 return num_insns_constant_wide (l) == 1;
1738 else if (mode == DImode)
1739 return ((TARGET_POWERPC64
1740 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1741 || (num_insns_constant (op, DImode) <= 2));
1743 else if (mode == SImode)
1749 /* Returns the constant for the splat instruction, if exists. */
1752 easy_vector_splat_const (int cst, enum machine_mode mode)
1757 if (EASY_VECTOR_15 (cst)
1758 || EASY_VECTOR_15_ADD_SELF (cst))
1760 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1764 if (EASY_VECTOR_15 (cst)
1765 || EASY_VECTOR_15_ADD_SELF (cst))
1767 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1771 if (EASY_VECTOR_15 (cst)
1772 || EASY_VECTOR_15_ADD_SELF (cst))
1781 /* Return nonzero if all elements of a vector have the same value. */
1784 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1788 units = CONST_VECTOR_NUNITS (op);
1790 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1791 for (i = 1; i < units; ++i)
1792 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1794 if (i == units && easy_vector_splat_const (cst, mode))
1799 /* Return 1 if the operand is a CONST_INT and can be put into a
1800 register without using memory. */
1803 easy_vector_constant (rtx op, enum machine_mode mode)
1807 if (GET_CODE (op) != CONST_VECTOR
1812 if (zero_constant (op, mode)
1813 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1814 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1817 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1820 if (TARGET_SPE && mode == V1DImode)
1823 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1824 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1826 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1828 evmergelo r0, r0, r0
1831 I don't know how efficient it would be to allow bigger constants,
1832 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1833 instructions is better than a 64-bit memory load, but I don't
1834 have the e500 timing specs. */
1835 if (TARGET_SPE && mode == V2SImode
1836 && cst >= -0x7fff && cst <= 0x7fff
1837 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1841 && easy_vector_same (op, mode))
1843 cst = easy_vector_splat_const (cst, mode);
1844 if (EASY_VECTOR_15_ADD_SELF (cst)
1845 || EASY_VECTOR_15 (cst))
1851 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1854 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1858 && GET_CODE (op) == CONST_VECTOR
1859 && easy_vector_same (op, mode))
1861 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
1862 if (EASY_VECTOR_15_ADD_SELF (cst))
1868 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
1871 gen_easy_vector_constant_add_self (rtx op)
1875 units = GET_MODE_NUNITS (GET_MODE (op));
1876 v = rtvec_alloc (units);
1878 for (i = 0; i < units; i++)
1880 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
1881 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
1885 output_vec_const_move (rtx *operands)
1888 enum machine_mode mode;
1894 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1895 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1896 mode = GET_MODE (dest);
1900 if (zero_constant (vec, mode))
1901 return "vxor %0,%0,%0";
1902 else if (easy_vector_constant (vec, mode))
1904 operands[1] = GEN_INT (cst);
1908 if (EASY_VECTOR_15 (cst))
1910 operands[1] = GEN_INT (cst);
1911 return "vspltisw %0,%1";
1913 else if (EASY_VECTOR_15_ADD_SELF (cst))
1917 if (EASY_VECTOR_15 (cst))
1919 operands[1] = GEN_INT (cst);
1920 return "vspltish %0,%1";
1922 else if (EASY_VECTOR_15_ADD_SELF (cst))
1926 if (EASY_VECTOR_15 (cst))
1928 operands[1] = GEN_INT (cst);
1929 return "vspltisb %0,%1";
1931 else if (EASY_VECTOR_15_ADD_SELF (cst))
1943 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1944 pattern of V1DI, V4HI, and V2SF.
1946 FIXME: We should probably return # and add post reload
1947 splitters for these, but this way is so easy ;-).
1949 operands[1] = GEN_INT (cst);
1950 operands[2] = GEN_INT (cst2);
1952 return "li %0,%1\n\tevmergelo %0,%0,%0";
1954 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1960 /* Return 1 if the operand is the constant 0. This works for scalars
1961 as well as vectors. */
1963 zero_constant (rtx op, enum machine_mode mode)
1965 return op == CONST0_RTX (mode);
1968 /* Return 1 if the operand is 0.0. */
1970 zero_fp_constant (rtx op, enum machine_mode mode)
1972 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1975 /* Return 1 if the operand is in volatile memory. Note that during
1976 the RTL generation phase, memory_operand does not return TRUE for
1977 volatile memory references. So this function allows us to
1978 recognize volatile references where its safe. */
1981 volatile_mem_operand (rtx op, enum machine_mode mode)
1983 if (GET_CODE (op) != MEM)
1986 if (!MEM_VOLATILE_P (op))
1989 if (mode != GET_MODE (op))
1992 if (reload_completed)
1993 return memory_operand (op, mode);
1995 if (reload_in_progress)
1996 return strict_memory_address_p (mode, XEXP (op, 0));
1998 return memory_address_p (mode, XEXP (op, 0));
2001 /* Return 1 if the operand is an offsettable memory operand. */
2004 offsettable_mem_operand (rtx op, enum machine_mode mode)
2006 return ((GET_CODE (op) == MEM)
2007 && offsettable_address_p (reload_completed || reload_in_progress,
2008 mode, XEXP (op, 0)));
2011 /* Return 1 if the operand is either an easy FP constant (see above) or
2015 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2017 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2020 /* Return 1 if the operand is either a non-special register or an item
2021 that can be used as the operand of a `mode' add insn. */
2024 add_operand (rtx op, enum machine_mode mode)
2026 if (GET_CODE (op) == CONST_INT)
2027 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2028 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2030 return gpc_reg_operand (op, mode);
2033 /* Return 1 if OP is a constant but not a valid add_operand. */
2036 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2038 return (GET_CODE (op) == CONST_INT
2039 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2040 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2043 /* Return 1 if the operand is a non-special register or a constant that
2044 can be used as the operand of an OR or XOR insn on the RS/6000. */
2047 logical_operand (rtx op, enum machine_mode mode)
2049 HOST_WIDE_INT opl, oph;
2051 if (gpc_reg_operand (op, mode))
2054 if (GET_CODE (op) == CONST_INT)
2056 opl = INTVAL (op) & GET_MODE_MASK (mode);
2058 #if HOST_BITS_PER_WIDE_INT <= 32
2059 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2063 else if (GET_CODE (op) == CONST_DOUBLE)
2065 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2068 opl = CONST_DOUBLE_LOW (op);
2069 oph = CONST_DOUBLE_HIGH (op);
2076 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2077 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2080 /* Return 1 if C is a constant that is not a logical operand (as
2081 above), but could be split into one. */
2084 non_logical_cint_operand (rtx op, enum machine_mode mode)
2086 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2087 && ! logical_operand (op, mode)
2088 && reg_or_logical_cint_operand (op, mode));
2091 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2092 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2093 Reject all ones and all zeros, since these should have been optimized
2094 away and confuse the making of MB and ME. */
2097 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2099 HOST_WIDE_INT c, lsb;
2101 if (GET_CODE (op) != CONST_INT)
2106 /* Fail in 64-bit mode if the mask wraps around because the upper
2107 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2108 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2111 /* We don't change the number of transitions by inverting,
2112 so make sure we start with the LS bit zero. */
2116 /* Reject all zeros or all ones. */
2120 /* Find the first transition. */
2123 /* Invert to look for a second transition. */
2126 /* Erase first transition. */
2129 /* Find the second transition (if any). */
2132 /* Match if all the bits above are 1's (or c is zero). */
2136 /* Return 1 for the PowerPC64 rlwinm corner case. */
2139 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2141 HOST_WIDE_INT c, lsb;
2143 if (GET_CODE (op) != CONST_INT)
2148 if ((c & 0x80000001) != 0x80000001)
2162 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2163 It is if there are no more than one 1->0 or 0->1 transitions.
2164 Reject all zeros, since zero should have been optimized away and
2165 confuses the making of MB and ME. */
2168 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2170 if (GET_CODE (op) == CONST_INT)
2172 HOST_WIDE_INT c, lsb;
2176 /* Reject all zeros. */
2180 /* We don't change the number of transitions by inverting,
2181 so make sure we start with the LS bit zero. */
2185 /* Find the transition, and check that all bits above are 1's. */
2188 /* Match if all the bits above are 1's (or c is zero). */
2194 /* Like mask64_operand, but allow up to three transitions. This
2195 predicate is used by insn patterns that generate two rldicl or
2196 rldicr machine insns. */
2199 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2201 if (GET_CODE (op) == CONST_INT)
2203 HOST_WIDE_INT c, lsb;
2207 /* Disallow all zeros. */
2211 /* We don't change the number of transitions by inverting,
2212 so make sure we start with the LS bit zero. */
2216 /* Find the first transition. */
2219 /* Invert to look for a second transition. */
2222 /* Erase first transition. */
2225 /* Find the second transition. */
2228 /* Invert to look for a third transition. */
2231 /* Erase second transition. */
2234 /* Find the third transition (if any). */
2237 /* Match if all the bits above are 1's (or c is zero). */
2243 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2244 implement ANDing by the mask IN. */
2246 build_mask64_2_operands (rtx in, rtx *out)
2248 #if HOST_BITS_PER_WIDE_INT >= 64
2249 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2252 if (GET_CODE (in) != CONST_INT)
2258 /* Assume c initially something like 0x00fff000000fffff. The idea
2259 is to rotate the word so that the middle ^^^^^^ group of zeros
2260 is at the MS end and can be cleared with an rldicl mask. We then
2261 rotate back and clear off the MS ^^ group of zeros with a
2263 c = ~c; /* c == 0xff000ffffff00000 */
2264 lsb = c & -c; /* lsb == 0x0000000000100000 */
2265 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2266 c = ~c; /* c == 0x00fff000000fffff */
2267 c &= -lsb; /* c == 0x00fff00000000000 */
2268 lsb = c & -c; /* lsb == 0x0000100000000000 */
2269 c = ~c; /* c == 0xff000fffffffffff */
2270 c &= -lsb; /* c == 0xff00000000000000 */
2272 while ((lsb >>= 1) != 0)
2273 shift++; /* shift == 44 on exit from loop */
2274 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2275 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2276 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2280 /* Assume c initially something like 0xff000f0000000000. The idea
2281 is to rotate the word so that the ^^^ middle group of zeros
2282 is at the LS end and can be cleared with an rldicr mask. We then
2283 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2285 lsb = c & -c; /* lsb == 0x0000010000000000 */
2286 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2287 c = ~c; /* c == 0x00fff0ffffffffff */
2288 c &= -lsb; /* c == 0x00fff00000000000 */
2289 lsb = c & -c; /* lsb == 0x0000100000000000 */
2290 c = ~c; /* c == 0xff000fffffffffff */
2291 c &= -lsb; /* c == 0xff00000000000000 */
2293 while ((lsb >>= 1) != 0)
2294 shift++; /* shift == 44 on exit from loop */
2295 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2296 m1 >>= shift; /* m1 == 0x0000000000000fff */
2297 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2300 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2301 masks will be all 1's. We are guaranteed more than one transition. */
2302 out[0] = GEN_INT (64 - shift);
2303 out[1] = GEN_INT (m1);
2304 out[2] = GEN_INT (shift);
2305 out[3] = GEN_INT (m2);
2313 /* Return 1 if the operand is either a non-special register or a constant
2314 that can be used as the operand of a PowerPC64 logical AND insn. */
2317 and64_operand (rtx op, enum machine_mode mode)
2319 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2320 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2322 return (logical_operand (op, mode) || mask64_operand (op, mode));
2325 /* Like the above, but also match constants that can be implemented
2326 with two rldicl or rldicr insns. */
2329 and64_2_operand (rtx op, enum machine_mode mode)
2331 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2332 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2334 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2337 /* Return 1 if the operand is either a non-special register or a
2338 constant that can be used as the operand of an RS/6000 logical AND insn. */
2341 and_operand (rtx op, enum machine_mode mode)
2343 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2344 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2346 return (logical_operand (op, mode) || mask_operand (op, mode));
2349 /* Return 1 if the operand is a general register or memory operand. */
2352 reg_or_mem_operand (rtx op, enum machine_mode mode)
2354 return (gpc_reg_operand (op, mode)
2355 || memory_operand (op, mode)
2356 || macho_lo_sum_memory_operand (op, mode)
2357 || volatile_mem_operand (op, mode));
2360 /* Return 1 if the operand is a general register or memory operand without
2361 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2365 lwa_operand (rtx op, enum machine_mode mode)
2369 if (reload_completed && GET_CODE (inner) == SUBREG)
2370 inner = SUBREG_REG (inner);
2372 return gpc_reg_operand (inner, mode)
2373 || (memory_operand (inner, mode)
2374 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2375 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2376 && (GET_CODE (XEXP (inner, 0)) != PLUS
2377 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2378 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2381 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2384 symbol_ref_operand (rtx op, enum machine_mode mode)
2386 if (mode != VOIDmode && GET_MODE (op) != mode)
2389 return (GET_CODE (op) == SYMBOL_REF
2390 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2393 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2394 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2397 call_operand (rtx op, enum machine_mode mode)
2399 if (mode != VOIDmode && GET_MODE (op) != mode)
2402 return (GET_CODE (op) == SYMBOL_REF
2403 || (GET_CODE (op) == REG
2404 && (REGNO (op) == LINK_REGISTER_REGNUM
2405 || REGNO (op) == COUNT_REGISTER_REGNUM
2406 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2409 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2413 current_file_function_operand (rtx op,
2414 enum machine_mode mode ATTRIBUTE_UNUSED)
2416 return (GET_CODE (op) == SYMBOL_REF
2417 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2418 && (SYMBOL_REF_LOCAL_P (op)
2419 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2422 /* Return 1 if this operand is a valid input for a move insn. */
2425 input_operand (rtx op, enum machine_mode mode)
2427 /* Memory is always valid. */
2428 if (memory_operand (op, mode))
2431 /* For floating-point, easy constants are valid. */
2432 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2434 && easy_fp_constant (op, mode))
2437 /* Allow any integer constant. */
2438 if (GET_MODE_CLASS (mode) == MODE_INT
2439 && (GET_CODE (op) == CONST_INT
2440 || GET_CODE (op) == CONST_DOUBLE))
2443 /* Allow easy vector constants. */
2444 if (GET_CODE (op) == CONST_VECTOR
2445 && easy_vector_constant (op, mode))
2448 /* For floating-point or multi-word mode, the only remaining valid type
2450 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2451 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2452 return register_operand (op, mode);
2454 /* The only cases left are integral modes one word or smaller (we
2455 do not get called for MODE_CC values). These can be in any
2457 if (register_operand (op, mode))
2460 /* A SYMBOL_REF referring to the TOC is valid. */
2461 if (legitimate_constant_pool_address_p (op))
2464 /* A constant pool expression (relative to the TOC) is valid */
2465 if (toc_relative_expr_p (op))
2468 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2470 if (DEFAULT_ABI == ABI_V4
2471 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2472 && small_data_operand (op, Pmode))
2479 /* Darwin, AIX increases natural record alignment to doubleword if the first
2480 field is an FP double while the FP fields remain word aligned. */
2483 rs6000_special_round_type_align (tree type, int computed, int specified)
2485 tree field = TYPE_FIELDS (type);
2487 /* Skip all the static variables only if ABI is greater than
2489 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2490 field = TREE_CHAIN (field);
2492 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2493 return MAX (computed, specified);
2495 return MAX (MAX (computed, specified), 64);
2498 /* Return 1 for an operand in small memory on V.4/eabi. */
2501 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2502 enum machine_mode mode ATTRIBUTE_UNUSED)
2507 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2510 if (DEFAULT_ABI != ABI_V4)
2513 if (GET_CODE (op) == SYMBOL_REF)
2516 else if (GET_CODE (op) != CONST
2517 || GET_CODE (XEXP (op, 0)) != PLUS
2518 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2519 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2524 rtx sum = XEXP (op, 0);
2525 HOST_WIDE_INT summand;
2527 /* We have to be careful here, because it is the referenced address
2528 that must be 32k from _SDA_BASE_, not just the symbol. */
2529 summand = INTVAL (XEXP (sum, 1));
2530 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2533 sym_ref = XEXP (sum, 0);
2536 return SYMBOL_REF_SMALL_P (sym_ref);
2542 /* Return true, if operand is a memory operand and has a
2543 displacement divisible by 4. */
2546 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2551 if (!memory_operand (op, mode))
2554 addr = XEXP (op, 0);
2555 if (GET_CODE (addr) == PLUS
2556 && GET_CODE (XEXP (addr, 0)) == REG
2557 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2558 off = INTVAL (XEXP (addr, 1));
2560 return (off % 4) == 0;
2563 /* Return true if either operand is a general purpose register. */
2566 gpr_or_gpr_p (rtx op0, rtx op1)
2568 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2569 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2573 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2576 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2578 switch (GET_CODE(op))
2581 if (RS6000_SYMBOL_REF_TLS_P (op))
2583 else if (CONSTANT_POOL_ADDRESS_P (op))
2585 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2593 else if (! strcmp (XSTR (op, 0), toc_label_name))
2602 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2603 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2605 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2614 constant_pool_expr_p (rtx op)
2618 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2622 toc_relative_expr_p (rtx op)
2626 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2629 /* SPE offset addressing is limited to 5-bits worth of double words. */
2630 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2633 legitimate_constant_pool_address_p (rtx x)
2636 && GET_CODE (x) == PLUS
2637 && GET_CODE (XEXP (x, 0)) == REG
2638 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2639 && constant_pool_expr_p (XEXP (x, 1)));
2643 legitimate_small_data_p (enum machine_mode mode, rtx x)
2645 return (DEFAULT_ABI == ABI_V4
2646 && !flag_pic && !TARGET_TOC
2647 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2648 && small_data_operand (x, mode));
2652 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2654 unsigned HOST_WIDE_INT offset, extra;
2656 if (GET_CODE (x) != PLUS)
2658 if (GET_CODE (XEXP (x, 0)) != REG)
2660 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2662 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2665 offset = INTVAL (XEXP (x, 1));
2673 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2674 which leaves the only valid constant offset of zero, which by
2675 canonicalization rules is also invalid. */
2682 /* SPE vector modes. */
2683 return SPE_CONST_OFFSET_OK (offset);
2687 if (mode == DFmode || !TARGET_POWERPC64)
2689 else if (offset & 3)
2695 if (mode == TFmode || !TARGET_POWERPC64)
2697 else if (offset & 3)
2708 return (offset < 0x10000) && (offset + extra < 0x10000);
2712 legitimate_indexed_address_p (rtx x, int strict)
2716 if (GET_CODE (x) != PLUS)
2721 if (!REG_P (op0) || !REG_P (op1))
2724 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2725 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2726 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2727 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2731 legitimate_indirect_address_p (rtx x, int strict)
2733 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2737 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2739 if (!TARGET_MACHO || !flag_pic
2740 || mode != SImode || GET_CODE(x) != MEM)
2744 if (GET_CODE (x) != LO_SUM)
2746 if (GET_CODE (XEXP (x, 0)) != REG)
2748 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2752 return CONSTANT_P (x);
2756 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2758 if (GET_CODE (x) != LO_SUM)
2760 if (GET_CODE (XEXP (x, 0)) != REG)
2762 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2766 if (TARGET_ELF || TARGET_MACHO)
2768 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2772 if (GET_MODE_NUNITS (mode) != 1)
2774 if (GET_MODE_BITSIZE (mode) > 32
2775 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2778 return CONSTANT_P (x);
2785 /* Try machine-dependent ways of modifying an illegitimate address
2786 to be legitimate. If we find one, return the new, valid address.
2787 This is used from only one place: `memory_address' in explow.c.
2789 OLDX is the address as it was before break_out_memory_refs was
2790 called. In some cases it is useful to look at this to decide what
2793 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2795 It is always safe for this function to do nothing. It exists to
2796 recognize opportunities to optimize the output.
2798 On RS/6000, first check for the sum of a register with a constant
2799 integer that is out of range. If so, generate code to add the
2800 constant with the low-order 16 bits masked to the register and force
2801 this result into another register (this can be done with `cau').
2802 Then generate an address of REG+(CONST&0xffff), allowing for the
2803 possibility of bit 16 being a one.
2805 Then check for the sum of a register and something not constant, try to
2806 load the other things into a register and return the sum. */
2809 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2810 enum machine_mode mode)
2812 if (GET_CODE (x) == SYMBOL_REF)
2814 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2816 return rs6000_legitimize_tls_address (x, model);
2819 if (GET_CODE (x) == PLUS
2820 && GET_CODE (XEXP (x, 0)) == REG
2821 && GET_CODE (XEXP (x, 1)) == CONST_INT
2822 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2824 HOST_WIDE_INT high_int, low_int;
2826 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2827 high_int = INTVAL (XEXP (x, 1)) - low_int;
2828 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2829 GEN_INT (high_int)), 0);
2830 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2832 else if (GET_CODE (x) == PLUS
2833 && GET_CODE (XEXP (x, 0)) == REG
2834 && GET_CODE (XEXP (x, 1)) != CONST_INT
2835 && GET_MODE_NUNITS (mode) == 1
2836 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2838 || (mode != DFmode && mode != TFmode))
2839 && (TARGET_POWERPC64 || mode != DImode)
2842 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2843 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2845 else if (ALTIVEC_VECTOR_MODE (mode))
2849 /* Make sure both operands are registers. */
2850 if (GET_CODE (x) == PLUS)
2851 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2852 force_reg (Pmode, XEXP (x, 1)));
2854 reg = force_reg (Pmode, x);
2857 else if (SPE_VECTOR_MODE (mode))
2859 /* We accept [reg + reg] and [reg + OFFSET]. */
2861 if (GET_CODE (x) == PLUS)
2863 rtx op1 = XEXP (x, 0);
2864 rtx op2 = XEXP (x, 1);
2866 op1 = force_reg (Pmode, op1);
2868 if (GET_CODE (op2) != REG
2869 && (GET_CODE (op2) != CONST_INT
2870 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2871 op2 = force_reg (Pmode, op2);
2873 return gen_rtx_PLUS (Pmode, op1, op2);
2876 return force_reg (Pmode, x);
2882 && GET_CODE (x) != CONST_INT
2883 && GET_CODE (x) != CONST_DOUBLE
2885 && GET_MODE_NUNITS (mode) == 1
2886 && (GET_MODE_BITSIZE (mode) <= 32
2887 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2889 rtx reg = gen_reg_rtx (Pmode);
2890 emit_insn (gen_elf_high (reg, x));
2891 return gen_rtx_LO_SUM (Pmode, reg, x);
2893 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2896 && ! MACHO_DYNAMIC_NO_PIC_P
2898 && GET_CODE (x) != CONST_INT
2899 && GET_CODE (x) != CONST_DOUBLE
2901 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2905 rtx reg = gen_reg_rtx (Pmode);
2906 emit_insn (gen_macho_high (reg, x));
2907 return gen_rtx_LO_SUM (Pmode, reg, x);
2910 && constant_pool_expr_p (x)
2911 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2913 return create_TOC_reference (x);
2919 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2920 We need to emit DTP-relative relocations. */
2923 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2928 fputs ("\t.long\t", file);
2931 fputs (DOUBLE_INT_ASM_OP, file);
2936 output_addr_const (file, x);
2937 fputs ("@dtprel+0x8000", file);
2940 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2942 static GTY(()) rtx rs6000_tls_symbol;
2944 rs6000_tls_get_addr (void)
2946 if (!rs6000_tls_symbol)
2947 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2949 return rs6000_tls_symbol;
2952 /* Construct the SYMBOL_REF for TLS GOT references. */
2954 static GTY(()) rtx rs6000_got_symbol;
2956 rs6000_got_sym (void)
2958 if (!rs6000_got_symbol)
2960 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2961 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2962 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2965 return rs6000_got_symbol;
2968 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2969 this (thread-local) address. */
2972 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2976 dest = gen_reg_rtx (Pmode);
2977 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2983 tlsreg = gen_rtx_REG (Pmode, 13);
2984 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2988 tlsreg = gen_rtx_REG (Pmode, 2);
2989 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2993 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2997 tmp = gen_reg_rtx (Pmode);
3000 tlsreg = gen_rtx_REG (Pmode, 13);
3001 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3005 tlsreg = gen_rtx_REG (Pmode, 2);
3006 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3010 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3012 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3017 rtx r3, got, tga, tmp1, tmp2, eqv;
3020 got = gen_rtx_REG (Pmode, TOC_REGISTER);
3024 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3027 rtx gsym = rs6000_got_sym ();
3028 got = gen_reg_rtx (Pmode);
3030 rs6000_emit_move (got, gsym, Pmode);
3034 static int tls_got_labelno = 0;
3035 rtx tempLR, lab, tmp3, mem;
3038 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
3039 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
3040 tempLR = gen_reg_rtx (Pmode);
3041 tmp1 = gen_reg_rtx (Pmode);
3042 tmp2 = gen_reg_rtx (Pmode);
3043 tmp3 = gen_reg_rtx (Pmode);
3044 mem = gen_rtx_MEM (Pmode, tmp1);
3045 RTX_UNCHANGING_P (mem) = 1;
3047 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
3049 emit_move_insn (tmp1, tempLR);
3050 emit_move_insn (tmp2, mem);
3051 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3052 last = emit_move_insn (got, tmp3);
3053 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3055 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3057 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3063 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3065 r3 = gen_rtx_REG (Pmode, 3);
3067 insn = gen_tls_gd_64 (r3, got, addr);
3069 insn = gen_tls_gd_32 (r3, got, addr);
3072 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3073 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3074 insn = emit_call_insn (insn);
3075 CONST_OR_PURE_CALL_P (insn) = 1;
3076 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3077 insn = get_insns ();
3079 emit_libcall_block (insn, dest, r3, addr);
3081 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3083 r3 = gen_rtx_REG (Pmode, 3);
3085 insn = gen_tls_ld_64 (r3, got);
3087 insn = gen_tls_ld_32 (r3, got);
3090 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3091 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3092 insn = emit_call_insn (insn);
3093 CONST_OR_PURE_CALL_P (insn) = 1;
3094 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3095 insn = get_insns ();
3097 tmp1 = gen_reg_rtx (Pmode);
3098 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3100 emit_libcall_block (insn, tmp1, r3, eqv);
3101 if (rs6000_tls_size == 16)
3104 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3106 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3108 else if (rs6000_tls_size == 32)
3110 tmp2 = gen_reg_rtx (Pmode);
3112 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3114 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3117 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3119 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3123 tmp2 = gen_reg_rtx (Pmode);
3125 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3127 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3129 insn = gen_rtx_SET (Pmode, dest,
3130 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3136 /* IE, or 64 bit offset LE. */
3137 tmp2 = gen_reg_rtx (Pmode);
3139 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3141 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3144 insn = gen_tls_tls_64 (dest, tmp2, addr);
3146 insn = gen_tls_tls_32 (dest, tmp2, addr);
3154 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3155 instruction definitions. */
3158 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3160 return RS6000_SYMBOL_REF_TLS_P (x);
3163 /* Return 1 if X contains a thread-local symbol. */
3166 rs6000_tls_referenced_p (rtx x)
3168 if (! TARGET_HAVE_TLS)
3171 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3174 /* Return 1 if *X is a thread-local symbol. This is the same as
3175 rs6000_tls_symbol_ref except for the type of the unused argument. */
3178 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3180 return RS6000_SYMBOL_REF_TLS_P (*x);
3183 /* The convention appears to be to define this wherever it is used.
3184 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3185 is now used here. */
3186 #ifndef REG_MODE_OK_FOR_BASE_P
3187 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3190 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3191 replace the input X, or the original X if no replacement is called for.
3192 The output parameter *WIN is 1 if the calling macro should goto WIN,
3195 For RS/6000, we wish to handle large displacements off a base
3196 register by splitting the addend across an addiu/addis and the mem insn.
3197 This cuts number of extra insns needed from 3 to 1.
3199 On Darwin, we use this to generate code for floating point constants.
3200 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3201 The Darwin code is inside #if TARGET_MACHO because only then is
3202 machopic_function_base_name() defined. */
3204 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3205 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3207 /* We must recognize output that we have already generated ourselves. */
3208 if (GET_CODE (x) == PLUS
3209 && GET_CODE (XEXP (x, 0)) == PLUS
3210 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3211 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3212 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3214 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3215 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3216 opnum, (enum reload_type)type);
3222 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3223 && GET_CODE (x) == LO_SUM
3224 && GET_CODE (XEXP (x, 0)) == PLUS
3225 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3226 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3227 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3228 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3229 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3230 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3231 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3233 /* Result of previous invocation of this function on Darwin
3234 floating point constant. */
3235 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3236 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3237 opnum, (enum reload_type)type);
3242 if (GET_CODE (x) == PLUS
3243 && GET_CODE (XEXP (x, 0)) == REG
3244 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3245 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3246 && GET_CODE (XEXP (x, 1)) == CONST_INT
3247 && !SPE_VECTOR_MODE (mode)
3248 && !ALTIVEC_VECTOR_MODE (mode))
3250 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3251 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3253 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3255 /* Check for 32-bit overflow. */
3256 if (high + low != val)
3262 /* Reload the high part into a base reg; leave the low part
3263 in the mem directly. */
3265 x = gen_rtx_PLUS (GET_MODE (x),
3266 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3270 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3271 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3272 opnum, (enum reload_type)type);
3277 if (GET_CODE (x) == SYMBOL_REF
3278 && DEFAULT_ABI == ABI_DARWIN
3279 && !ALTIVEC_VECTOR_MODE (mode)
3280 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3281 /* Don't do this for TFmode, since the result isn't offsettable. */
3286 rtx offset = gen_rtx_CONST (Pmode,
3287 gen_rtx_MINUS (Pmode, x,
3288 gen_rtx_SYMBOL_REF (Pmode,
3289 machopic_function_base_name ())));
3290 x = gen_rtx_LO_SUM (GET_MODE (x),
3291 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3292 gen_rtx_HIGH (Pmode, offset)), offset);
3295 x = gen_rtx_LO_SUM (GET_MODE (x),
3296 gen_rtx_HIGH (Pmode, x), x);
3298 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3299 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3300 opnum, (enum reload_type)type);
3306 && constant_pool_expr_p (x)
3307 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3309 (x) = create_TOC_reference (x);
3317 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3318 that is a valid memory address for an instruction.
3319 The MODE argument is the machine mode for the MEM expression
3320 that wants to use this address.
3322 On the RS/6000, there are four valid address: a SYMBOL_REF that
3323 refers to a constant pool entry of an address (or the sum of it
3324 plus a constant), a short (16-bit signed) constant plus a register,
3325 the sum of two registers, or a register indirect, possibly with an
3326 auto-increment. For DFmode and DImode with a constant plus register,
3327 we must ensure that both words are addressable or PowerPC64 with offset
3330 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3331 32-bit DImode, TImode), indexed addressing cannot be used because
3332 adjacent memory cells are accessed by adding word-sized offsets
3333 during assembly output. */
3335 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3337 if (RS6000_SYMBOL_REF_TLS_P (x))
3339 if (legitimate_indirect_address_p (x, reg_ok_strict))
3341 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3342 && !ALTIVEC_VECTOR_MODE (mode)
3343 && !SPE_VECTOR_MODE (mode)
3345 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3347 if (legitimate_small_data_p (mode, x))
3349 if (legitimate_constant_pool_address_p (x))
3351 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3353 && GET_CODE (x) == PLUS
3354 && GET_CODE (XEXP (x, 0)) == REG
3355 && (XEXP (x, 0) == virtual_stack_vars_rtx
3356 || XEXP (x, 0) == arg_pointer_rtx)
3357 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3359 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3362 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3364 || (mode != DFmode && mode != TFmode))
3365 && (TARGET_POWERPC64 || mode != DImode)
3366 && legitimate_indexed_address_p (x, reg_ok_strict))
3368 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3373 /* Go to LABEL if ADDR (a legitimate address expression)
3374 has an effect that depends on the machine mode it is used for.
3376 On the RS/6000 this is true of all integral offsets (since AltiVec
3377 modes don't allow them) or is a pre-increment or decrement.
3379 ??? Except that due to conceptual problems in offsettable_address_p
3380 we can't really report the problems of integral offsets. So leave
3381 this assuming that the adjustable offset must be valid for the
3382 sub-words of a TFmode operand, which is what we had before. */
3385 rs6000_mode_dependent_address (rtx addr)
3387 switch (GET_CODE (addr))
3390 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3392 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3393 return val + 12 + 0x8000 >= 0x10000;
3402 return TARGET_UPDATE;
3411 /* Return number of consecutive hard regs needed starting at reg REGNO
3412 to hold something of mode MODE.
3413 This is ordinarily the length in words of a value of mode MODE
3414 but can be less for certain modes in special long registers.
3416 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3417 scalar instructions. The upper 32 bits are only available to the
3420 POWER and PowerPC GPRs hold 32 bits worth;
3421 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3424 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3426 if (FP_REGNO_P (regno))
3427 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3429 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3430 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3432 if (ALTIVEC_REGNO_P (regno))
3434 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3436 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3439 /* Change register usage conditional on target flags. */
3441 rs6000_conditional_register_usage (void)
3445 /* Set MQ register fixed (already call_used) if not POWER
3446 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3451 /* 64-bit AIX reserves GPR13 for thread-private data. */
3453 fixed_regs[13] = call_used_regs[13]
3454 = call_really_used_regs[13] = 1;
3456 /* Conditionally disable FPRs. */
3457 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3458 for (i = 32; i < 64; i++)
3459 fixed_regs[i] = call_used_regs[i]
3460 = call_really_used_regs[i] = 1;
3462 if (DEFAULT_ABI == ABI_V4
3463 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3465 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3467 if (DEFAULT_ABI == ABI_V4
3468 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3470 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3471 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3472 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3474 if (DEFAULT_ABI == ABI_DARWIN
3475 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3476 global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3477 = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3478 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3479 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3482 global_regs[VSCR_REGNO] = 1;
3486 global_regs[SPEFSCR_REGNO] = 1;
3487 fixed_regs[FIXED_SCRATCH]
3488 = call_used_regs[FIXED_SCRATCH]
3489 = call_really_used_regs[FIXED_SCRATCH] = 1;
3492 if (! TARGET_ALTIVEC)
3494 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3495 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3496 call_really_used_regs[VRSAVE_REGNO] = 1;
3499 if (TARGET_ALTIVEC_ABI)
3500 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3501 call_used_regs[i] = call_really_used_regs[i] = 1;
3504 /* Try to output insns to set TARGET equal to the constant C if it can
3505 be done in less than N insns. Do all computations in MODE.
3506 Returns the place where the output has been placed if it can be
3507 done and the insns have been emitted. If it would take more than N
3508 insns, zero is returned and no insns and emitted. */
3511 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3512 rtx source, int n ATTRIBUTE_UNUSED)
3514 rtx result, insn, set;
3515 HOST_WIDE_INT c0, c1;
3517 if (mode == QImode || mode == HImode)
3520 dest = gen_reg_rtx (mode);
3521 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3524 else if (mode == SImode)
3526 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3528 emit_insn (gen_rtx_SET (VOIDmode, result,
3529 GEN_INT (INTVAL (source)
3530 & (~ (HOST_WIDE_INT) 0xffff))));
3531 emit_insn (gen_rtx_SET (VOIDmode, dest,
3532 gen_rtx_IOR (SImode, result,
3533 GEN_INT (INTVAL (source) & 0xffff))));
3536 else if (mode == DImode)
3538 if (GET_CODE (source) == CONST_INT)
3540 c0 = INTVAL (source);
3543 else if (GET_CODE (source) == CONST_DOUBLE)
3545 #if HOST_BITS_PER_WIDE_INT >= 64
3546 c0 = CONST_DOUBLE_LOW (source);
3549 c0 = CONST_DOUBLE_LOW (source);
3550 c1 = CONST_DOUBLE_HIGH (source);
3556 result = rs6000_emit_set_long_const (dest, c0, c1);
3561 insn = get_last_insn ();
3562 set = single_set (insn);
3563 if (! CONSTANT_P (SET_SRC (set)))
3564 set_unique_reg_note (insn, REG_EQUAL, source);
3569 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3570 fall back to a straight forward decomposition. We do this to avoid
3571 exponential run times encountered when looking for longer sequences
3572 with rs6000_emit_set_const. */
3574 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3576 if (!TARGET_POWERPC64)
3578 rtx operand1, operand2;
3580 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3582 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3584 emit_move_insn (operand1, GEN_INT (c1));
3585 emit_move_insn (operand2, GEN_INT (c2));
3589 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3592 ud2 = (c1 & 0xffff0000) >> 16;
3593 #if HOST_BITS_PER_WIDE_INT >= 64
3597 ud4 = (c2 & 0xffff0000) >> 16;
3599 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3600 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3603 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3605 emit_move_insn (dest, GEN_INT (ud1));
3608 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3609 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3612 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3615 emit_move_insn (dest, GEN_INT (ud2 << 16));
3617 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3619 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3620 || (ud4 == 0 && ! (ud3 & 0x8000)))
3623 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3626 emit_move_insn (dest, GEN_INT (ud3 << 16));
3629 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3630 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3632 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3637 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3640 emit_move_insn (dest, GEN_INT (ud4 << 16));
3643 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3645 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3647 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3648 GEN_INT (ud2 << 16)));
3650 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3656 /* Emit a move from SOURCE to DEST in mode MODE. */
3658 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3662 operands[1] = source;
3664 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3665 if (GET_CODE (operands[1]) == CONST_DOUBLE
3666 && ! FLOAT_MODE_P (mode)
3667 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3669 /* FIXME. This should never happen. */
3670 /* Since it seems that it does, do the safe thing and convert
3672 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3674 if (GET_CODE (operands[1]) == CONST_DOUBLE
3675 && ! FLOAT_MODE_P (mode)
3676 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3677 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3678 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3679 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3682 /* Check if GCC is setting up a block move that will end up using FP
3683 registers as temporaries. We must make sure this is acceptable. */
3684 if (GET_CODE (operands[0]) == MEM
3685 && GET_CODE (operands[1]) == MEM
3687 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3688 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3689 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3690 ? 32 : MEM_ALIGN (operands[0])))
3691 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3693 : MEM_ALIGN (operands[1]))))
3694 && ! MEM_VOLATILE_P (operands [0])
3695 && ! MEM_VOLATILE_P (operands [1]))
3697 emit_move_insn (adjust_address (operands[0], SImode, 0),
3698 adjust_address (operands[1], SImode, 0));
3699 emit_move_insn (adjust_address (operands[0], SImode, 4),
3700 adjust_address (operands[1], SImode, 4));
3704 if (!no_new_pseudos)
3706 if (GET_CODE (operands[1]) == MEM && optimize > 0
3707 && (mode == QImode || mode == HImode || mode == SImode)
3708 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3710 rtx reg = gen_reg_rtx (word_mode);
3712 emit_insn (gen_rtx_SET (word_mode, reg,
3713 gen_rtx_ZERO_EXTEND (word_mode,
3715 operands[1] = gen_lowpart (mode, reg);
3717 if (GET_CODE (operands[0]) != REG)
3718 operands[1] = force_reg (mode, operands[1]);
3721 if (mode == SFmode && ! TARGET_POWERPC
3722 && TARGET_HARD_FLOAT && TARGET_FPRS
3723 && GET_CODE (operands[0]) == MEM)
3727 if (reload_in_progress || reload_completed)
3728 regnum = true_regnum (operands[1]);
3729 else if (GET_CODE (operands[1]) == REG)
3730 regnum = REGNO (operands[1]);
3734 /* If operands[1] is a register, on POWER it may have
3735 double-precision data in it, so truncate it to single
3737 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3740 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3741 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3742 operands[1] = newreg;
3746 /* Recognize the case where operand[1] is a reference to thread-local
3747 data and load its address to a register. */
3748 if (GET_CODE (operands[1]) == SYMBOL_REF)
3750 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3752 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3755 /* Handle the case where reload calls us with an invalid address. */
3756 if (reload_in_progress && mode == Pmode
3757 && (! general_operand (operands[1], mode)
3758 || ! nonimmediate_operand (operands[0], mode)))
3761 /* 128-bit constant floating-point values on Darwin should really be
3762 loaded as two parts. */
3763 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3764 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3765 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3767 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3768 know how to get a DFmode SUBREG of a TFmode. */
3769 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3770 simplify_gen_subreg (DImode, operands[1], mode, 0),
3772 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3773 GET_MODE_SIZE (DImode)),
3774 simplify_gen_subreg (DImode, operands[1], mode,
3775 GET_MODE_SIZE (DImode)),
3780 /* FIXME: In the long term, this switch statement should go away
3781 and be replaced by a sequence of tests based on things like
3787 if (CONSTANT_P (operands[1])
3788 && GET_CODE (operands[1]) != CONST_INT)
3789 operands[1] = force_const_mem (mode, operands[1]);
3795 if (CONSTANT_P (operands[1])
3796 && ! easy_fp_constant (operands[1], mode))
3797 operands[1] = force_const_mem (mode, operands[1]);
3808 if (CONSTANT_P (operands[1])
3809 && !easy_vector_constant (operands[1], mode))
3810 operands[1] = force_const_mem (mode, operands[1]);
3815 /* Use default pattern for address of ELF small data */
3818 && DEFAULT_ABI == ABI_V4
3819 && (GET_CODE (operands[1]) == SYMBOL_REF
3820 || GET_CODE (operands[1]) == CONST)
3821 && small_data_operand (operands[1], mode))
3823 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3827 if (DEFAULT_ABI == ABI_V4
3828 && mode == Pmode && mode == SImode
3829 && flag_pic == 1 && got_operand (operands[1], mode))
3831 emit_insn (gen_movsi_got (operands[0], operands[1]));
3835 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3839 && CONSTANT_P (operands[1])
3840 && GET_CODE (operands[1]) != HIGH
3841 && GET_CODE (operands[1]) != CONST_INT)
3843 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3845 /* If this is a function address on -mcall-aixdesc,
3846 convert it to the address of the descriptor. */
3847 if (DEFAULT_ABI == ABI_AIX
3848 && GET_CODE (operands[1]) == SYMBOL_REF
3849 && XSTR (operands[1], 0)[0] == '.')
3851 const char *name = XSTR (operands[1], 0);
3853 while (*name == '.')
3855 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3856 CONSTANT_POOL_ADDRESS_P (new_ref)
3857 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3858 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3859 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3860 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3861 operands[1] = new_ref;
3864 if (DEFAULT_ABI == ABI_DARWIN)
3867 if (MACHO_DYNAMIC_NO_PIC_P)
3869 /* Take care of any required data indirection. */
3870 operands[1] = rs6000_machopic_legitimize_pic_address (
3871 operands[1], mode, operands[0]);
3872 if (operands[0] != operands[1])
3873 emit_insn (gen_rtx_SET (VOIDmode,
3874 operands[0], operands[1]));
3878 emit_insn (gen_macho_high (target, operands[1]));
3879 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3883 emit_insn (gen_elf_high (target, operands[1]));
3884 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3888 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3889 and we have put it in the TOC, we just need to make a TOC-relative
3892 && GET_CODE (operands[1]) == SYMBOL_REF
3893 && constant_pool_expr_p (operands[1])
3894 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3895 get_pool_mode (operands[1])))
3897 operands[1] = create_TOC_reference (operands[1]);
3899 else if (mode == Pmode
3900 && CONSTANT_P (operands[1])
3901 && ((GET_CODE (operands[1]) != CONST_INT
3902 && ! easy_fp_constant (operands[1], mode))
3903 || (GET_CODE (operands[1]) == CONST_INT
3904 && num_insns_constant (operands[1], mode) > 2)
3905 || (GET_CODE (operands[0]) == REG
3906 && FP_REGNO_P (REGNO (operands[0]))))
3907 && GET_CODE (operands[1]) != HIGH
3908 && ! legitimate_constant_pool_address_p (operands[1])
3909 && ! toc_relative_expr_p (operands[1]))
3911 /* Emit a USE operation so that the constant isn't deleted if
3912 expensive optimizations are turned on because nobody
3913 references it. This should only be done for operands that
3914 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3915 This should not be done for operands that contain LABEL_REFs.
3916 For now, we just handle the obvious case. */
3917 if (GET_CODE (operands[1]) != LABEL_REF)
3918 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3921 /* Darwin uses a special PIC legitimizer. */
3922 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3925 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3927 if (operands[0] != operands[1])
3928 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3933 /* If we are to limit the number of things we put in the TOC and
3934 this is a symbol plus a constant we can add in one insn,
3935 just put the symbol in the TOC and add the constant. Don't do
3936 this if reload is in progress. */
3937 if (GET_CODE (operands[1]) == CONST
3938 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3939 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3940 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3941 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3942 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3943 && ! side_effects_p (operands[0]))
3946 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3947 rtx other = XEXP (XEXP (operands[1], 0), 1);
3949 sym = force_reg (mode, sym);
3951 emit_insn (gen_addsi3 (operands[0], sym, other));
3953 emit_insn (gen_adddi3 (operands[0], sym, other));
3957 operands[1] = force_const_mem (mode, operands[1]);
3960 && constant_pool_expr_p (XEXP (operands[1], 0))
3961 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3962 get_pool_constant (XEXP (operands[1], 0)),
3963 get_pool_mode (XEXP (operands[1], 0))))
3966 = gen_rtx_MEM (mode,
3967 create_TOC_reference (XEXP (operands[1], 0)));
3968 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3969 RTX_UNCHANGING_P (operands[1]) = 1;
3975 if (GET_CODE (operands[0]) == MEM
3976 && GET_CODE (XEXP (operands[0], 0)) != REG
3977 && ! reload_in_progress)
3979 = replace_equiv_address (operands[0],
3980 copy_addr_to_reg (XEXP (operands[0], 0)));
3982 if (GET_CODE (operands[1]) == MEM
3983 && GET_CODE (XEXP (operands[1], 0)) != REG
3984 && ! reload_in_progress)
3986 = replace_equiv_address (operands[1],
3987 copy_addr_to_reg (XEXP (operands[1], 0)));
3990 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3992 gen_rtx_SET (VOIDmode,
3993 operands[0], operands[1]),
3994 gen_rtx_CLOBBER (VOIDmode,
3995 gen_rtx_SCRATCH (SImode)))));
4004 /* Above, we may have called force_const_mem which may have returned
4005 an invalid address. If we can, fix this up; otherwise, reload will
4006 have to deal with it. */
4007 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4008 operands[1] = validize_mem (operands[1]);
4011 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4014 /* Nonzero if we can use a floating-point register to pass this arg. */
4015 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4016 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4017 && (CUM)->fregno <= FP_ARG_MAX_REG \
4018 && TARGET_HARD_FLOAT && TARGET_FPRS)
4020 /* Nonzero if we can use an AltiVec register to pass this arg. */
4021 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4022 (ALTIVEC_VECTOR_MODE (MODE) \
4023 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4024 && TARGET_ALTIVEC_ABI \
4027 /* Return a nonzero value to say to return the function value in
4028 memory, just as large structures are always returned. TYPE will be
4029 the data type of the value, and FNTYPE will be the type of the
4030 function doing the returning, or @code{NULL} for libcalls.
4032 The AIX ABI for the RS/6000 specifies that all structures are
4033 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4034 specifies that structures <= 8 bytes are returned in r3/r4, but a
4035 draft put them in memory, and GCC used to implement the draft
4036 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4037 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4038 compatibility can change DRAFT_V4_STRUCT_RET to override the
4039 default, and -m switches get the final word. See
4040 rs6000_override_options for more details.
4042 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4043 long double support is enabled. These values are returned in memory.
4045 int_size_in_bytes returns -1 for variable size objects, which go in
4046 memory always. The cast to unsigned makes -1 > 8. */
4049 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4051 if (AGGREGATE_TYPE_P (type)
4052 && (TARGET_AIX_STRUCT_RET
4053 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4055 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4060 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4061 for a call to a function whose data type is FNTYPE.
4062 For a library call, FNTYPE is 0.
4064 For incoming args we set the number of arguments in the prototype large
4065 so we never return a PARALLEL. */
4068 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4069 rtx libname ATTRIBUTE_UNUSED, int incoming,
4070 int libcall, int n_named_args)
4072 static CUMULATIVE_ARGS zero_cumulative;
4074 *cum = zero_cumulative;
4076 cum->fregno = FP_ARG_MIN_REG;
4077 cum->vregno = ALTIVEC_ARG_MIN_REG;
4078 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4079 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4080 ? CALL_LIBCALL : CALL_NORMAL);
4081 cum->sysv_gregno = GP_ARG_MIN_REG;
4082 cum->stdarg = fntype
4083 && (TYPE_ARG_TYPES (fntype) != 0
4084 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4085 != void_type_node));
4087 cum->nargs_prototype = 0;
4088 if (incoming || cum->prototype)
4089 cum->nargs_prototype = n_named_args;
4091 /* Check for a longcall attribute. */
4093 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4094 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4095 cum->call_cookie = CALL_LONG;
4097 if (TARGET_DEBUG_ARG)
4099 fprintf (stderr, "\ninit_cumulative_args:");
4102 tree ret_type = TREE_TYPE (fntype);
4103 fprintf (stderr, " ret code = %s,",
4104 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4107 if (cum->call_cookie & CALL_LONG)
4108 fprintf (stderr, " longcall,");
4110 fprintf (stderr, " proto = %d, nargs = %d\n",
4111 cum->prototype, cum->nargs_prototype);
4116 && TARGET_ALTIVEC_ABI
4117 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4119 error ("Cannot return value in vector register because"
4120 " altivec instructions are disabled, use -maltivec"
4121 " to enable them.");
4125 /* If defined, a C expression which determines whether, and in which
4126 direction, to pad out an argument with extra space. The value
4127 should be of type `enum direction': either `upward' to pad above
4128 the argument, `downward' to pad below, or `none' to inhibit
4131 For the AIX ABI structs are always stored left shifted in their
4135 function_arg_padding (enum machine_mode mode, tree type)
4137 #ifndef AGGREGATE_PADDING_FIXED
4138 #define AGGREGATE_PADDING_FIXED 0
4140 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4141 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4144 if (!AGGREGATE_PADDING_FIXED)
4146 /* GCC used to pass structures of the same size as integer types as
4147 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4148 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4149 passed padded downward, except that -mstrict-align further
4150 muddied the water in that multi-component structures of 2 and 4
4151 bytes in size were passed padded upward.
4153 The following arranges for best compatibility with previous
4154 versions of gcc, but removes the -mstrict-align dependency. */
4155 if (BYTES_BIG_ENDIAN)
4157 HOST_WIDE_INT size = 0;
4159 if (mode == BLKmode)
4161 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4162 size = int_size_in_bytes (type);
4165 size = GET_MODE_SIZE (mode);
4167 if (size == 1 || size == 2 || size == 4)
4173 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4175 if (type != 0 && AGGREGATE_TYPE_P (type))
4179 /* Fall back to the default. */
4180 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4183 /* If defined, a C expression that gives the alignment boundary, in bits,
4184 of an argument with the specified mode and type. If it is not defined,
4185 PARM_BOUNDARY is used for all arguments.
4187 V.4 wants long longs to be double word aligned. */
4190 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4192 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4194 else if (SPE_VECTOR_MODE (mode))
4196 else if (ALTIVEC_VECTOR_MODE (mode))
4199 return PARM_BOUNDARY;
4202 /* Compute the size (in words) of a function argument. */
4204 static unsigned long
4205 rs6000_arg_size (enum machine_mode mode, tree type)
4209 if (mode != BLKmode)
4210 size = GET_MODE_SIZE (mode);
4212 size = int_size_in_bytes (type);
4215 return (size + 3) >> 2;
4217 return (size + 7) >> 3;
4220 /* Update the data in CUM to advance over an argument
4221 of mode MODE and data type TYPE.
4222 (TYPE is null for libcalls where that information may not be available.)
4224 Note that for args passed by reference, function_arg will be called
4225 with MODE and TYPE set to that of the pointer to the arg, not the arg
4229 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4230 tree type, int named)
4232 cum->nargs_prototype--;
4234 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4238 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4241 if (!TARGET_ALTIVEC)
4242 error ("Cannot pass argument in vector register because"
4243 " altivec instructions are disabled, use -maltivec"
4244 " to enable them.");
4246 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4247 even if it is going to be passed in a vector register.
4248 Darwin does the same for variable-argument functions. */
4249 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4250 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4260 /* Vector parameters must be 16-byte aligned. This places
4261 them at 2 mod 4 in terms of words in 32-bit mode, since
4262 the parameter save area starts at offset 24 from the
4263 stack. In 64-bit mode, they just have to start on an
4264 even word, since the parameter save area is 16-byte
4265 aligned. Space for GPRs is reserved even if the argument
4266 will be passed in memory. */
4268 align = (2 - cum->words) & 3;
4270 align = cum->words & 1;
4271 cum->words += align + rs6000_arg_size (mode, type);
4273 if (TARGET_DEBUG_ARG)
4275 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4277 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4278 cum->nargs_prototype, cum->prototype,
4279 GET_MODE_NAME (mode));
4283 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4285 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4287 else if (DEFAULT_ABI == ABI_V4)
4289 if (TARGET_HARD_FLOAT && TARGET_FPRS
4290 && (mode == SFmode || mode == DFmode))
4292 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4297 cum->words += cum->words & 1;
4298 cum->words += rs6000_arg_size (mode, type);
4303 int n_words = rs6000_arg_size (mode, type);
4304 int gregno = cum->sysv_gregno;
4306 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4307 (r7,r8) or (r9,r10). As does any other 2 word item such
4308 as complex int due to a historical mistake. */
4310 gregno += (1 - gregno) & 1;
4312 /* Multi-reg args are not split between registers and stack. */
4313 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4315 /* Long long and SPE vectors are aligned on the stack.
4316 So are other 2 word items such as complex int due to
4317 a historical mistake. */
4319 cum->words += cum->words & 1;
4320 cum->words += n_words;
4323 /* Note: continuing to accumulate gregno past when we've started
4324 spilling to the stack indicates the fact that we've started
4325 spilling to the stack to expand_builtin_saveregs. */
4326 cum->sysv_gregno = gregno + n_words;
4329 if (TARGET_DEBUG_ARG)
4331 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4332 cum->words, cum->fregno);
4333 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4334 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4335 fprintf (stderr, "mode = %4s, named = %d\n",
4336 GET_MODE_NAME (mode), named);
4341 int n_words = rs6000_arg_size (mode, type);
4342 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4344 /* The simple alignment calculation here works because
4345 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4346 If we ever want to handle alignments larger than 8 bytes for
4347 32-bit or 16 bytes for 64-bit, then we'll need to take into
4348 account the offset to the start of the parm save area. */
4349 align &= cum->words;
4350 cum->words += align + n_words;
4352 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4353 && TARGET_HARD_FLOAT && TARGET_FPRS)
4354 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4356 if (TARGET_DEBUG_ARG)
4358 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4359 cum->words, cum->fregno);
4360 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4361 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4362 fprintf (stderr, "named = %d, align = %d\n", named, align);
4367 /* Determine where to put a SIMD argument on the SPE. */
4370 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4375 int gregno = cum->sysv_gregno;
4376 int n_words = rs6000_arg_size (mode, type);
4378 /* SPE vectors are put in odd registers. */
4379 if (n_words == 2 && (gregno & 1) == 0)
4382 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4385 enum machine_mode m = SImode;
4387 r1 = gen_rtx_REG (m, gregno);
4388 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4389 r2 = gen_rtx_REG (m, gregno + 1);
4390 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4391 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4398 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4399 return gen_rtx_REG (mode, cum->sysv_gregno);
4405 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4408 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4409 tree type, int align_words)
4413 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4414 in vararg list into zero, one or two GPRs */
4415 if (align_words >= GP_ARG_NUM_REG)
4416 return gen_rtx_PARALLEL (DFmode,
4418 gen_rtx_EXPR_LIST (VOIDmode,
4419 NULL_RTX, const0_rtx),
4420 gen_rtx_EXPR_LIST (VOIDmode,
4424 else if (align_words + rs6000_arg_size (mode, type)
4426 /* If this is partially on the stack, then we only
4427 include the portion actually in registers here. */
4428 return gen_rtx_PARALLEL (DFmode,
4430 gen_rtx_EXPR_LIST (VOIDmode,
4431 gen_rtx_REG (SImode,
4435 gen_rtx_EXPR_LIST (VOIDmode,
4440 /* split a DFmode arg into two GPRs */
4441 return gen_rtx_PARALLEL (DFmode,
4443 gen_rtx_EXPR_LIST (VOIDmode,
4444 gen_rtx_REG (SImode,
4448 gen_rtx_EXPR_LIST (VOIDmode,
4449 gen_rtx_REG (SImode,
4453 gen_rtx_EXPR_LIST (VOIDmode,
4454 gen_rtx_REG (mode, cum->fregno),
4457 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4459 else if (mode == DImode)
4461 if (align_words < GP_ARG_NUM_REG - 1)
4462 return gen_rtx_PARALLEL (DImode,
4464 gen_rtx_EXPR_LIST (VOIDmode,
4465 gen_rtx_REG (SImode,
4469 gen_rtx_EXPR_LIST (VOIDmode,
4470 gen_rtx_REG (SImode,
4474 else if (align_words == GP_ARG_NUM_REG - 1)
4475 return gen_rtx_PARALLEL (DImode,
4477 gen_rtx_EXPR_LIST (VOIDmode,
4478 NULL_RTX, const0_rtx),
4479 gen_rtx_EXPR_LIST (VOIDmode,
4480 gen_rtx_REG (SImode,
4485 else if (ALTIVEC_VECTOR_MODE (mode) && align_words == GP_ARG_NUM_REG - 2)
4487 /* Varargs vector regs must be saved in R9-R10. */
4488 return gen_rtx_PARALLEL (mode,
4490 gen_rtx_EXPR_LIST (VOIDmode,
4491 NULL_RTX, const0_rtx),
4492 gen_rtx_EXPR_LIST (VOIDmode,
4493 gen_rtx_REG (SImode,
4497 gen_rtx_EXPR_LIST (VOIDmode,
4498 gen_rtx_REG (SImode,
4503 else if ((mode == BLKmode || ALTIVEC_VECTOR_MODE (mode))
4504 && align_words <= (GP_ARG_NUM_REG - 1))
4506 /* AltiVec vector regs are saved in R5-R8. */
4508 int size = int_size_in_bytes (type);
4509 int no_units = ((size - 1) / 4) + 1;
4510 int max_no_words = GP_ARG_NUM_REG - align_words;
4511 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4512 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4514 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4516 for (k=0; k < rtlvec_len; k++)
4517 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4518 gen_rtx_REG (SImode,
4521 k == 0 ? const0_rtx : GEN_INT (k*4));
4523 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rtlvec));
4528 /* Determine where to put an argument to a function.
4529 Value is zero to push the argument on the stack,
4530 or a hard register in which to store the argument.
4532 MODE is the argument's machine mode.
4533 TYPE is the data type of the argument (as a tree).
4534 This is null for libcalls where that information may
4536 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4537 the preceding args and about the function being called.
4538 NAMED is nonzero if this argument is a named parameter
4539 (otherwise it is an extra parameter matching an ellipsis).
4541 On RS/6000 the first eight words of non-FP are normally in registers
4542 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4543 Under V.4, the first 8 FP args are in registers.
4545 If this is floating-point and no prototype is specified, we use
4546 both an FP and integer register (or possibly FP reg and stack). Library
4547 functions (when CALL_LIBCALL is set) always have the proper types for args,
4548 so we can pass the FP value just in one register. emit_library_function
4549 doesn't support PARALLEL anyway.
4551 Note that for args passed by reference, function_arg will be called
4552 with MODE and TYPE set to that of the pointer to the arg, not the arg
4556 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4557 tree type, int named)
4559 enum rs6000_abi abi = DEFAULT_ABI;
4561 /* Return a marker to indicate whether CR1 needs to set or clear the
4562 bit that V.4 uses to say fp args were passed in registers.
4563 Assume that we don't need the marker for software floating point,
4564 or compiler generated library calls. */
4565 if (mode == VOIDmode)
4568 && cum->nargs_prototype < 0
4569 && (cum->call_cookie & CALL_LIBCALL) == 0
4570 && (cum->prototype || TARGET_NO_PROTOTYPE))
4572 /* For the SPE, we need to crxor CR6 always. */
4574 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4575 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4576 return GEN_INT (cum->call_cookie
4577 | ((cum->fregno == FP_ARG_MIN_REG)
4578 ? CALL_V4_SET_FP_ARGS
4579 : CALL_V4_CLEAR_FP_ARGS));
4582 return GEN_INT (cum->call_cookie);
4585 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4586 if (TARGET_64BIT && ! cum->prototype)
4588 /* Vector parameters get passed in vector register
4589 and also in GPRs or memory, in absence of prototype. */
4592 align_words = (cum->words + 1) & ~1;
4594 if (align_words >= GP_ARG_NUM_REG)
4600 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4602 return gen_rtx_PARALLEL (mode,
4604 gen_rtx_EXPR_LIST (VOIDmode,
4606 gen_rtx_EXPR_LIST (VOIDmode,
4607 gen_rtx_REG (mode, cum->vregno),
4611 return gen_rtx_REG (mode, cum->vregno);
4612 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4614 if (named || abi == ABI_V4)
4618 /* Vector parameters to varargs functions under AIX or Darwin
4619 get passed in memory and possibly also in GPRs. */
4620 int align, align_words;
4621 enum machine_mode part_mode = mode;
4623 /* Vector parameters must be 16-byte aligned. This places them at
4624 2 mod 4 in terms of words in 32-bit mode, since the parameter
4625 save area starts at offset 24 from the stack. In 64-bit mode,
4626 they just have to start on an even word, since the parameter
4627 save area is 16-byte aligned. */
4629 align = (2 - cum->words) & 3;
4631 align = cum->words & 1;
4632 align_words = cum->words + align;
4634 /* Out of registers? Memory, then. */
4635 if (align_words >= GP_ARG_NUM_REG)
4638 /* The vector value goes in GPRs. Only the part of the
4639 value in GPRs is reported here. */
4640 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4642 /* Fortunately, there are only two possibilities, the value
4643 is either wholly in GPRs or half in GPRs and half not. */
4647 && (TARGET_POWERPC64 || (align_words == GP_ARG_NUM_REG - 2)))
4648 return rs6000_mixed_function_arg (cum, part_mode, type, align_words);
4650 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4653 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4654 return rs6000_spe_function_arg (cum, mode, type);
4655 else if (abi == ABI_V4)
4657 if (TARGET_HARD_FLOAT && TARGET_FPRS
4658 && (mode == SFmode || mode == DFmode))
4660 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4661 return gen_rtx_REG (mode, cum->fregno);
4667 int n_words = rs6000_arg_size (mode, type);
4668 int gregno = cum->sysv_gregno;
4670 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4671 (r7,r8) or (r9,r10). As does any other 2 word item such
4672 as complex int due to a historical mistake. */
4674 gregno += (1 - gregno) & 1;
4676 /* Multi-reg args are not split between registers and stack. */
4677 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4678 return gen_rtx_REG (mode, gregno);
4685 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4686 int align_words = cum->words + (cum->words & align);
4688 if (USE_FP_FOR_ARG_P (cum, mode, type))
4693 enum machine_mode fmode = mode;
4695 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4697 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4699 /* Long double split over regs and memory. */
4700 if (fmode == TFmode)
4703 /* Currently, we only ever need one reg here because complex
4704 doubles are split. */
4705 if (cum->fregno != FP_ARG_MAX_REG - 1)
4708 fpr[1] = gen_rtx_REG (fmode, cum->fregno);
4710 /* Do we also need to pass this arg in the parameter save
4713 && (cum->nargs_prototype <= 0
4714 || (DEFAULT_ABI == ABI_AIX
4716 && align_words >= GP_ARG_NUM_REG)));
4718 if (!needs_psave && mode == fmode)
4721 if (TARGET_32BIT && TARGET_POWERPC64
4722 && mode == DFmode && cum->stdarg)
4723 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4725 /* Describe where this piece goes. */
4727 *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
4732 /* Now describe the part that goes in gprs or the stack.
4733 This piece must come first, before the fprs. */
4735 if (align_words < GP_ARG_NUM_REG)
4737 unsigned long n_words = rs6000_arg_size (mode, type);
4738 enum machine_mode rmode = mode;
4740 if (align_words + n_words > GP_ARG_NUM_REG)
4741 /* If this is partially on the stack, then we only
4742 include the portion actually in registers here.
4743 We know this can only be one register because
4744 complex doubles are splt. */
4746 reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
4748 *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4752 return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
4754 else if (align_words < GP_ARG_NUM_REG)
4756 if (TARGET_32BIT && TARGET_POWERPC64
4757 && (mode == DImode || mode == BLKmode))
4758 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4760 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4767 /* For an arg passed partly in registers and partly in memory,
4768 this is the number of registers used.
4769 For args passed entirely in registers or entirely in memory, zero. */
4772 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4773 tree type, int named)
4777 if (DEFAULT_ABI == ABI_V4)
4780 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4781 && cum->nargs_prototype >= 0)
4784 if (USE_FP_FOR_ARG_P (cum, mode, type))
4786 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4787 ret = FP_ARG_MAX_REG - cum->fregno;
4788 else if (cum->nargs_prototype >= 0)
4792 if (cum->words < GP_ARG_NUM_REG
4793 && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
4794 ret = GP_ARG_NUM_REG - cum->words;
4796 if (ret != 0 && TARGET_DEBUG_ARG)
4797 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4802 /* A C expression that indicates when an argument must be passed by
4803 reference. If nonzero for an argument, a copy of that argument is
4804 made in memory and a pointer to the argument is passed instead of
4805 the argument itself. The pointer is passed in whatever way is
4806 appropriate for passing a pointer to that type.
4808 Under V.4, aggregates and long double are passed by reference.
4810 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
4811 reference unless the AltiVec vector extension ABI is in force.
4813 As an extension to all ABIs, variable sized types are passed by
4817 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4818 enum machine_mode mode ATTRIBUTE_UNUSED,
4819 tree type, int named ATTRIBUTE_UNUSED)
4821 if ((DEFAULT_ABI == ABI_V4
4822 && ((type && AGGREGATE_TYPE_P (type))
4824 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4825 || (type && int_size_in_bytes (type) < 0))
4827 if (TARGET_DEBUG_ARG)
4828 fprintf (stderr, "function_arg_pass_by_reference\n");
4836 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4839 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4844 for (i = 0; i < nregs; i++)
4846 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4847 if (reload_completed)
4849 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4852 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4853 i * GET_MODE_SIZE(reg_mode));
4856 tem = replace_equiv_address (tem, XEXP (tem, 0));
4858 if (tem == NULL_RTX)
4861 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4866 /* Perform any needed actions needed for a function that is receiving a
4867 variable number of arguments.
4871 MODE and TYPE are the mode and type of the current parameter.
4873 PRETEND_SIZE is a variable that should be set to the amount of stack
4874 that must be pushed by the prolog to pretend that our caller pushed
4877 Normally, this macro will push all remaining incoming registers on the
4878 stack and set PRETEND_SIZE to the length of the registers pushed. */
4881 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4882 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4884 CUMULATIVE_ARGS next_cum;
4885 int reg_size = TARGET_32BIT ? 4 : 8;
4886 rtx save_area = NULL_RTX, mem;
4887 int first_reg_offset, set;
4889 /* Skip the last named argument. */
4891 function_arg_advance (&next_cum, mode, type, 1);
4893 if (DEFAULT_ABI == ABI_V4)
4895 /* Indicate to allocate space on the stack for varargs save area. */
4896 cfun->machine->sysv_varargs_p = 1;
4898 save_area = plus_constant (virtual_stack_vars_rtx,
4899 - RS6000_VARARGS_SIZE);
4901 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4905 first_reg_offset = next_cum.words;
4906 save_area = virtual_incoming_args_rtx;
4907 cfun->machine->sysv_varargs_p = 0;
4909 if (MUST_PASS_IN_STACK (mode, type))
4910 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4913 set = get_varargs_alias_set ();
4914 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4916 mem = gen_rtx_MEM (BLKmode,
4917 plus_constant (save_area,
4918 first_reg_offset * reg_size)),
4919 set_mem_alias_set (mem, set);
4920 set_mem_align (mem, BITS_PER_WORD);
4922 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4923 GP_ARG_NUM_REG - first_reg_offset);
4926 /* Save FP registers if needed. */
4927 if (DEFAULT_ABI == ABI_V4
4928 && TARGET_HARD_FLOAT && TARGET_FPRS
4930 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4932 int fregno = next_cum.fregno;
4933 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4934 rtx lab = gen_label_rtx ();
4935 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4937 emit_jump_insn (gen_rtx_SET (VOIDmode,
4939 gen_rtx_IF_THEN_ELSE (VOIDmode,
4940 gen_rtx_NE (VOIDmode, cr1,
4942 gen_rtx_LABEL_REF (VOIDmode, lab),
4945 while (fregno <= FP_ARG_V4_MAX_REG)
4947 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4948 set_mem_alias_set (mem, set);
4949 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4958 /* Create the va_list data type. */
4961 rs6000_build_builtin_va_list (void)
4963 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4965 /* For AIX, prefer 'char *' because that's what the system
4966 header files like. */
4967 if (DEFAULT_ABI != ABI_V4)
4968 return build_pointer_type (char_type_node);
4970 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4971 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4973 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4974 unsigned_char_type_node);
4975 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4976 unsigned_char_type_node);
4977 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4979 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4980 short_unsigned_type_node);
4981 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4983 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4986 DECL_FIELD_CONTEXT (f_gpr) = record;
4987 DECL_FIELD_CONTEXT (f_fpr) = record;
4988 DECL_FIELD_CONTEXT (f_res) = record;
4989 DECL_FIELD_CONTEXT (f_ovf) = record;
4990 DECL_FIELD_CONTEXT (f_sav) = record;
4992 TREE_CHAIN (record) = type_decl;
4993 TYPE_NAME (record) = type_decl;
4994 TYPE_FIELDS (record) = f_gpr;
4995 TREE_CHAIN (f_gpr) = f_fpr;
4996 TREE_CHAIN (f_fpr) = f_res;
4997 TREE_CHAIN (f_res) = f_ovf;
4998 TREE_CHAIN (f_ovf) = f_sav;
5000 layout_type (record);
5002 /* The correct type is an array type of one element. */
5003 return build_array_type (record, build_index_type (size_zero_node));
5006 /* Implement va_start. */
5009 rs6000_va_start (tree valist, rtx nextarg)
5011 HOST_WIDE_INT words, n_gpr, n_fpr;
5012 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5013 tree gpr, fpr, ovf, sav, t;
5015 /* Only SVR4 needs something special. */
5016 if (DEFAULT_ABI != ABI_V4)
5018 std_expand_builtin_va_start (valist, nextarg);
5022 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5023 f_fpr = TREE_CHAIN (f_gpr);
5024 f_res = TREE_CHAIN (f_fpr);
5025 f_ovf = TREE_CHAIN (f_res);
5026 f_sav = TREE_CHAIN (f_ovf);
5028 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5029 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5030 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5031 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5032 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5034 /* Count number of gp and fp argument registers used. */
5035 words = current_function_args_info.words;
5036 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5037 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5039 if (TARGET_DEBUG_ARG)
5040 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5041 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5042 words, n_gpr, n_fpr);
5044 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5045 TREE_SIDE_EFFECTS (t) = 1;
5046 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5048 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5049 TREE_SIDE_EFFECTS (t) = 1;
5050 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5052 /* Find the overflow area. */
5053 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5055 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5056 build_int_2 (words * UNITS_PER_WORD, 0));
5057 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5058 TREE_SIDE_EFFECTS (t) = 1;
5059 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5061 /* Find the register save area. */
5062 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5063 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5064 build_int_2 (-RS6000_VARARGS_SIZE, -1));
5065 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5066 TREE_SIDE_EFFECTS (t) = 1;
5067 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5070 /* Implement va_arg. */
5073 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5075 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5076 tree gpr, fpr, ovf, sav, reg, t, u;
5077 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
5078 tree lab_false, lab_over, addr;
5080 tree ptrtype = build_pointer_type (type);
5082 if (DEFAULT_ABI != ABI_V4)
5084 /* Variable sized types are passed by reference, as are AltiVec
5085 vectors when 32-bit and not using the AltiVec ABI extension. */
5086 if (int_size_in_bytes (type) < 0
5088 && !TARGET_ALTIVEC_ABI
5089 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
5091 /* Args grow upward. */
5092 t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
5093 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
5094 t = build1 (NOP_EXPR, build_pointer_type (ptrtype), t);
5095 t = build_fold_indirect_ref (t);
5096 return build_fold_indirect_ref (t);
5098 if (targetm.calls.split_complex_arg
5099 && TREE_CODE (type) == COMPLEX_TYPE)
5101 tree elem_type = TREE_TYPE (type);
5102 enum machine_mode elem_mode = TYPE_MODE (elem_type);
5103 int elem_size = GET_MODE_SIZE (elem_mode);
5105 if (elem_size < UNITS_PER_WORD)
5107 tree real_part, imag_part;
5108 tree post = NULL_TREE;
5110 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5112 /* Copy the value into a temporary, lest the formal temporary
5113 be reused out from under us. */
5114 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5115 append_to_statement_list (post, pre_p);
5117 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5120 return build (COMPLEX_EXPR, type, real_part, imag_part);
5124 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
5127 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5128 f_fpr = TREE_CHAIN (f_gpr);
5129 f_res = TREE_CHAIN (f_fpr);
5130 f_ovf = TREE_CHAIN (f_res);
5131 f_sav = TREE_CHAIN (f_ovf);
5133 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5134 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5135 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5136 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5137 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5139 size = int_size_in_bytes (type);
5140 rsize = (size + 3) / 4;
5143 if (AGGREGATE_TYPE_P (type)
5144 || TYPE_MODE (type) == TFmode
5145 || (!TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type))))
5147 /* Aggregates, long doubles, and AltiVec vectors are passed by
5157 else if (TARGET_HARD_FLOAT && TARGET_FPRS
5158 && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5160 /* FP args go in FP registers, if present. */
5166 if (TYPE_MODE (type) == DFmode)
5171 /* Otherwise into GP registers. */
5181 /* Pull the value out of the saved registers.... */
5184 addr = create_tmp_var (ptr_type_node, "addr");
5185 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5187 /* AltiVec vectors never go in registers when -mabi=altivec. */
5188 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5192 lab_false = create_artificial_label ();
5193 lab_over = create_artificial_label ();
5195 /* Long long and SPE vectors are aligned in the registers.
5196 As are any other 2 gpr item such as complex int due to a
5197 historical mistake. */
5201 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5202 build_int_2 (n_reg - 1, 0));
5203 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
5206 t = build_int_2 (8 - n_reg + 1, 0);
5207 TREE_TYPE (t) = TREE_TYPE (reg);
5208 t = build2 (GE_EXPR, boolean_type_node, u, t);
5209 u = build1 (GOTO_EXPR, void_type_node, lab_false);
5210 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5211 gimplify_and_add (t, pre_p);
5215 t = build2 (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5217 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
5218 build_int_2 (n_reg, 0));
5219 u = build1 (CONVERT_EXPR, integer_type_node, u);
5220 u = build2 (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
5221 t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5223 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5224 gimplify_and_add (t, pre_p);
5226 t = build1 (GOTO_EXPR, void_type_node, lab_over);
5227 gimplify_and_add (t, pre_p);
5229 t = build1 (LABEL_EXPR, void_type_node, lab_false);
5230 append_to_statement_list (t, pre_p);
5234 /* Ensure that we don't find any more args in regs.
5235 Alignment has taken care of the n_reg == 2 case. */
5236 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
5237 gimplify_and_add (t, pre_p);
5241 /* ... otherwise out of the overflow area. */
5243 /* Care for on-stack alignment if needed. */
5247 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (align - 1, 0));
5248 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align, -1));
5250 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5252 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5253 gimplify_and_add (u, pre_p);
5255 t = build2 (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5256 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5257 gimplify_and_add (t, pre_p);
5261 t = build1 (LABEL_EXPR, void_type_node, lab_over);
5262 append_to_statement_list (t, pre_p);
5267 addr = fold_convert (build_pointer_type (ptrtype), addr);
5268 addr = build_fold_indirect_ref (addr);
5271 addr = fold_convert (ptrtype, addr);
5273 return build_fold_indirect_ref (addr);
5278 #define def_builtin(MASK, NAME, TYPE, CODE) \
5280 if ((MASK) & target_flags) \
5281 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5285 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5287 static const struct builtin_description bdesc_3arg[] =
5289 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5290 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5291 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5292 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5293 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5294 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5295 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5296 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5297 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5298 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5299 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5300 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5301 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5302 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5303 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5304 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5305 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5306 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5307 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5308 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5309 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5310 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5311 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5314 /* DST operations: void foo (void *, const int, const char). */
5316 static const struct builtin_description bdesc_dst[] =
5318 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5319 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5320 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5321 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5324 /* Simple binary operations: VECc = foo (VECa, VECb). */
5326 static struct builtin_description bdesc_2arg[] =
5328 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5329 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5330 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5331 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5332 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5333 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5334 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5335 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5336 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5337 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5338 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5339 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5340 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5341 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5342 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5343 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5344 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5345 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5346 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5347 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5348 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5349 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5350 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5351 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5352 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5353 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5354 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5355 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5356 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5357 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5358 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5359 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5360 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5361 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5362 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5363 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5364 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5365 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5366 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5367 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5368 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5369 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5370 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5371 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5372 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5373 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5374 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5375 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5376 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5377 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5378 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5379 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5380 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5381 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5382 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5383 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5384 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5385 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5386 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5387 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5388 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5389 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5390 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5391 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5392 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5393 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5394 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5395 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5396 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5397 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5398 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5399 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5400 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5401 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5402 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5403 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5404 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5405 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5406 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5407 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5408 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5409 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5410 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5411 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5412 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5413 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5414 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5415 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5416 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5417 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5418 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5419 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5420 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5421 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5422 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5423 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5424 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5425 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5426 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5427 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5428 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5429 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5430 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5431 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5432 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5433 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5434 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5435 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5436 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5437 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5438 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5439 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5440 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5442 /* Place holder, leave as first spe builtin. */
5443 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5444 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5445 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5446 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5447 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5448 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5449 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5450 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5451 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5452 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5453 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5454 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5455 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5456 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5457 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5458 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5459 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5460 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5461 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5462 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5463 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5464 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5465 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5466 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5467 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5468 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5469 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5470 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5471 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5472 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5473 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5474 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5475 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5476 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5477 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5478 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5479 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5480 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5481 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5482 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5483 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5484 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5485 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5486 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5487 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5488 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5489 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5490 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5491 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5492 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5493 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5494 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5495 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5496 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5497 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5498 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5499 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5500 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5501 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5502 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5503 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5504 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5505 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5506 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5507 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5508 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5509 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5510 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5511 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5512 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5513 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5514 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5515 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5516 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5517 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5518 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5519 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5520 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5521 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5522 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5523 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5524 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5525 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5526 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5527 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5528 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5529 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5530 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5531 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5532 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5533 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5534 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5535 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5536 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5537 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5538 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5539 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5540 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5541 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5542 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5543 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5544 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5545 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5546 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5547 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5548 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5549 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5550 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5551 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5553 /* SPE binary operations expecting a 5-bit unsigned literal. */
5554 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5556 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5557 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5558 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5559 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5560 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5561 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5562 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5563 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5564 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5565 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5566 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5567 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5568 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5569 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5570 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5571 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5572 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5573 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5574 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5575 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5576 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5577 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5578 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5579 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5580 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5581 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5583 /* Place-holder. Leave as last binary SPE builtin. */
5584 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5587 /* AltiVec predicates. */
5589 struct builtin_description_predicates
5591 const unsigned int mask;
5592 const enum insn_code icode;
5594 const char *const name;
5595 const enum rs6000_builtins code;
5598 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5600 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5601 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5602 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5603 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5604 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5605 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5606 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5607 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5608 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5609 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5610 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5611 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5612 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5615 /* SPE predicates. */
5616 static struct builtin_description bdesc_spe_predicates[] =
5618 /* Place-holder. Leave as first. */
5619 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5620 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5621 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5622 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5623 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5624 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5625 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5626 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5627 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5628 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5629 /* Place-holder. Leave as last. */
5630 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5633 /* SPE evsel predicates. */
5634 static struct builtin_description bdesc_spe_evsel[] =
5636 /* Place-holder. Leave as first. */
5637 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5638 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5639 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5640 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5641 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5642 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5643 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5644 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5645 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5646 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5647 /* Place-holder. Leave as last. */
5648 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5651 /* ABS* operations. */
5653 static const struct builtin_description bdesc_abs[] =
5655 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5656 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5657 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5658 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5659 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5660 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5661 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5664 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5667 static struct builtin_description bdesc_1arg[] =
5669 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5670 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5671 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5672 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5673 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5674 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5675 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5676 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5677 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5678 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5679 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5680 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5681 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5682 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5683 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5684 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5685 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5687 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5688 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5689 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5690 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5691 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5692 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5693 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5694 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5695 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5696 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5697 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5698 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5699 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5700 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5701 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5702 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5703 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5704 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5705 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5706 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5707 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5708 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5709 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5710 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5711 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5712 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5713 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5714 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5715 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5716 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5718 /* Place-holder. Leave as last unary SPE builtin. */
5719 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5723 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5726 tree arg0 = TREE_VALUE (arglist);
5727 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5728 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5729 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5731 if (icode == CODE_FOR_nothing)
5732 /* Builtin not supported on this processor. */
5735 /* If we got invalid arguments bail out before generating bad rtl. */
5736 if (arg0 == error_mark_node)
5739 if (icode == CODE_FOR_altivec_vspltisb
5740 || icode == CODE_FOR_altivec_vspltish
5741 || icode == CODE_FOR_altivec_vspltisw
5742 || icode == CODE_FOR_spe_evsplatfi
5743 || icode == CODE_FOR_spe_evsplati)
5745 /* Only allow 5-bit *signed* literals. */
5746 if (GET_CODE (op0) != CONST_INT
5747 || INTVAL (op0) > 0x1f
5748 || INTVAL (op0) < -0x1f)
5750 error ("argument 1 must be a 5-bit signed literal");
5756 || GET_MODE (target) != tmode
5757 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5758 target = gen_reg_rtx (tmode);
5760 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5761 op0 = copy_to_mode_reg (mode0, op0);
5763 pat = GEN_FCN (icode) (target, op0);
5772 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5774 rtx pat, scratch1, scratch2;
5775 tree arg0 = TREE_VALUE (arglist);
5776 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5777 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5778 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5780 /* If we have invalid arguments, bail out before generating bad rtl. */
5781 if (arg0 == error_mark_node)
5785 || GET_MODE (target) != tmode
5786 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5787 target = gen_reg_rtx (tmode);
5789 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5790 op0 = copy_to_mode_reg (mode0, op0);
5792 scratch1 = gen_reg_rtx (mode0);
5793 scratch2 = gen_reg_rtx (mode0);
5795 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5804 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5807 tree arg0 = TREE_VALUE (arglist);
5808 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5809 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5810 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5811 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5812 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5813 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5815 if (icode == CODE_FOR_nothing)
5816 /* Builtin not supported on this processor. */
5819 /* If we got invalid arguments bail out before generating bad rtl. */
5820 if (arg0 == error_mark_node || arg1 == error_mark_node)
5823 if (icode == CODE_FOR_altivec_vcfux
5824 || icode == CODE_FOR_altivec_vcfsx
5825 || icode == CODE_FOR_altivec_vctsxs
5826 || icode == CODE_FOR_altivec_vctuxs
5827 || icode == CODE_FOR_altivec_vspltb
5828 || icode == CODE_FOR_altivec_vsplth
5829 || icode == CODE_FOR_altivec_vspltw
5830 || icode == CODE_FOR_spe_evaddiw
5831 || icode == CODE_FOR_spe_evldd
5832 || icode == CODE_FOR_spe_evldh
5833 || icode == CODE_FOR_spe_evldw
5834 || icode == CODE_FOR_spe_evlhhesplat
5835 || icode == CODE_FOR_spe_evlhhossplat
5836 || icode == CODE_FOR_spe_evlhhousplat
5837 || icode == CODE_FOR_spe_evlwhe
5838 || icode == CODE_FOR_spe_evlwhos
5839 || icode == CODE_FOR_spe_evlwhou
5840 || icode == CODE_FOR_spe_evlwhsplat
5841 || icode == CODE_FOR_spe_evlwwsplat
5842 || icode == CODE_FOR_spe_evrlwi
5843 || icode == CODE_FOR_spe_evslwi
5844 || icode == CODE_FOR_spe_evsrwis
5845 || icode == CODE_FOR_spe_evsubifw
5846 || icode == CODE_FOR_spe_evsrwiu)
5848 /* Only allow 5-bit unsigned literals. */
5850 if (TREE_CODE (arg1) != INTEGER_CST
5851 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5853 error ("argument 2 must be a 5-bit unsigned literal");
5859 || GET_MODE (target) != tmode
5860 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5861 target = gen_reg_rtx (tmode);
5863 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5864 op0 = copy_to_mode_reg (mode0, op0);
5865 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5866 op1 = copy_to_mode_reg (mode1, op1);
5868 pat = GEN_FCN (icode) (target, op0, op1);
5877 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5878 tree arglist, rtx target)
5881 tree cr6_form = TREE_VALUE (arglist);
5882 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5883 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5884 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5885 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5886 enum machine_mode tmode = SImode;
5887 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5888 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5891 if (TREE_CODE (cr6_form) != INTEGER_CST)
5893 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5897 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5902 /* If we have invalid arguments, bail out before generating bad rtl. */
5903 if (arg0 == error_mark_node || arg1 == error_mark_node)
5907 || GET_MODE (target) != tmode
5908 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5909 target = gen_reg_rtx (tmode);
5911 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5912 op0 = copy_to_mode_reg (mode0, op0);
5913 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5914 op1 = copy_to_mode_reg (mode1, op1);
5916 scratch = gen_reg_rtx (mode0);
5918 pat = GEN_FCN (icode) (scratch, op0, op1,
5919 gen_rtx_SYMBOL_REF (Pmode, opcode));
5924 /* The vec_any* and vec_all* predicates use the same opcodes for two
5925 different operations, but the bits in CR6 will be different
5926 depending on what information we want. So we have to play tricks
5927 with CR6 to get the right bits out.
5929 If you think this is disgusting, look at the specs for the
5930 AltiVec predicates. */
5932 switch (cr6_form_int)
5935 emit_insn (gen_cr6_test_for_zero (target));
5938 emit_insn (gen_cr6_test_for_zero_reverse (target));
5941 emit_insn (gen_cr6_test_for_lt (target));
5944 emit_insn (gen_cr6_test_for_lt_reverse (target));
5947 error ("argument 1 of __builtin_altivec_predicate is out of range");
5955 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5958 tree arg0 = TREE_VALUE (arglist);
5959 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5960 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5961 enum machine_mode mode0 = Pmode;
5962 enum machine_mode mode1 = Pmode;
5963 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5964 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5966 if (icode == CODE_FOR_nothing)
5967 /* Builtin not supported on this processor. */
5970 /* If we got invalid arguments bail out before generating bad rtl. */
5971 if (arg0 == error_mark_node || arg1 == error_mark_node)
5975 || GET_MODE (target) != tmode
5976 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5977 target = gen_reg_rtx (tmode);
5979 op1 = copy_to_mode_reg (mode1, op1);
5981 if (op0 == const0_rtx)
5983 addr = gen_rtx_MEM (tmode, op1);
5987 op0 = copy_to_mode_reg (mode0, op0);
5988 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5991 pat = GEN_FCN (icode) (target, addr);
6001 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6003 tree arg0 = TREE_VALUE (arglist);
6004 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6005 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6006 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6007 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6008 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6010 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6011 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6012 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6014 /* Invalid arguments. Bail before doing anything stoopid! */
6015 if (arg0 == error_mark_node
6016 || arg1 == error_mark_node
6017 || arg2 == error_mark_node)
6020 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6021 op0 = copy_to_mode_reg (mode2, op0);
6022 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6023 op1 = copy_to_mode_reg (mode0, op1);
6024 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6025 op2 = copy_to_mode_reg (mode1, op2);
6027 pat = GEN_FCN (icode) (op1, op2, op0);
6034 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6036 tree arg0 = TREE_VALUE (arglist);
6037 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6038 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6039 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6040 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6041 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6043 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6044 enum machine_mode mode1 = Pmode;
6045 enum machine_mode mode2 = Pmode;
6047 /* Invalid arguments. Bail before doing anything stoopid! */
6048 if (arg0 == error_mark_node
6049 || arg1 == error_mark_node
6050 || arg2 == error_mark_node)
6053 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6054 op0 = copy_to_mode_reg (tmode, op0);
6056 op2 = copy_to_mode_reg (mode2, op2);
6058 if (op1 == const0_rtx)
6060 addr = gen_rtx_MEM (tmode, op2);
6064 op1 = copy_to_mode_reg (mode1, op1);
6065 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6068 pat = GEN_FCN (icode) (addr, op0);
6075 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6078 tree arg0 = TREE_VALUE (arglist);
6079 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6080 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6081 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6082 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6083 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6084 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6085 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6086 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6087 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6089 if (icode == CODE_FOR_nothing)
6090 /* Builtin not supported on this processor. */
6093 /* If we got invalid arguments bail out before generating bad rtl. */
6094 if (arg0 == error_mark_node
6095 || arg1 == error_mark_node
6096 || arg2 == error_mark_node)
6099 if (icode == CODE_FOR_altivec_vsldoi_4sf
6100 || icode == CODE_FOR_altivec_vsldoi_4si
6101 || icode == CODE_FOR_altivec_vsldoi_8hi
6102 || icode == CODE_FOR_altivec_vsldoi_16qi)
6104 /* Only allow 4-bit unsigned literals. */
6106 if (TREE_CODE (arg2) != INTEGER_CST
6107 || TREE_INT_CST_LOW (arg2) & ~0xf)
6109 error ("argument 3 must be a 4-bit unsigned literal");
6115 || GET_MODE (target) != tmode
6116 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6117 target = gen_reg_rtx (tmode);
6119 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6120 op0 = copy_to_mode_reg (mode0, op0);
6121 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6122 op1 = copy_to_mode_reg (mode1, op1);
6123 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6124 op2 = copy_to_mode_reg (mode2, op2);
6126 pat = GEN_FCN (icode) (target, op0, op1, op2);
6134 /* Expand the lvx builtins. */
6136 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6138 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6139 tree arglist = TREE_OPERAND (exp, 1);
6140 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6142 enum machine_mode tmode, mode0;
6144 enum insn_code icode;
6148 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6149 icode = CODE_FOR_altivec_lvx_16qi;
6151 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6152 icode = CODE_FOR_altivec_lvx_8hi;
6154 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6155 icode = CODE_FOR_altivec_lvx_4si;
6157 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6158 icode = CODE_FOR_altivec_lvx_4sf;
6167 arg0 = TREE_VALUE (arglist);
6168 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6169 tmode = insn_data[icode].operand[0].mode;
6170 mode0 = insn_data[icode].operand[1].mode;
6173 || GET_MODE (target) != tmode
6174 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6175 target = gen_reg_rtx (tmode);
6177 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6178 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6180 pat = GEN_FCN (icode) (target, op0);
6187 /* Expand the stvx builtins. */
6189 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6192 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6193 tree arglist = TREE_OPERAND (exp, 1);
6194 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6196 enum machine_mode mode0, mode1;
6198 enum insn_code icode;
6202 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6203 icode = CODE_FOR_altivec_stvx_16qi;
6205 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6206 icode = CODE_FOR_altivec_stvx_8hi;
6208 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6209 icode = CODE_FOR_altivec_stvx_4si;
6211 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6212 icode = CODE_FOR_altivec_stvx_4sf;
6219 arg0 = TREE_VALUE (arglist);
6220 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6221 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6222 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6223 mode0 = insn_data[icode].operand[0].mode;
6224 mode1 = insn_data[icode].operand[1].mode;
6226 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6227 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6228 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6229 op1 = copy_to_mode_reg (mode1, op1);
6231 pat = GEN_FCN (icode) (op0, op1);
6239 /* Expand the dst builtins. */
6241 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6244 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6245 tree arglist = TREE_OPERAND (exp, 1);
6246 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6247 tree arg0, arg1, arg2;
6248 enum machine_mode mode0, mode1, mode2;
6249 rtx pat, op0, op1, op2;
6250 struct builtin_description *d;
6255 /* Handle DST variants. */
6256 d = (struct builtin_description *) bdesc_dst;
6257 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6258 if (d->code == fcode)
6260 arg0 = TREE_VALUE (arglist);
6261 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6262 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6263 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6264 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6265 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6266 mode0 = insn_data[d->icode].operand[0].mode;
6267 mode1 = insn_data[d->icode].operand[1].mode;
6268 mode2 = insn_data[d->icode].operand[2].mode;
6270 /* Invalid arguments, bail out before generating bad rtl. */
6271 if (arg0 == error_mark_node
6272 || arg1 == error_mark_node
6273 || arg2 == error_mark_node)
6278 if (TREE_CODE (arg2) != INTEGER_CST
6279 || TREE_INT_CST_LOW (arg2) & ~0x3)
6281 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6285 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6286 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6287 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6288 op1 = copy_to_mode_reg (mode1, op1);
6290 pat = GEN_FCN (d->icode) (op0, op1, op2);
6300 /* Expand the builtin in EXP and store the result in TARGET. Store
6301 true in *EXPANDEDP if we found a builtin to expand. */
6303 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6305 struct builtin_description *d;
6306 struct builtin_description_predicates *dp;
6308 enum insn_code icode;
6309 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6310 tree arglist = TREE_OPERAND (exp, 1);
6313 enum machine_mode tmode, mode0;
6314 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6316 target = altivec_expand_ld_builtin (exp, target, expandedp);
6320 target = altivec_expand_st_builtin (exp, target, expandedp);
6324 target = altivec_expand_dst_builtin (exp, target, expandedp);
6332 case ALTIVEC_BUILTIN_STVX:
6333 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6334 case ALTIVEC_BUILTIN_STVEBX:
6335 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6336 case ALTIVEC_BUILTIN_STVEHX:
6337 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6338 case ALTIVEC_BUILTIN_STVEWX:
6339 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6340 case ALTIVEC_BUILTIN_STVXL:
6341 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6343 case ALTIVEC_BUILTIN_MFVSCR:
6344 icode = CODE_FOR_altivec_mfvscr;
6345 tmode = insn_data[icode].operand[0].mode;
6348 || GET_MODE (target) != tmode
6349 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6350 target = gen_reg_rtx (tmode);
6352 pat = GEN_FCN (icode) (target);
6358 case ALTIVEC_BUILTIN_MTVSCR:
6359 icode = CODE_FOR_altivec_mtvscr;
6360 arg0 = TREE_VALUE (arglist);
6361 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6362 mode0 = insn_data[icode].operand[0].mode;
6364 /* If we got invalid arguments bail out before generating bad rtl. */
6365 if (arg0 == error_mark_node)
6368 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6369 op0 = copy_to_mode_reg (mode0, op0);
6371 pat = GEN_FCN (icode) (op0);
6376 case ALTIVEC_BUILTIN_DSSALL:
6377 emit_insn (gen_altivec_dssall ());
6380 case ALTIVEC_BUILTIN_DSS:
6381 icode = CODE_FOR_altivec_dss;
6382 arg0 = TREE_VALUE (arglist);
6384 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6385 mode0 = insn_data[icode].operand[0].mode;
6387 /* If we got invalid arguments bail out before generating bad rtl. */
6388 if (arg0 == error_mark_node)
6391 if (TREE_CODE (arg0) != INTEGER_CST
6392 || TREE_INT_CST_LOW (arg0) & ~0x3)
6394 error ("argument to dss must be a 2-bit unsigned literal");
6398 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6399 op0 = copy_to_mode_reg (mode0, op0);
6401 emit_insn (gen_altivec_dss (op0));
6404 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6405 arg0 = TREE_VALUE (arglist);
6406 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6407 arg0 = TREE_OPERAND (arg0, 0);
6408 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6409 TREE_STRING_POINTER (arg0));
6414 /* Expand abs* operations. */
6415 d = (struct builtin_description *) bdesc_abs;
6416 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6417 if (d->code == fcode)
6418 return altivec_expand_abs_builtin (d->icode, arglist, target);
6420 /* Expand the AltiVec predicates. */
6421 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6422 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6423 if (dp->code == fcode)
6424 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6426 /* LV* are funky. We initialized them differently. */
6429 case ALTIVEC_BUILTIN_LVSL:
6430 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6432 case ALTIVEC_BUILTIN_LVSR:
6433 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6435 case ALTIVEC_BUILTIN_LVEBX:
6436 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6438 case ALTIVEC_BUILTIN_LVEHX:
6439 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6441 case ALTIVEC_BUILTIN_LVEWX:
6442 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6444 case ALTIVEC_BUILTIN_LVXL:
6445 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6447 case ALTIVEC_BUILTIN_LVX:
6448 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6459 /* Binops that need to be initialized manually, but can be expanded
6460 automagically by rs6000_expand_binop_builtin. */
6461 static struct builtin_description bdesc_2arg_spe[] =
6463 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6464 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6465 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6466 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6467 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6468 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6469 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6470 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6471 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6472 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6473 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6474 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6475 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6476 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6477 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6478 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6479 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6480 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6481 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6482 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6483 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6484 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6487 /* Expand the builtin in EXP and store the result in TARGET. Store
6488 true in *EXPANDEDP if we found a builtin to expand.
6490 This expands the SPE builtins that are not simple unary and binary
6493 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6495 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6496 tree arglist = TREE_OPERAND (exp, 1);
6498 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6499 enum insn_code icode;
6500 enum machine_mode tmode, mode0;
6502 struct builtin_description *d;
6507 /* Syntax check for a 5-bit unsigned immediate. */
6510 case SPE_BUILTIN_EVSTDD:
6511 case SPE_BUILTIN_EVSTDH:
6512 case SPE_BUILTIN_EVSTDW:
6513 case SPE_BUILTIN_EVSTWHE:
6514 case SPE_BUILTIN_EVSTWHO:
6515 case SPE_BUILTIN_EVSTWWE:
6516 case SPE_BUILTIN_EVSTWWO:
6517 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6518 if (TREE_CODE (arg1) != INTEGER_CST
6519 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6521 error ("argument 2 must be a 5-bit unsigned literal");
6529 /* The evsplat*i instructions are not quite generic. */
6532 case SPE_BUILTIN_EVSPLATFI:
6533 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6535 case SPE_BUILTIN_EVSPLATI:
6536 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6542 d = (struct builtin_description *) bdesc_2arg_spe;
6543 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6544 if (d->code == fcode)
6545 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6547 d = (struct builtin_description *) bdesc_spe_predicates;
6548 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6549 if (d->code == fcode)
6550 return spe_expand_predicate_builtin (d->icode, arglist, target);
6552 d = (struct builtin_description *) bdesc_spe_evsel;
6553 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6554 if (d->code == fcode)
6555 return spe_expand_evsel_builtin (d->icode, arglist, target);
6559 case SPE_BUILTIN_EVSTDDX:
6560 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6561 case SPE_BUILTIN_EVSTDHX:
6562 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6563 case SPE_BUILTIN_EVSTDWX:
6564 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6565 case SPE_BUILTIN_EVSTWHEX:
6566 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6567 case SPE_BUILTIN_EVSTWHOX:
6568 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6569 case SPE_BUILTIN_EVSTWWEX:
6570 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6571 case SPE_BUILTIN_EVSTWWOX:
6572 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6573 case SPE_BUILTIN_EVSTDD:
6574 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6575 case SPE_BUILTIN_EVSTDH:
6576 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6577 case SPE_BUILTIN_EVSTDW:
6578 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6579 case SPE_BUILTIN_EVSTWHE:
6580 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6581 case SPE_BUILTIN_EVSTWHO:
6582 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6583 case SPE_BUILTIN_EVSTWWE:
6584 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6585 case SPE_BUILTIN_EVSTWWO:
6586 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6587 case SPE_BUILTIN_MFSPEFSCR:
6588 icode = CODE_FOR_spe_mfspefscr;
6589 tmode = insn_data[icode].operand[0].mode;
6592 || GET_MODE (target) != tmode
6593 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6594 target = gen_reg_rtx (tmode);
6596 pat = GEN_FCN (icode) (target);
6601 case SPE_BUILTIN_MTSPEFSCR:
6602 icode = CODE_FOR_spe_mtspefscr;
6603 arg0 = TREE_VALUE (arglist);
6604 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6605 mode0 = insn_data[icode].operand[0].mode;
6607 if (arg0 == error_mark_node)
6610 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6611 op0 = copy_to_mode_reg (mode0, op0);
6613 pat = GEN_FCN (icode) (op0);
6626 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6628 rtx pat, scratch, tmp;
6629 tree form = TREE_VALUE (arglist);
6630 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6631 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6632 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6633 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6634 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6635 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6639 if (TREE_CODE (form) != INTEGER_CST)
6641 error ("argument 1 of __builtin_spe_predicate must be a constant");
6645 form_int = TREE_INT_CST_LOW (form);
6650 if (arg0 == error_mark_node || arg1 == error_mark_node)
6654 || GET_MODE (target) != SImode
6655 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6656 target = gen_reg_rtx (SImode);
6658 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6659 op0 = copy_to_mode_reg (mode0, op0);
6660 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6661 op1 = copy_to_mode_reg (mode1, op1);
6663 scratch = gen_reg_rtx (CCmode);
6665 pat = GEN_FCN (icode) (scratch, op0, op1);
6670 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6671 _lower_. We use one compare, but look in different bits of the
6672 CR for each variant.
6674 There are 2 elements in each SPE simd type (upper/lower). The CR
6675 bits are set as follows:
6677 BIT0 | BIT 1 | BIT 2 | BIT 3
6678 U | L | (U | L) | (U & L)
6680 So, for an "all" relationship, BIT 3 would be set.
6681 For an "any" relationship, BIT 2 would be set. Etc.
6683 Following traditional nomenclature, these bits map to:
6685 BIT0 | BIT 1 | BIT 2 | BIT 3
6688 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6693 /* All variant. OV bit. */
6695 /* We need to get to the OV bit, which is the ORDERED bit. We
6696 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6697 that's ugly and will trigger a validate_condition_mode abort.
6698 So let's just use another pattern. */
6699 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6701 /* Any variant. EQ bit. */
6705 /* Upper variant. LT bit. */
6709 /* Lower variant. GT bit. */
6714 error ("argument 1 of __builtin_spe_predicate is out of range");
6718 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6719 emit_move_insn (target, tmp);
6724 /* The evsel builtins look like this:
6726 e = __builtin_spe_evsel_OP (a, b, c, d);
6730 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6731 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6735 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6738 tree arg0 = TREE_VALUE (arglist);
6739 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6740 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6741 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6742 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6743 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6744 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6745 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6746 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6747 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6752 if (arg0 == error_mark_node || arg1 == error_mark_node
6753 || arg2 == error_mark_node || arg3 == error_mark_node)
6757 || GET_MODE (target) != mode0
6758 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6759 target = gen_reg_rtx (mode0);
6761 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6762 op0 = copy_to_mode_reg (mode0, op0);
6763 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6764 op1 = copy_to_mode_reg (mode0, op1);
6765 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6766 op2 = copy_to_mode_reg (mode0, op2);
6767 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6768 op3 = copy_to_mode_reg (mode0, op3);
6770 /* Generate the compare. */
6771 scratch = gen_reg_rtx (CCmode);
6772 pat = GEN_FCN (icode) (scratch, op0, op1);
6777 if (mode0 == V2SImode)
6778 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6780 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6785 /* Expand an expression EXP that calls a built-in function,
6786 with result going to TARGET if that's convenient
6787 (and in mode MODE if that's convenient).
6788 SUBTARGET may be used as the target for computing one of EXP's operands.
6789 IGNORE is nonzero if the value is to be ignored. */
6792 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6793 enum machine_mode mode ATTRIBUTE_UNUSED,
6794 int ignore ATTRIBUTE_UNUSED)
6796 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6797 tree arglist = TREE_OPERAND (exp, 1);
6798 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6799 struct builtin_description *d;
6806 ret = altivec_expand_builtin (exp, target, &success);
6813 ret = spe_expand_builtin (exp, target, &success);
6819 if (TARGET_ALTIVEC || TARGET_SPE)
6821 /* Handle simple unary operations. */
6822 d = (struct builtin_description *) bdesc_1arg;
6823 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6824 if (d->code == fcode)
6825 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6827 /* Handle simple binary operations. */
6828 d = (struct builtin_description *) bdesc_2arg;
6829 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6830 if (d->code == fcode)
6831 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6833 /* Handle simple ternary operations. */
6834 d = (struct builtin_description *) bdesc_3arg;
6835 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6836 if (d->code == fcode)
6837 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6845 build_opaque_vector_type (tree node, int nunits)
6847 node = copy_node (node);
6848 TYPE_MAIN_VARIANT (node) = node;
6849 return build_vector_type (node, nunits);
6853 rs6000_init_builtins (void)
6855 V2SI_type_node = build_vector_type (intSI_type_node, 2);
6856 V2SF_type_node = build_vector_type (float_type_node, 2);
6857 V4HI_type_node = build_vector_type (intHI_type_node, 4);
6858 V4SI_type_node = build_vector_type (intSI_type_node, 4);
6859 V4SF_type_node = build_vector_type (float_type_node, 4);
6860 V8HI_type_node = build_vector_type (intHI_type_node, 8);
6861 V16QI_type_node = build_vector_type (intQI_type_node, 16);
6863 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
6864 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
6865 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
6867 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
6868 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
6869 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6871 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6872 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
6873 'vector unsigned short'. */
6875 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6876 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6877 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6878 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6879 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6880 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6881 pixel_type_node = copy_node (unsigned_intHI_type_node);
6882 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6884 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6885 get_identifier ("__bool char"),
6886 bool_char_type_node));
6887 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6888 get_identifier ("__bool short"),
6889 bool_short_type_node));
6890 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6891 get_identifier ("__bool int"),
6892 bool_int_type_node));
6893 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6894 get_identifier ("__pixel"),
6897 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
6898 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
6899 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
6900 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
6902 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6903 get_identifier ("__vector unsigned char"),
6904 unsigned_V16QI_type_node));
6905 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6906 get_identifier ("__vector signed char"),
6908 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6909 get_identifier ("__vector __bool char"),
6910 bool_V16QI_type_node));
6912 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6913 get_identifier ("__vector unsigned short"),
6914 unsigned_V8HI_type_node));
6915 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6916 get_identifier ("__vector signed short"),
6918 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6919 get_identifier ("__vector __bool short"),
6920 bool_V8HI_type_node));
6922 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6923 get_identifier ("__vector unsigned int"),
6924 unsigned_V4SI_type_node));
6925 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6926 get_identifier ("__vector signed int"),
6928 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6929 get_identifier ("__vector __bool int"),
6930 bool_V4SI_type_node));
6932 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6933 get_identifier ("__vector float"),
6935 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6936 get_identifier ("__vector __pixel"),
6937 pixel_V8HI_type_node));
6940 spe_init_builtins ();
6942 altivec_init_builtins ();
6943 if (TARGET_ALTIVEC || TARGET_SPE)
6944 rs6000_common_init_builtins ();
6947 /* Search through a set of builtins and enable the mask bits.
6948 DESC is an array of builtins.
6949 SIZE is the total number of builtins.
6950 START is the builtin enum at which to start.
6951 END is the builtin enum at which to end. */
6953 enable_mask_for_builtins (struct builtin_description *desc, int size,
6954 enum rs6000_builtins start,
6955 enum rs6000_builtins end)
6959 for (i = 0; i < size; ++i)
6960 if (desc[i].code == start)
6966 for (; i < size; ++i)
6968 /* Flip all the bits on. */
6969 desc[i].mask = target_flags;
6970 if (desc[i].code == end)
6976 spe_init_builtins (void)
6978 tree endlink = void_list_node;
6979 tree puint_type_node = build_pointer_type (unsigned_type_node);
6980 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6981 struct builtin_description *d;
6984 tree v2si_ftype_4_v2si
6985 = build_function_type
6986 (opaque_V2SI_type_node,
6987 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6988 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6989 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6990 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6993 tree v2sf_ftype_4_v2sf
6994 = build_function_type
6995 (opaque_V2SF_type_node,
6996 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6997 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6998 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6999 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7002 tree int_ftype_int_v2si_v2si
7003 = build_function_type
7005 tree_cons (NULL_TREE, integer_type_node,
7006 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7007 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7010 tree int_ftype_int_v2sf_v2sf
7011 = build_function_type
7013 tree_cons (NULL_TREE, integer_type_node,
7014 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7015 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7018 tree void_ftype_v2si_puint_int
7019 = build_function_type (void_type_node,
7020 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7021 tree_cons (NULL_TREE, puint_type_node,
7022 tree_cons (NULL_TREE,
7026 tree void_ftype_v2si_puint_char
7027 = build_function_type (void_type_node,
7028 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7029 tree_cons (NULL_TREE, puint_type_node,
7030 tree_cons (NULL_TREE,
7034 tree void_ftype_v2si_pv2si_int
7035 = build_function_type (void_type_node,
7036 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7037 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7038 tree_cons (NULL_TREE,
7042 tree void_ftype_v2si_pv2si_char
7043 = build_function_type (void_type_node,
7044 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7045 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7046 tree_cons (NULL_TREE,
7051 = build_function_type (void_type_node,
7052 tree_cons (NULL_TREE, integer_type_node, endlink));
7055 = build_function_type (integer_type_node, endlink);
7057 tree v2si_ftype_pv2si_int
7058 = build_function_type (opaque_V2SI_type_node,
7059 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7060 tree_cons (NULL_TREE, integer_type_node,
7063 tree v2si_ftype_puint_int
7064 = build_function_type (opaque_V2SI_type_node,
7065 tree_cons (NULL_TREE, puint_type_node,
7066 tree_cons (NULL_TREE, integer_type_node,
7069 tree v2si_ftype_pushort_int
7070 = build_function_type (opaque_V2SI_type_node,
7071 tree_cons (NULL_TREE, pushort_type_node,
7072 tree_cons (NULL_TREE, integer_type_node,
7075 tree v2si_ftype_signed_char
7076 = build_function_type (opaque_V2SI_type_node,
7077 tree_cons (NULL_TREE, signed_char_type_node,
7080 /* The initialization of the simple binary and unary builtins is
7081 done in rs6000_common_init_builtins, but we have to enable the
7082 mask bits here manually because we have run out of `target_flags'
7083 bits. We really need to redesign this mask business. */
7085 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7086 ARRAY_SIZE (bdesc_2arg),
7089 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7090 ARRAY_SIZE (bdesc_1arg),
7092 SPE_BUILTIN_EVSUBFUSIAAW);
7093 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7094 ARRAY_SIZE (bdesc_spe_predicates),
7095 SPE_BUILTIN_EVCMPEQ,
7096 SPE_BUILTIN_EVFSTSTLT);
7097 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7098 ARRAY_SIZE (bdesc_spe_evsel),
7099 SPE_BUILTIN_EVSEL_CMPGTS,
7100 SPE_BUILTIN_EVSEL_FSTSTEQ);
7102 (*lang_hooks.decls.pushdecl)
7103 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7104 opaque_V2SI_type_node));
7106 /* Initialize irregular SPE builtins. */
7108 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7109 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7110 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7111 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7112 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7113 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7114 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7115 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7116 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7117 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7118 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7119 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7120 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7121 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7122 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7123 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7124 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7125 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7128 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7129 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7130 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7131 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7132 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7133 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7134 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7135 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7136 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7137 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7138 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7139 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7140 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7141 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7142 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7143 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7144 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7145 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7146 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7147 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7148 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7149 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7152 d = (struct builtin_description *) bdesc_spe_predicates;
7153 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7157 switch (insn_data[d->icode].operand[1].mode)
7160 type = int_ftype_int_v2si_v2si;
7163 type = int_ftype_int_v2sf_v2sf;
7169 def_builtin (d->mask, d->name, type, d->code);
7172 /* Evsel predicates. */
7173 d = (struct builtin_description *) bdesc_spe_evsel;
7174 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7178 switch (insn_data[d->icode].operand[1].mode)
7181 type = v2si_ftype_4_v2si;
7184 type = v2sf_ftype_4_v2sf;
7190 def_builtin (d->mask, d->name, type, d->code);
7195 altivec_init_builtins (void)
7197 struct builtin_description *d;
7198 struct builtin_description_predicates *dp;
7200 tree pfloat_type_node = build_pointer_type (float_type_node);
7201 tree pint_type_node = build_pointer_type (integer_type_node);
7202 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7203 tree pchar_type_node = build_pointer_type (char_type_node);
7205 tree pvoid_type_node = build_pointer_type (void_type_node);
7207 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7208 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7209 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7210 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7212 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
7214 tree int_ftype_int_v4si_v4si
7215 = build_function_type_list (integer_type_node,
7216 integer_type_node, V4SI_type_node,
7217 V4SI_type_node, NULL_TREE);
7218 tree v4sf_ftype_pcfloat
7219 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7220 tree void_ftype_pfloat_v4sf
7221 = build_function_type_list (void_type_node,
7222 pfloat_type_node, V4SF_type_node, NULL_TREE);
7223 tree v4si_ftype_pcint
7224 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7225 tree void_ftype_pint_v4si
7226 = build_function_type_list (void_type_node,
7227 pint_type_node, V4SI_type_node, NULL_TREE);
7228 tree v8hi_ftype_pcshort
7229 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7230 tree void_ftype_pshort_v8hi
7231 = build_function_type_list (void_type_node,
7232 pshort_type_node, V8HI_type_node, NULL_TREE);
7233 tree v16qi_ftype_pcchar
7234 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7235 tree void_ftype_pchar_v16qi
7236 = build_function_type_list (void_type_node,
7237 pchar_type_node, V16QI_type_node, NULL_TREE);
7238 tree void_ftype_v4si
7239 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7240 tree v8hi_ftype_void
7241 = build_function_type (V8HI_type_node, void_list_node);
7242 tree void_ftype_void
7243 = build_function_type (void_type_node, void_list_node);
7245 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
7247 tree v16qi_ftype_long_pcvoid
7248 = build_function_type_list (V16QI_type_node,
7249 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7250 tree v8hi_ftype_long_pcvoid
7251 = build_function_type_list (V8HI_type_node,
7252 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7253 tree v4si_ftype_long_pcvoid
7254 = build_function_type_list (V4SI_type_node,
7255 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7257 tree void_ftype_v4si_long_pvoid
7258 = build_function_type_list (void_type_node,
7259 V4SI_type_node, long_integer_type_node,
7260 pvoid_type_node, NULL_TREE);
7261 tree void_ftype_v16qi_long_pvoid
7262 = build_function_type_list (void_type_node,
7263 V16QI_type_node, long_integer_type_node,
7264 pvoid_type_node, NULL_TREE);
7265 tree void_ftype_v8hi_long_pvoid
7266 = build_function_type_list (void_type_node,
7267 V8HI_type_node, long_integer_type_node,
7268 pvoid_type_node, NULL_TREE);
7269 tree int_ftype_int_v8hi_v8hi
7270 = build_function_type_list (integer_type_node,
7271 integer_type_node, V8HI_type_node,
7272 V8HI_type_node, NULL_TREE);
7273 tree int_ftype_int_v16qi_v16qi
7274 = build_function_type_list (integer_type_node,
7275 integer_type_node, V16QI_type_node,
7276 V16QI_type_node, NULL_TREE);
7277 tree int_ftype_int_v4sf_v4sf
7278 = build_function_type_list (integer_type_node,
7279 integer_type_node, V4SF_type_node,
7280 V4SF_type_node, NULL_TREE);
7281 tree v4si_ftype_v4si
7282 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7283 tree v8hi_ftype_v8hi
7284 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7285 tree v16qi_ftype_v16qi
7286 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7287 tree v4sf_ftype_v4sf
7288 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7289 tree void_ftype_pcvoid_int_int
7290 = build_function_type_list (void_type_node,
7291 pcvoid_type_node, integer_type_node,
7292 integer_type_node, NULL_TREE);
7293 tree int_ftype_pcchar
7294 = build_function_type_list (integer_type_node,
7295 pcchar_type_node, NULL_TREE);
7297 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7298 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7299 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7300 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7301 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7302 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7303 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7304 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7305 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7306 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7307 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7308 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7309 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7310 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7311 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7312 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7313 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7314 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7315 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7316 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7317 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7318 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7319 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7320 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7321 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7322 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7323 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7324 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7325 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7326 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7327 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7328 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7330 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7331 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7332 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7334 /* Add the DST variants. */
7335 d = (struct builtin_description *) bdesc_dst;
7336 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7337 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7339 /* Initialize the predicates. */
7340 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7341 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7343 enum machine_mode mode1;
7346 mode1 = insn_data[dp->icode].operand[1].mode;
7351 type = int_ftype_int_v4si_v4si;
7354 type = int_ftype_int_v8hi_v8hi;
7357 type = int_ftype_int_v16qi_v16qi;
7360 type = int_ftype_int_v4sf_v4sf;
7366 def_builtin (dp->mask, dp->name, type, dp->code);
7369 /* Initialize the abs* operators. */
7370 d = (struct builtin_description *) bdesc_abs;
7371 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7373 enum machine_mode mode0;
7376 mode0 = insn_data[d->icode].operand[0].mode;
7381 type = v4si_ftype_v4si;
7384 type = v8hi_ftype_v8hi;
7387 type = v16qi_ftype_v16qi;
7390 type = v4sf_ftype_v4sf;
7396 def_builtin (d->mask, d->name, type, d->code);
7401 rs6000_common_init_builtins (void)
7403 struct builtin_description *d;
7406 tree v4sf_ftype_v4sf_v4sf_v16qi
7407 = build_function_type_list (V4SF_type_node,
7408 V4SF_type_node, V4SF_type_node,
7409 V16QI_type_node, NULL_TREE);
7410 tree v4si_ftype_v4si_v4si_v16qi
7411 = build_function_type_list (V4SI_type_node,
7412 V4SI_type_node, V4SI_type_node,
7413 V16QI_type_node, NULL_TREE);
7414 tree v8hi_ftype_v8hi_v8hi_v16qi
7415 = build_function_type_list (V8HI_type_node,
7416 V8HI_type_node, V8HI_type_node,
7417 V16QI_type_node, NULL_TREE);
7418 tree v16qi_ftype_v16qi_v16qi_v16qi
7419 = build_function_type_list (V16QI_type_node,
7420 V16QI_type_node, V16QI_type_node,
7421 V16QI_type_node, NULL_TREE);
7423 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7425 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7426 tree v16qi_ftype_int
7427 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7428 tree v8hi_ftype_v16qi
7429 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7430 tree v4sf_ftype_v4sf
7431 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7433 tree v2si_ftype_v2si_v2si
7434 = build_function_type_list (opaque_V2SI_type_node,
7435 opaque_V2SI_type_node,
7436 opaque_V2SI_type_node, NULL_TREE);
7438 tree v2sf_ftype_v2sf_v2sf
7439 = build_function_type_list (opaque_V2SF_type_node,
7440 opaque_V2SF_type_node,
7441 opaque_V2SF_type_node, NULL_TREE);
7443 tree v2si_ftype_int_int
7444 = build_function_type_list (opaque_V2SI_type_node,
7445 integer_type_node, integer_type_node,
7448 tree v2si_ftype_v2si
7449 = build_function_type_list (opaque_V2SI_type_node,
7450 opaque_V2SI_type_node, NULL_TREE);
7452 tree v2sf_ftype_v2sf
7453 = build_function_type_list (opaque_V2SF_type_node,
7454 opaque_V2SF_type_node, NULL_TREE);
7456 tree v2sf_ftype_v2si
7457 = build_function_type_list (opaque_V2SF_type_node,
7458 opaque_V2SI_type_node, NULL_TREE);
7460 tree v2si_ftype_v2sf
7461 = build_function_type_list (opaque_V2SI_type_node,
7462 opaque_V2SF_type_node, NULL_TREE);
7464 tree v2si_ftype_v2si_char
7465 = build_function_type_list (opaque_V2SI_type_node,
7466 opaque_V2SI_type_node,
7467 char_type_node, NULL_TREE);
7469 tree v2si_ftype_int_char
7470 = build_function_type_list (opaque_V2SI_type_node,
7471 integer_type_node, char_type_node, NULL_TREE);
7473 tree v2si_ftype_char
7474 = build_function_type_list (opaque_V2SI_type_node,
7475 char_type_node, NULL_TREE);
7477 tree int_ftype_int_int
7478 = build_function_type_list (integer_type_node,
7479 integer_type_node, integer_type_node,
7482 tree v4si_ftype_v4si_v4si
7483 = build_function_type_list (V4SI_type_node,
7484 V4SI_type_node, V4SI_type_node, NULL_TREE);
7485 tree v4sf_ftype_v4si_int
7486 = build_function_type_list (V4SF_type_node,
7487 V4SI_type_node, integer_type_node, NULL_TREE);
7488 tree v4si_ftype_v4sf_int
7489 = build_function_type_list (V4SI_type_node,
7490 V4SF_type_node, integer_type_node, NULL_TREE);
7491 tree v4si_ftype_v4si_int
7492 = build_function_type_list (V4SI_type_node,
7493 V4SI_type_node, integer_type_node, NULL_TREE);
7494 tree v8hi_ftype_v8hi_int
7495 = build_function_type_list (V8HI_type_node,
7496 V8HI_type_node, integer_type_node, NULL_TREE);
7497 tree v16qi_ftype_v16qi_int
7498 = build_function_type_list (V16QI_type_node,
7499 V16QI_type_node, integer_type_node, NULL_TREE);
7500 tree v16qi_ftype_v16qi_v16qi_int
7501 = build_function_type_list (V16QI_type_node,
7502 V16QI_type_node, V16QI_type_node,
7503 integer_type_node, NULL_TREE);
7504 tree v8hi_ftype_v8hi_v8hi_int
7505 = build_function_type_list (V8HI_type_node,
7506 V8HI_type_node, V8HI_type_node,
7507 integer_type_node, NULL_TREE);
7508 tree v4si_ftype_v4si_v4si_int
7509 = build_function_type_list (V4SI_type_node,
7510 V4SI_type_node, V4SI_type_node,
7511 integer_type_node, NULL_TREE);
7512 tree v4sf_ftype_v4sf_v4sf_int
7513 = build_function_type_list (V4SF_type_node,
7514 V4SF_type_node, V4SF_type_node,
7515 integer_type_node, NULL_TREE);
7516 tree v4sf_ftype_v4sf_v4sf
7517 = build_function_type_list (V4SF_type_node,
7518 V4SF_type_node, V4SF_type_node, NULL_TREE);
7519 tree v4sf_ftype_v4sf_v4sf_v4si
7520 = build_function_type_list (V4SF_type_node,
7521 V4SF_type_node, V4SF_type_node,
7522 V4SI_type_node, NULL_TREE);
7523 tree v4sf_ftype_v4sf_v4sf_v4sf
7524 = build_function_type_list (V4SF_type_node,
7525 V4SF_type_node, V4SF_type_node,
7526 V4SF_type_node, NULL_TREE);
7527 tree v4si_ftype_v4si_v4si_v4si
7528 = build_function_type_list (V4SI_type_node,
7529 V4SI_type_node, V4SI_type_node,
7530 V4SI_type_node, NULL_TREE);
7531 tree v8hi_ftype_v8hi_v8hi
7532 = build_function_type_list (V8HI_type_node,
7533 V8HI_type_node, V8HI_type_node, NULL_TREE);
7534 tree v8hi_ftype_v8hi_v8hi_v8hi
7535 = build_function_type_list (V8HI_type_node,
7536 V8HI_type_node, V8HI_type_node,
7537 V8HI_type_node, NULL_TREE);
7538 tree v4si_ftype_v8hi_v8hi_v4si
7539 = build_function_type_list (V4SI_type_node,
7540 V8HI_type_node, V8HI_type_node,
7541 V4SI_type_node, NULL_TREE);
7542 tree v4si_ftype_v16qi_v16qi_v4si
7543 = build_function_type_list (V4SI_type_node,
7544 V16QI_type_node, V16QI_type_node,
7545 V4SI_type_node, NULL_TREE);
7546 tree v16qi_ftype_v16qi_v16qi
7547 = build_function_type_list (V16QI_type_node,
7548 V16QI_type_node, V16QI_type_node, NULL_TREE);
7549 tree v4si_ftype_v4sf_v4sf
7550 = build_function_type_list (V4SI_type_node,
7551 V4SF_type_node, V4SF_type_node, NULL_TREE);
7552 tree v8hi_ftype_v16qi_v16qi
7553 = build_function_type_list (V8HI_type_node,
7554 V16QI_type_node, V16QI_type_node, NULL_TREE);
7555 tree v4si_ftype_v8hi_v8hi
7556 = build_function_type_list (V4SI_type_node,
7557 V8HI_type_node, V8HI_type_node, NULL_TREE);
7558 tree v8hi_ftype_v4si_v4si
7559 = build_function_type_list (V8HI_type_node,
7560 V4SI_type_node, V4SI_type_node, NULL_TREE);
7561 tree v16qi_ftype_v8hi_v8hi
7562 = build_function_type_list (V16QI_type_node,
7563 V8HI_type_node, V8HI_type_node, NULL_TREE);
7564 tree v4si_ftype_v16qi_v4si
7565 = build_function_type_list (V4SI_type_node,
7566 V16QI_type_node, V4SI_type_node, NULL_TREE);
7567 tree v4si_ftype_v16qi_v16qi
7568 = build_function_type_list (V4SI_type_node,
7569 V16QI_type_node, V16QI_type_node, NULL_TREE);
7570 tree v4si_ftype_v8hi_v4si
7571 = build_function_type_list (V4SI_type_node,
7572 V8HI_type_node, V4SI_type_node, NULL_TREE);
7573 tree v4si_ftype_v8hi
7574 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7575 tree int_ftype_v4si_v4si
7576 = build_function_type_list (integer_type_node,
7577 V4SI_type_node, V4SI_type_node, NULL_TREE);
7578 tree int_ftype_v4sf_v4sf
7579 = build_function_type_list (integer_type_node,
7580 V4SF_type_node, V4SF_type_node, NULL_TREE);
7581 tree int_ftype_v16qi_v16qi
7582 = build_function_type_list (integer_type_node,
7583 V16QI_type_node, V16QI_type_node, NULL_TREE);
7584 tree int_ftype_v8hi_v8hi
7585 = build_function_type_list (integer_type_node,
7586 V8HI_type_node, V8HI_type_node, NULL_TREE);
7588 /* Add the simple ternary operators. */
7589 d = (struct builtin_description *) bdesc_3arg;
7590 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7593 enum machine_mode mode0, mode1, mode2, mode3;
7596 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7599 mode0 = insn_data[d->icode].operand[0].mode;
7600 mode1 = insn_data[d->icode].operand[1].mode;
7601 mode2 = insn_data[d->icode].operand[2].mode;
7602 mode3 = insn_data[d->icode].operand[3].mode;
7604 /* When all four are of the same mode. */
7605 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7610 type = v4si_ftype_v4si_v4si_v4si;
7613 type = v4sf_ftype_v4sf_v4sf_v4sf;
7616 type = v8hi_ftype_v8hi_v8hi_v8hi;
7619 type = v16qi_ftype_v16qi_v16qi_v16qi;
7625 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7630 type = v4si_ftype_v4si_v4si_v16qi;
7633 type = v4sf_ftype_v4sf_v4sf_v16qi;
7636 type = v8hi_ftype_v8hi_v8hi_v16qi;
7639 type = v16qi_ftype_v16qi_v16qi_v16qi;
7645 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7646 && mode3 == V4SImode)
7647 type = v4si_ftype_v16qi_v16qi_v4si;
7648 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7649 && mode3 == V4SImode)
7650 type = v4si_ftype_v8hi_v8hi_v4si;
7651 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7652 && mode3 == V4SImode)
7653 type = v4sf_ftype_v4sf_v4sf_v4si;
7655 /* vchar, vchar, vchar, 4 bit literal. */
7656 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7658 type = v16qi_ftype_v16qi_v16qi_int;
7660 /* vshort, vshort, vshort, 4 bit literal. */
7661 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7663 type = v8hi_ftype_v8hi_v8hi_int;
7665 /* vint, vint, vint, 4 bit literal. */
7666 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7668 type = v4si_ftype_v4si_v4si_int;
7670 /* vfloat, vfloat, vfloat, 4 bit literal. */
7671 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7673 type = v4sf_ftype_v4sf_v4sf_int;
7678 def_builtin (d->mask, d->name, type, d->code);
7681 /* Add the simple binary operators. */
7682 d = (struct builtin_description *) bdesc_2arg;
7683 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7685 enum machine_mode mode0, mode1, mode2;
7688 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7691 mode0 = insn_data[d->icode].operand[0].mode;
7692 mode1 = insn_data[d->icode].operand[1].mode;
7693 mode2 = insn_data[d->icode].operand[2].mode;
7695 /* When all three operands are of the same mode. */
7696 if (mode0 == mode1 && mode1 == mode2)
7701 type = v4sf_ftype_v4sf_v4sf;
7704 type = v4si_ftype_v4si_v4si;
7707 type = v16qi_ftype_v16qi_v16qi;
7710 type = v8hi_ftype_v8hi_v8hi;
7713 type = v2si_ftype_v2si_v2si;
7716 type = v2sf_ftype_v2sf_v2sf;
7719 type = int_ftype_int_int;
7726 /* A few other combos we really don't want to do manually. */
7728 /* vint, vfloat, vfloat. */
7729 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7730 type = v4si_ftype_v4sf_v4sf;
7732 /* vshort, vchar, vchar. */
7733 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7734 type = v8hi_ftype_v16qi_v16qi;
7736 /* vint, vshort, vshort. */
7737 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7738 type = v4si_ftype_v8hi_v8hi;
7740 /* vshort, vint, vint. */
7741 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7742 type = v8hi_ftype_v4si_v4si;
7744 /* vchar, vshort, vshort. */
7745 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7746 type = v16qi_ftype_v8hi_v8hi;
7748 /* vint, vchar, vint. */
7749 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7750 type = v4si_ftype_v16qi_v4si;
7752 /* vint, vchar, vchar. */
7753 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7754 type = v4si_ftype_v16qi_v16qi;
7756 /* vint, vshort, vint. */
7757 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7758 type = v4si_ftype_v8hi_v4si;
7760 /* vint, vint, 5 bit literal. */
7761 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7762 type = v4si_ftype_v4si_int;
7764 /* vshort, vshort, 5 bit literal. */
7765 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7766 type = v8hi_ftype_v8hi_int;
7768 /* vchar, vchar, 5 bit literal. */
7769 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7770 type = v16qi_ftype_v16qi_int;
7772 /* vfloat, vint, 5 bit literal. */
7773 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7774 type = v4sf_ftype_v4si_int;
7776 /* vint, vfloat, 5 bit literal. */
7777 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7778 type = v4si_ftype_v4sf_int;
7780 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7781 type = v2si_ftype_int_int;
7783 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7784 type = v2si_ftype_v2si_char;
7786 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7787 type = v2si_ftype_int_char;
7790 else if (mode0 == SImode)
7795 type = int_ftype_v4si_v4si;
7798 type = int_ftype_v4sf_v4sf;
7801 type = int_ftype_v16qi_v16qi;
7804 type = int_ftype_v8hi_v8hi;
7814 def_builtin (d->mask, d->name, type, d->code);
7817 /* Add the simple unary operators. */
7818 d = (struct builtin_description *) bdesc_1arg;
7819 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7821 enum machine_mode mode0, mode1;
7824 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7827 mode0 = insn_data[d->icode].operand[0].mode;
7828 mode1 = insn_data[d->icode].operand[1].mode;
7830 if (mode0 == V4SImode && mode1 == QImode)
7831 type = v4si_ftype_int;
7832 else if (mode0 == V8HImode && mode1 == QImode)
7833 type = v8hi_ftype_int;
7834 else if (mode0 == V16QImode && mode1 == QImode)
7835 type = v16qi_ftype_int;
7836 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7837 type = v4sf_ftype_v4sf;
7838 else if (mode0 == V8HImode && mode1 == V16QImode)
7839 type = v8hi_ftype_v16qi;
7840 else if (mode0 == V4SImode && mode1 == V8HImode)
7841 type = v4si_ftype_v8hi;
7842 else if (mode0 == V2SImode && mode1 == V2SImode)
7843 type = v2si_ftype_v2si;
7844 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7845 type = v2sf_ftype_v2sf;
7846 else if (mode0 == V2SFmode && mode1 == V2SImode)
7847 type = v2sf_ftype_v2si;
7848 else if (mode0 == V2SImode && mode1 == V2SFmode)
7849 type = v2si_ftype_v2sf;
7850 else if (mode0 == V2SImode && mode1 == QImode)
7851 type = v2si_ftype_char;
7855 def_builtin (d->mask, d->name, type, d->code);
7860 rs6000_init_libfuncs (void)
7862 if (!TARGET_HARD_FLOAT)
7865 if (DEFAULT_ABI != ABI_V4)
7867 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7869 /* AIX library routines for float->int conversion. */
7870 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7871 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7872 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7873 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7876 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7877 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7878 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7879 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7880 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7884 /* 32-bit SVR4 quad floating point routines. */
7886 set_optab_libfunc (add_optab, TFmode, "_q_add");
7887 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7888 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7889 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7890 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7891 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7892 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7894 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7895 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7896 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7897 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7898 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7899 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7901 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7902 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7903 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7904 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7905 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7906 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7907 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7911 /* Expand a block move operation, and return 1 if successful. Return 0
7912 if we should let the compiler generate normal code.
7914 operands[0] is the destination
7915 operands[1] is the source
7916 operands[2] is the length
7917 operands[3] is the alignment */
7919 #define MAX_MOVE_REG 4
7922 expand_block_move (rtx operands[])
7924 rtx orig_dest = operands[0];
7925 rtx orig_src = operands[1];
7926 rtx bytes_rtx = operands[2];
7927 rtx align_rtx = operands[3];
7928 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7933 rtx stores[MAX_MOVE_REG];
7936 /* If this is not a fixed size move, just call memcpy */
7940 /* If this is not a fixed size alignment, abort */
7941 if (GET_CODE (align_rtx) != CONST_INT)
7943 align = INTVAL (align_rtx);
7945 /* Anything to move? */
7946 bytes = INTVAL (bytes_rtx);
7950 /* store_one_arg depends on expand_block_move to handle at least the size of
7951 reg_parm_stack_space. */
7952 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7955 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7958 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7959 rtx (*mov) (rtx, rtx);
7961 enum machine_mode mode = BLKmode;
7965 && bytes > 24 /* move up to 32 bytes at a time */
7973 && ! fixed_regs[12])
7975 move_bytes = (bytes > 32) ? 32 : bytes;
7976 gen_func.movstrsi = gen_movstrsi_8reg;
7978 else if (TARGET_STRING
7979 && bytes > 16 /* move up to 24 bytes at a time */
7985 && ! fixed_regs[10])
7987 move_bytes = (bytes > 24) ? 24 : bytes;
7988 gen_func.movstrsi = gen_movstrsi_6reg;
7990 else if (TARGET_STRING
7991 && bytes > 8 /* move up to 16 bytes at a time */
7997 move_bytes = (bytes > 16) ? 16 : bytes;
7998 gen_func.movstrsi = gen_movstrsi_4reg;
8000 else if (bytes >= 8 && TARGET_POWERPC64
8001 /* 64-bit loads and stores require word-aligned
8003 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
8007 gen_func.mov = gen_movdi;
8009 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8010 { /* move up to 8 bytes at a time */
8011 move_bytes = (bytes > 8) ? 8 : bytes;
8012 gen_func.movstrsi = gen_movstrsi_2reg;
8014 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
8015 { /* move 4 bytes */
8018 gen_func.mov = gen_movsi;
8020 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
8021 { /* move 2 bytes */
8024 gen_func.mov = gen_movhi;
8026 else if (TARGET_STRING && bytes > 1)
8027 { /* move up to 4 bytes at a time */
8028 move_bytes = (bytes > 4) ? 4 : bytes;
8029 gen_func.movstrsi = gen_movstrsi_1reg;
8031 else /* move 1 byte at a time */
8035 gen_func.mov = gen_movqi;
8038 src = adjust_address (orig_src, mode, offset);
8039 dest = adjust_address (orig_dest, mode, offset);
8041 if (mode != BLKmode)
8043 rtx tmp_reg = gen_reg_rtx (mode);
8045 emit_insn ((*gen_func.mov) (tmp_reg, src));
8046 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8049 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8052 for (i = 0; i < num_reg; i++)
8053 emit_insn (stores[i]);
8057 if (mode == BLKmode)
8059 /* Move the address into scratch registers. The movstrsi
8060 patterns require zero offset. */
8061 if (!REG_P (XEXP (src, 0)))
8063 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8064 src = replace_equiv_address (src, src_reg);
8066 set_mem_size (src, GEN_INT (move_bytes));
8068 if (!REG_P (XEXP (dest, 0)))
8070 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8071 dest = replace_equiv_address (dest, dest_reg);
8073 set_mem_size (dest, GEN_INT (move_bytes));
8075 emit_insn ((*gen_func.movstrsi) (dest, src,
8076 GEN_INT (move_bytes & 31),
8085 /* Return 1 if OP is a load multiple operation. It is known to be a
8086 PARALLEL and the first section will be tested. */
8089 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8091 int count = XVECLEN (op, 0);
8092 unsigned int dest_regno;
8096 /* Perform a quick check so we don't blow up below. */
8098 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8099 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8100 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8103 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8104 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8106 for (i = 1; i < count; i++)
8108 rtx elt = XVECEXP (op, 0, i);
8110 if (GET_CODE (elt) != SET
8111 || GET_CODE (SET_DEST (elt)) != REG
8112 || GET_MODE (SET_DEST (elt)) != SImode
8113 || REGNO (SET_DEST (elt)) != dest_regno + i
8114 || GET_CODE (SET_SRC (elt)) != MEM
8115 || GET_MODE (SET_SRC (elt)) != SImode
8116 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8117 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8118 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8119 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8126 /* Similar, but tests for store multiple. Here, the second vector element
8127 is a CLOBBER. It will be tested later. */
8130 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8132 int count = XVECLEN (op, 0) - 1;
8133 unsigned int src_regno;
8137 /* Perform a quick check so we don't blow up below. */
8139 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8140 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8141 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8144 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8145 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8147 for (i = 1; i < count; i++)
8149 rtx elt = XVECEXP (op, 0, i + 1);
8151 if (GET_CODE (elt) != SET
8152 || GET_CODE (SET_SRC (elt)) != REG
8153 || GET_MODE (SET_SRC (elt)) != SImode
8154 || REGNO (SET_SRC (elt)) != src_regno + i
8155 || GET_CODE (SET_DEST (elt)) != MEM
8156 || GET_MODE (SET_DEST (elt)) != SImode
8157 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8158 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8159 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8160 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8167 /* Return a string to perform a load_multiple operation.
8168 operands[0] is the vector.
8169 operands[1] is the source address.
8170 operands[2] is the first destination register. */
8173 rs6000_output_load_multiple (rtx operands[3])
8175 /* We have to handle the case where the pseudo used to contain the address
8176 is assigned to one of the output registers. */
8178 int words = XVECLEN (operands[0], 0);
8181 if (XVECLEN (operands[0], 0) == 1)
8182 return "{l|lwz} %2,0(%1)";
8184 for (i = 0; i < words; i++)
8185 if (refers_to_regno_p (REGNO (operands[2]) + i,
8186 REGNO (operands[2]) + i + 1, operands[1], 0))
8190 xop[0] = GEN_INT (4 * (words-1));
8191 xop[1] = operands[1];
8192 xop[2] = operands[2];
8193 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8198 xop[0] = GEN_INT (4 * (words-1));
8199 xop[1] = operands[1];
8200 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8201 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8206 for (j = 0; j < words; j++)
8209 xop[0] = GEN_INT (j * 4);
8210 xop[1] = operands[1];
8211 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8212 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8214 xop[0] = GEN_INT (i * 4);
8215 xop[1] = operands[1];
8216 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8221 return "{lsi|lswi} %2,%1,%N0";
8224 /* Return 1 for a parallel vrsave operation. */
8227 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8229 int count = XVECLEN (op, 0);
8230 unsigned int dest_regno, src_regno;
8234 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8235 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8236 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8239 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8240 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8242 if (dest_regno != VRSAVE_REGNO
8243 && src_regno != VRSAVE_REGNO)
8246 for (i = 1; i < count; i++)
8248 rtx elt = XVECEXP (op, 0, i);
8250 if (GET_CODE (elt) != CLOBBER
8251 && GET_CODE (elt) != SET)
8258 /* Return 1 for an PARALLEL suitable for mfcr. */
8261 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8263 int count = XVECLEN (op, 0);
8266 /* Perform a quick check so we don't blow up below. */
8268 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8269 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8270 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8273 for (i = 0; i < count; i++)
8275 rtx exp = XVECEXP (op, 0, i);
8280 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8282 if (GET_CODE (src_reg) != REG
8283 || GET_MODE (src_reg) != CCmode
8284 || ! CR_REGNO_P (REGNO (src_reg)))
8287 if (GET_CODE (exp) != SET
8288 || GET_CODE (SET_DEST (exp)) != REG
8289 || GET_MODE (SET_DEST (exp)) != SImode
8290 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8292 unspec = SET_SRC (exp);
8293 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8295 if (GET_CODE (unspec) != UNSPEC
8296 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8297 || XVECLEN (unspec, 0) != 2
8298 || XVECEXP (unspec, 0, 0) != src_reg
8299 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8300 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8306 /* Return 1 for an PARALLEL suitable for mtcrf. */
8309 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8311 int count = XVECLEN (op, 0);
8315 /* Perform a quick check so we don't blow up below. */
8317 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8318 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8319 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8321 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8323 if (GET_CODE (src_reg) != REG
8324 || GET_MODE (src_reg) != SImode
8325 || ! INT_REGNO_P (REGNO (src_reg)))
8328 for (i = 0; i < count; i++)
8330 rtx exp = XVECEXP (op, 0, i);
8334 if (GET_CODE (exp) != SET
8335 || GET_CODE (SET_DEST (exp)) != REG
8336 || GET_MODE (SET_DEST (exp)) != CCmode
8337 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8339 unspec = SET_SRC (exp);
8340 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8342 if (GET_CODE (unspec) != UNSPEC
8343 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8344 || XVECLEN (unspec, 0) != 2
8345 || XVECEXP (unspec, 0, 0) != src_reg
8346 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8347 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8353 /* Return 1 for an PARALLEL suitable for lmw. */
8356 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8358 int count = XVECLEN (op, 0);
8359 unsigned int dest_regno;
8361 unsigned int base_regno;
8362 HOST_WIDE_INT offset;
8365 /* Perform a quick check so we don't blow up below. */
8367 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8368 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8369 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8372 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8373 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8376 || count != 32 - (int) dest_regno)
8379 if (legitimate_indirect_address_p (src_addr, 0))
8382 base_regno = REGNO (src_addr);
8383 if (base_regno == 0)
8386 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8388 offset = INTVAL (XEXP (src_addr, 1));
8389 base_regno = REGNO (XEXP (src_addr, 0));
8394 for (i = 0; i < count; i++)
8396 rtx elt = XVECEXP (op, 0, i);
8399 HOST_WIDE_INT newoffset;
8401 if (GET_CODE (elt) != SET
8402 || GET_CODE (SET_DEST (elt)) != REG
8403 || GET_MODE (SET_DEST (elt)) != SImode
8404 || REGNO (SET_DEST (elt)) != dest_regno + i
8405 || GET_CODE (SET_SRC (elt)) != MEM
8406 || GET_MODE (SET_SRC (elt)) != SImode)
8408 newaddr = XEXP (SET_SRC (elt), 0);
8409 if (legitimate_indirect_address_p (newaddr, 0))
8414 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8416 addr_reg = XEXP (newaddr, 0);
8417 newoffset = INTVAL (XEXP (newaddr, 1));
8421 if (REGNO (addr_reg) != base_regno
8422 || newoffset != offset + 4 * i)
8429 /* Return 1 for an PARALLEL suitable for stmw. */
8432 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8434 int count = XVECLEN (op, 0);
8435 unsigned int src_regno;
8437 unsigned int base_regno;
8438 HOST_WIDE_INT offset;
8441 /* Perform a quick check so we don't blow up below. */
8443 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8444 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8445 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8448 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8449 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8452 || count != 32 - (int) src_regno)
8455 if (legitimate_indirect_address_p (dest_addr, 0))
8458 base_regno = REGNO (dest_addr);
8459 if (base_regno == 0)
8462 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8464 offset = INTVAL (XEXP (dest_addr, 1));
8465 base_regno = REGNO (XEXP (dest_addr, 0));
8470 for (i = 0; i < count; i++)
8472 rtx elt = XVECEXP (op, 0, i);
8475 HOST_WIDE_INT newoffset;
8477 if (GET_CODE (elt) != SET
8478 || GET_CODE (SET_SRC (elt)) != REG
8479 || GET_MODE (SET_SRC (elt)) != SImode
8480 || REGNO (SET_SRC (elt)) != src_regno + i
8481 || GET_CODE (SET_DEST (elt)) != MEM
8482 || GET_MODE (SET_DEST (elt)) != SImode)
8484 newaddr = XEXP (SET_DEST (elt), 0);
8485 if (legitimate_indirect_address_p (newaddr, 0))
8490 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8492 addr_reg = XEXP (newaddr, 0);
8493 newoffset = INTVAL (XEXP (newaddr, 1));
8497 if (REGNO (addr_reg) != base_regno
8498 || newoffset != offset + 4 * i)
8505 /* A validation routine: say whether CODE, a condition code, and MODE
8506 match. The other alternatives either don't make sense or should
8507 never be generated. */
8510 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8512 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8513 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8514 || GET_MODE_CLASS (mode) != MODE_CC)
8517 /* These don't make sense. */
8518 if ((code == GT || code == LT || code == GE || code == LE)
8519 && mode == CCUNSmode)
8522 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8523 && mode != CCUNSmode)
8526 if (mode != CCFPmode
8527 && (code == ORDERED || code == UNORDERED
8528 || code == UNEQ || code == LTGT
8529 || code == UNGT || code == UNLT
8530 || code == UNGE || code == UNLE))
8533 /* These should never be generated except for
8534 flag_finite_math_only. */
8535 if (mode == CCFPmode
8536 && ! flag_finite_math_only
8537 && (code == LE || code == GE
8538 || code == UNEQ || code == LTGT
8539 || code == UNGT || code == UNLT))
8542 /* These are invalid; the information is not there. */
8543 if (mode == CCEQmode
8544 && code != EQ && code != NE)
8548 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8549 We only check the opcode against the mode of the CC value here. */
8552 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8554 enum rtx_code code = GET_CODE (op);
8555 enum machine_mode cc_mode;
8557 if (!COMPARISON_P (op))
8560 cc_mode = GET_MODE (XEXP (op, 0));
8561 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8564 validate_condition_mode (code, cc_mode);
8569 /* Return 1 if OP is a comparison operation that is valid for a branch
8570 insn and which is true if the corresponding bit in the CC register
8574 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8578 if (! branch_comparison_operator (op, mode))
8581 code = GET_CODE (op);
8582 return (code == EQ || code == LT || code == GT
8583 || code == LTU || code == GTU
8584 || code == UNORDERED);
8587 /* Return 1 if OP is a comparison operation that is valid for an scc
8588 insn: it must be a positive comparison. */
8591 scc_comparison_operator (rtx op, enum machine_mode mode)
8593 return branch_positive_comparison_operator (op, mode);
8597 trap_comparison_operator (rtx op, enum machine_mode mode)
8599 if (mode != VOIDmode && mode != GET_MODE (op))
8601 return COMPARISON_P (op);
8605 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8607 enum rtx_code code = GET_CODE (op);
8608 return (code == AND || code == IOR || code == XOR);
8612 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8614 enum rtx_code code = GET_CODE (op);
8615 return (code == IOR || code == XOR);
8619 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8621 enum rtx_code code = GET_CODE (op);
8622 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8625 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8626 mask required to convert the result of a rotate insn into a shift
8627 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8630 includes_lshift_p (rtx shiftop, rtx andop)
8632 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8634 shift_mask <<= INTVAL (shiftop);
8636 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8639 /* Similar, but for right shift. */
8642 includes_rshift_p (rtx shiftop, rtx andop)
8644 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8646 shift_mask >>= INTVAL (shiftop);
8648 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8651 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8652 to perform a left shift. It must have exactly SHIFTOP least
8653 significant 0's, then one or more 1's, then zero or more 0's. */
8656 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8658 if (GET_CODE (andop) == CONST_INT)
8660 HOST_WIDE_INT c, lsb, shift_mask;
8663 if (c == 0 || c == ~0)
8667 shift_mask <<= INTVAL (shiftop);
8669 /* Find the least significant one bit. */
8672 /* It must coincide with the LSB of the shift mask. */
8673 if (-lsb != shift_mask)
8676 /* Invert to look for the next transition (if any). */
8679 /* Remove the low group of ones (originally low group of zeros). */
8682 /* Again find the lsb, and check we have all 1's above. */
8686 else if (GET_CODE (andop) == CONST_DOUBLE
8687 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8689 HOST_WIDE_INT low, high, lsb;
8690 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8692 low = CONST_DOUBLE_LOW (andop);
8693 if (HOST_BITS_PER_WIDE_INT < 64)
8694 high = CONST_DOUBLE_HIGH (andop);
8696 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8697 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8700 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8702 shift_mask_high = ~0;
8703 if (INTVAL (shiftop) > 32)
8704 shift_mask_high <<= INTVAL (shiftop) - 32;
8708 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8715 return high == -lsb;
8718 shift_mask_low = ~0;
8719 shift_mask_low <<= INTVAL (shiftop);
8723 if (-lsb != shift_mask_low)
8726 if (HOST_BITS_PER_WIDE_INT < 64)
8731 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8734 return high == -lsb;
8738 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8744 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8745 to perform a left shift. It must have SHIFTOP or more least
8746 significant 0's, with the remainder of the word 1's. */
8749 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8751 if (GET_CODE (andop) == CONST_INT)
8753 HOST_WIDE_INT c, lsb, shift_mask;
8756 shift_mask <<= INTVAL (shiftop);
8759 /* Find the least significant one bit. */
8762 /* It must be covered by the shift mask.
8763 This test also rejects c == 0. */
8764 if ((lsb & shift_mask) == 0)
8767 /* Check we have all 1's above the transition, and reject all 1's. */
8768 return c == -lsb && lsb != 1;
8770 else if (GET_CODE (andop) == CONST_DOUBLE
8771 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8773 HOST_WIDE_INT low, lsb, shift_mask_low;
8775 low = CONST_DOUBLE_LOW (andop);
8777 if (HOST_BITS_PER_WIDE_INT < 64)
8779 HOST_WIDE_INT high, shift_mask_high;
8781 high = CONST_DOUBLE_HIGH (andop);
8785 shift_mask_high = ~0;
8786 if (INTVAL (shiftop) > 32)
8787 shift_mask_high <<= INTVAL (shiftop) - 32;
8791 if ((lsb & shift_mask_high) == 0)
8794 return high == -lsb;
8800 shift_mask_low = ~0;
8801 shift_mask_low <<= INTVAL (shiftop);
8805 if ((lsb & shift_mask_low) == 0)
8808 return low == -lsb && lsb != 1;
8814 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8815 for lfq and stfq insns iff the registers are hard registers. */
8818 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8820 /* We might have been passed a SUBREG. */
8821 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8824 /* We might have been passed non floating point registers. */
8825 if (!FP_REGNO_P (REGNO (reg1))
8826 || !FP_REGNO_P (REGNO (reg2)))
8829 return (REGNO (reg1) == REGNO (reg2) - 1);
8832 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8833 addr1 and addr2 must be in consecutive memory locations
8834 (addr2 == addr1 + 8). */
8837 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
8843 /* The mems cannot be volatile. */
8844 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
8847 addr1 = XEXP (mem1, 0);
8848 addr2 = XEXP (mem2, 0);
8850 /* Extract an offset (if used) from the first addr. */
8851 if (GET_CODE (addr1) == PLUS)
8853 /* If not a REG, return zero. */
8854 if (GET_CODE (XEXP (addr1, 0)) != REG)
8858 reg1 = REGNO (XEXP (addr1, 0));
8859 /* The offset must be constant! */
8860 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8862 offset1 = INTVAL (XEXP (addr1, 1));
8865 else if (GET_CODE (addr1) != REG)
8869 reg1 = REGNO (addr1);
8870 /* This was a simple (mem (reg)) expression. Offset is 0. */
8874 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8875 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8876 register as addr1. */
8877 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8879 if (GET_CODE (addr2) != PLUS)
8882 if (GET_CODE (XEXP (addr2, 0)) != REG
8883 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8886 if (reg1 != REGNO (XEXP (addr2, 0)))
8889 /* The offset for the second addr must be 8 more than the first addr. */
8890 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8893 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8898 /* Return the register class of a scratch register needed to copy IN into
8899 or out of a register in CLASS in MODE. If it can be done directly,
8900 NO_REGS is returned. */
8903 secondary_reload_class (enum reg_class class,
8904 enum machine_mode mode ATTRIBUTE_UNUSED,
8909 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8911 && MACHOPIC_INDIRECT
8915 /* We cannot copy a symbolic operand directly into anything
8916 other than BASE_REGS for TARGET_ELF. So indicate that a
8917 register from BASE_REGS is needed as an intermediate
8920 On Darwin, pic addresses require a load from memory, which
8921 needs a base register. */
8922 if (class != BASE_REGS
8923 && (GET_CODE (in) == SYMBOL_REF
8924 || GET_CODE (in) == HIGH
8925 || GET_CODE (in) == LABEL_REF
8926 || GET_CODE (in) == CONST))
8930 if (GET_CODE (in) == REG)
8933 if (regno >= FIRST_PSEUDO_REGISTER)
8935 regno = true_regnum (in);
8936 if (regno >= FIRST_PSEUDO_REGISTER)
8940 else if (GET_CODE (in) == SUBREG)
8942 regno = true_regnum (in);
8943 if (regno >= FIRST_PSEUDO_REGISTER)
8949 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8951 if (class == GENERAL_REGS || class == BASE_REGS
8952 || (regno >= 0 && INT_REGNO_P (regno)))
8955 /* Constants, memory, and FP registers can go into FP registers. */
8956 if ((regno == -1 || FP_REGNO_P (regno))
8957 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8960 /* Memory, and AltiVec registers can go into AltiVec registers. */
8961 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8962 && class == ALTIVEC_REGS)
8965 /* We can copy among the CR registers. */
8966 if ((class == CR_REGS || class == CR0_REGS)
8967 && regno >= 0 && CR_REGNO_P (regno))
8970 /* Otherwise, we need GENERAL_REGS. */
8971 return GENERAL_REGS;
8974 /* Given a comparison operation, return the bit number in CCR to test. We
8975 know this is a valid comparison.
8977 SCC_P is 1 if this is for an scc. That means that %D will have been
8978 used instead of %C, so the bits will be in different places.
8980 Return -1 if OP isn't a valid comparison for some reason. */
8983 ccr_bit (rtx op, int scc_p)
8985 enum rtx_code code = GET_CODE (op);
8986 enum machine_mode cc_mode;
8991 if (!COMPARISON_P (op))
8996 if (GET_CODE (reg) != REG
8997 || ! CR_REGNO_P (REGNO (reg)))
9000 cc_mode = GET_MODE (reg);
9001 cc_regnum = REGNO (reg);
9002 base_bit = 4 * (cc_regnum - CR0_REGNO);
9004 validate_condition_mode (code, cc_mode);
9006 /* When generating a sCOND operation, only positive conditions are
9008 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9009 && code != GTU && code != LTU)
9015 return scc_p ? base_bit + 3 : base_bit + 2;
9017 return base_bit + 2;
9018 case GT: case GTU: case UNLE:
9019 return base_bit + 1;
9020 case LT: case LTU: case UNGE:
9022 case ORDERED: case UNORDERED:
9023 return base_bit + 3;
9026 /* If scc, we will have done a cror to put the bit in the
9027 unordered position. So test that bit. For integer, this is ! LT
9028 unless this is an scc insn. */
9029 return scc_p ? base_bit + 3 : base_bit;
9032 return scc_p ? base_bit + 3 : base_bit + 1;
9039 /* Return the GOT register. */
9042 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9044 /* The second flow pass currently (June 1999) can't update
9045 regs_ever_live without disturbing other parts of the compiler, so
9046 update it here to make the prolog/epilogue code happy. */
9047 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9048 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9050 current_function_uses_pic_offset_table = 1;
9052 return pic_offset_table_rtx;
9055 /* Function to init struct machine_function.
9056 This will be called, via a pointer variable,
9057 from push_function_context. */
9059 static struct machine_function *
9060 rs6000_init_machine_status (void)
9062 return ggc_alloc_cleared (sizeof (machine_function));
9065 /* These macros test for integers and extract the low-order bits. */
9067 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9068 && GET_MODE (X) == VOIDmode)
9070 #define INT_LOWPART(X) \
9071 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9077 unsigned long val = INT_LOWPART (op);
9079 /* If the high bit is zero, the value is the first 1 bit we find
9081 if ((val & 0x80000000) == 0)
9083 if ((val & 0xffffffff) == 0)
9087 while (((val <<= 1) & 0x80000000) == 0)
9092 /* If the high bit is set and the low bit is not, or the mask is all
9093 1's, the value is zero. */
9094 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9097 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9100 while (((val >>= 1) & 1) != 0)
9110 unsigned long val = INT_LOWPART (op);
9112 /* If the low bit is zero, the value is the first 1 bit we find from
9116 if ((val & 0xffffffff) == 0)
9120 while (((val >>= 1) & 1) == 0)
9126 /* If the low bit is set and the high bit is not, or the mask is all
9127 1's, the value is 31. */
9128 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9131 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9134 while (((val <<= 1) & 0x80000000) != 0)
9140 /* Locate some local-dynamic symbol still in use by this function
9141 so that we can print its name in some tls_ld pattern. */
9144 rs6000_get_some_local_dynamic_name (void)
9148 if (cfun->machine->some_ld_name)
9149 return cfun->machine->some_ld_name;
9151 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
9153 && for_each_rtx (&PATTERN (insn),
9154 rs6000_get_some_local_dynamic_name_1, 0))
9155 return cfun->machine->some_ld_name;
9160 /* Helper function for rs6000_get_some_local_dynamic_name. */
9163 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9167 if (GET_CODE (x) == SYMBOL_REF)
9169 const char *str = XSTR (x, 0);
9170 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9172 cfun->machine->some_ld_name = str;
9180 /* Print an operand. Recognize special options, documented below. */
9183 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9184 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9186 #define SMALL_DATA_RELOC "sda21"
9187 #define SMALL_DATA_REG 0
9191 print_operand (FILE *file, rtx x, int code)
9195 unsigned HOST_WIDE_INT uval;
9200 /* Write out an instruction after the call which may be replaced
9201 with glue code by the loader. This depends on the AIX version. */
9202 asm_fprintf (file, RS6000_CALL_GLUE);
9205 /* %a is output_address. */
9208 /* If X is a constant integer whose low-order 5 bits are zero,
9209 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9210 in the AIX assembler where "sri" with a zero shift count
9211 writes a trash instruction. */
9212 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9219 /* If constant, low-order 16 bits of constant, unsigned.
9220 Otherwise, write normally. */
9222 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9224 print_operand (file, x, 0);
9228 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9229 for 64-bit mask direction. */
9230 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9233 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9237 /* X is a CR register. Print the number of the GT bit of the CR. */
9238 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9239 output_operand_lossage ("invalid %%E value");
9241 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9245 /* Like 'J' but get to the GT bit. */
9246 if (GET_CODE (x) != REG)
9249 /* Bit 1 is GT bit. */
9250 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
9252 /* If we want bit 31, write a shift count of zero, not 32. */
9253 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9257 /* X is a CR register. Print the number of the EQ bit of the CR */
9258 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9259 output_operand_lossage ("invalid %%E value");
9261 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9265 /* X is a CR register. Print the shift count needed to move it
9266 to the high-order four bits. */
9267 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9268 output_operand_lossage ("invalid %%f value");
9270 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9274 /* Similar, but print the count for the rotate in the opposite
9276 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9277 output_operand_lossage ("invalid %%F value");
9279 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9283 /* X is a constant integer. If it is negative, print "m",
9284 otherwise print "z". This is to make an aze or ame insn. */
9285 if (GET_CODE (x) != CONST_INT)
9286 output_operand_lossage ("invalid %%G value");
9287 else if (INTVAL (x) >= 0)
9294 /* If constant, output low-order five bits. Otherwise, write
9297 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9299 print_operand (file, x, 0);
9303 /* If constant, output low-order six bits. Otherwise, write
9306 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9308 print_operand (file, x, 0);
9312 /* Print `i' if this is a constant, else nothing. */
9318 /* Write the bit number in CCR for jump. */
9321 output_operand_lossage ("invalid %%j code");
9323 fprintf (file, "%d", i);
9327 /* Similar, but add one for shift count in rlinm for scc and pass
9328 scc flag to `ccr_bit'. */
9331 output_operand_lossage ("invalid %%J code");
9333 /* If we want bit 31, write a shift count of zero, not 32. */
9334 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9338 /* X must be a constant. Write the 1's complement of the
9341 output_operand_lossage ("invalid %%k value");
9343 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9347 /* X must be a symbolic constant on ELF. Write an
9348 expression suitable for an 'addi' that adds in the low 16
9350 if (GET_CODE (x) != CONST)
9352 print_operand_address (file, x);
9357 if (GET_CODE (XEXP (x, 0)) != PLUS
9358 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9359 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9360 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9361 output_operand_lossage ("invalid %%K value");
9362 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9364 /* For GNU as, there must be a non-alphanumeric character
9365 between 'l' and the number. The '-' is added by
9366 print_operand() already. */
9367 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9369 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9373 /* %l is output_asm_label. */
9376 /* Write second word of DImode or DFmode reference. Works on register
9377 or non-indexed memory only. */
9378 if (GET_CODE (x) == REG)
9379 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9380 else if (GET_CODE (x) == MEM)
9382 /* Handle possible auto-increment. Since it is pre-increment and
9383 we have already done it, we can just use an offset of word. */
9384 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9385 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9386 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9389 output_address (XEXP (adjust_address_nv (x, SImode,
9393 if (small_data_operand (x, GET_MODE (x)))
9394 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9395 reg_names[SMALL_DATA_REG]);
9400 /* MB value for a mask operand. */
9401 if (! mask_operand (x, SImode))
9402 output_operand_lossage ("invalid %%m value");
9404 fprintf (file, "%d", extract_MB (x));
9408 /* ME value for a mask operand. */
9409 if (! mask_operand (x, SImode))
9410 output_operand_lossage ("invalid %%M value");
9412 fprintf (file, "%d", extract_ME (x));
9415 /* %n outputs the negative of its operand. */
9418 /* Write the number of elements in the vector times 4. */
9419 if (GET_CODE (x) != PARALLEL)
9420 output_operand_lossage ("invalid %%N value");
9422 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9426 /* Similar, but subtract 1 first. */
9427 if (GET_CODE (x) != PARALLEL)
9428 output_operand_lossage ("invalid %%O value");
9430 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9434 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9436 || INT_LOWPART (x) < 0
9437 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9438 output_operand_lossage ("invalid %%p value");
9440 fprintf (file, "%d", i);
9444 /* The operand must be an indirect memory reference. The result
9445 is the register name. */
9446 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9447 || REGNO (XEXP (x, 0)) >= 32)
9448 output_operand_lossage ("invalid %%P value");
9450 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9454 /* This outputs the logical code corresponding to a boolean
9455 expression. The expression may have one or both operands
9456 negated (if one, only the first one). For condition register
9457 logical operations, it will also treat the negated
9458 CR codes as NOTs, but not handle NOTs of them. */
9460 const char *const *t = 0;
9462 enum rtx_code code = GET_CODE (x);
9463 static const char * const tbl[3][3] = {
9464 { "and", "andc", "nor" },
9465 { "or", "orc", "nand" },
9466 { "xor", "eqv", "xor" } };
9470 else if (code == IOR)
9472 else if (code == XOR)
9475 output_operand_lossage ("invalid %%q value");
9477 if (GET_CODE (XEXP (x, 0)) != NOT)
9481 if (GET_CODE (XEXP (x, 1)) == NOT)
9499 /* X is a CR register. Print the mask for `mtcrf'. */
9500 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9501 output_operand_lossage ("invalid %%R value");
9503 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9507 /* Low 5 bits of 32 - value */
9509 output_operand_lossage ("invalid %%s value");
9511 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9515 /* PowerPC64 mask position. All 0's is excluded.
9516 CONST_INT 32-bit mask is considered sign-extended so any
9517 transition must occur within the CONST_INT, not on the boundary. */
9518 if (! mask64_operand (x, DImode))
9519 output_operand_lossage ("invalid %%S value");
9521 uval = INT_LOWPART (x);
9523 if (uval & 1) /* Clear Left */
9525 #if HOST_BITS_PER_WIDE_INT > 64
9526 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9530 else /* Clear Right */
9533 #if HOST_BITS_PER_WIDE_INT > 64
9534 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9542 fprintf (file, "%d", i);
9546 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9547 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9550 /* Bit 3 is OV bit. */
9551 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9553 /* If we want bit 31, write a shift count of zero, not 32. */
9554 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9558 /* Print the symbolic name of a branch target register. */
9559 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9560 && REGNO (x) != COUNT_REGISTER_REGNUM))
9561 output_operand_lossage ("invalid %%T value");
9562 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9563 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9565 fputs ("ctr", file);
9569 /* High-order 16 bits of constant for use in unsigned operand. */
9571 output_operand_lossage ("invalid %%u value");
9573 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9574 (INT_LOWPART (x) >> 16) & 0xffff);
9578 /* High-order 16 bits of constant for use in signed operand. */
9580 output_operand_lossage ("invalid %%v value");
9582 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9583 (INT_LOWPART (x) >> 16) & 0xffff);
9587 /* Print `u' if this has an auto-increment or auto-decrement. */
9588 if (GET_CODE (x) == MEM
9589 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9590 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9595 /* Print the trap code for this operand. */
9596 switch (GET_CODE (x))
9599 fputs ("eq", file); /* 4 */
9602 fputs ("ne", file); /* 24 */
9605 fputs ("lt", file); /* 16 */
9608 fputs ("le", file); /* 20 */
9611 fputs ("gt", file); /* 8 */
9614 fputs ("ge", file); /* 12 */
9617 fputs ("llt", file); /* 2 */
9620 fputs ("lle", file); /* 6 */
9623 fputs ("lgt", file); /* 1 */
9626 fputs ("lge", file); /* 5 */
9634 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9637 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9638 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9640 print_operand (file, x, 0);
9644 /* MB value for a PowerPC64 rldic operand. */
9645 val = (GET_CODE (x) == CONST_INT
9646 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9651 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9652 if ((val <<= 1) < 0)
9655 #if HOST_BITS_PER_WIDE_INT == 32
9656 if (GET_CODE (x) == CONST_INT && i >= 0)
9657 i += 32; /* zero-extend high-part was all 0's */
9658 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9660 val = CONST_DOUBLE_LOW (x);
9667 for ( ; i < 64; i++)
9668 if ((val <<= 1) < 0)
9673 fprintf (file, "%d", i + 1);
9677 if (GET_CODE (x) == MEM
9678 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9683 /* Like 'L', for third word of TImode */
9684 if (GET_CODE (x) == REG)
9685 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9686 else if (GET_CODE (x) == MEM)
9688 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9689 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9690 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9692 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9693 if (small_data_operand (x, GET_MODE (x)))
9694 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9695 reg_names[SMALL_DATA_REG]);
9700 /* X is a SYMBOL_REF. Write out the name preceded by a
9701 period and without any trailing data in brackets. Used for function
9702 names. If we are configured for System V (or the embedded ABI) on
9703 the PowerPC, do not emit the period, since those systems do not use
9704 TOCs and the like. */
9705 if (GET_CODE (x) != SYMBOL_REF)
9708 /* Mark the decl as referenced so that cgraph will output the function. */
9709 if (SYMBOL_REF_DECL (x))
9710 mark_decl_referenced (SYMBOL_REF_DECL (x));
9712 if (XSTR (x, 0)[0] != '.')
9714 switch (DEFAULT_ABI)
9728 /* For macho, we need to check it see if we need a stub. */
9731 const char *name = XSTR (x, 0);
9733 if (MACHOPIC_INDIRECT
9734 && machopic_classify_name (name) == MACHOPIC_UNDEFINED_FUNCTION)
9735 name = machopic_stub_name (name);
9737 assemble_name (file, name);
9739 else if (TARGET_AIX)
9740 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9742 assemble_name (file, XSTR (x, 0));
9746 /* Like 'L', for last word of TImode. */
9747 if (GET_CODE (x) == REG)
9748 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9749 else if (GET_CODE (x) == MEM)
9751 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9752 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9753 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9755 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9756 if (small_data_operand (x, GET_MODE (x)))
9757 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9758 reg_names[SMALL_DATA_REG]);
9762 /* Print AltiVec or SPE memory operand. */
9767 if (GET_CODE (x) != MEM)
9775 if (GET_CODE (tmp) == REG)
9777 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9780 /* Handle [reg+UIMM]. */
9781 else if (GET_CODE (tmp) == PLUS &&
9782 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9786 if (GET_CODE (XEXP (tmp, 0)) != REG)
9789 x = INTVAL (XEXP (tmp, 1));
9790 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9794 /* Fall through. Must be [reg+reg]. */
9796 if (GET_CODE (tmp) == REG)
9797 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9798 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9800 if (REGNO (XEXP (tmp, 0)) == 0)
9801 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9802 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9804 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9805 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9813 if (GET_CODE (x) == REG)
9814 fprintf (file, "%s", reg_names[REGNO (x)]);
9815 else if (GET_CODE (x) == MEM)
9817 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9818 know the width from the mode. */
9819 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9820 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9821 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9822 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9823 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9824 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9826 output_address (XEXP (x, 0));
9829 output_addr_const (file, x);
9833 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9837 output_operand_lossage ("invalid %%xn code");
9841 /* Print the address of an operand. */
9844 print_operand_address (FILE *file, rtx x)
9846 if (GET_CODE (x) == REG)
9847 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9848 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9849 || GET_CODE (x) == LABEL_REF)
9851 output_addr_const (file, x);
9852 if (small_data_operand (x, GET_MODE (x)))
9853 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9854 reg_names[SMALL_DATA_REG]);
9855 else if (TARGET_TOC)
9858 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9860 if (REGNO (XEXP (x, 0)) == 0)
9861 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9862 reg_names[ REGNO (XEXP (x, 0)) ]);
9864 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9865 reg_names[ REGNO (XEXP (x, 1)) ]);
9867 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9868 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9869 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9871 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9872 && CONSTANT_P (XEXP (x, 1)))
9874 output_addr_const (file, XEXP (x, 1));
9875 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9879 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9880 && CONSTANT_P (XEXP (x, 1)))
9882 fprintf (file, "lo16(");
9883 output_addr_const (file, XEXP (x, 1));
9884 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9887 else if (legitimate_constant_pool_address_p (x))
9889 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9891 rtx contains_minus = XEXP (x, 1);
9895 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9896 turn it into (sym) for output_addr_const. */
9897 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9898 contains_minus = XEXP (contains_minus, 0);
9900 minus = XEXP (contains_minus, 0);
9901 symref = XEXP (minus, 0);
9902 XEXP (contains_minus, 0) = symref;
9907 name = XSTR (symref, 0);
9908 newname = alloca (strlen (name) + sizeof ("@toc"));
9909 strcpy (newname, name);
9910 strcat (newname, "@toc");
9911 XSTR (symref, 0) = newname;
9913 output_addr_const (file, XEXP (x, 1));
9915 XSTR (symref, 0) = name;
9916 XEXP (contains_minus, 0) = minus;
9919 output_addr_const (file, XEXP (x, 1));
9921 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9927 /* Target hook for assembling integer objects. The PowerPC version has
9928 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9929 is defined. It also needs to handle DI-mode objects on 64-bit
9933 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9935 #ifdef RELOCATABLE_NEEDS_FIXUP
9936 /* Special handling for SI values. */
9937 if (size == 4 && aligned_p)
9939 extern int in_toc_section (void);
9940 static int recurse = 0;
9942 /* For -mrelocatable, we mark all addresses that need to be fixed up
9943 in the .fixup section. */
9944 if (TARGET_RELOCATABLE
9945 && !in_toc_section ()
9946 && !in_text_section ()
9947 && !in_unlikely_text_section ()
9949 && GET_CODE (x) != CONST_INT
9950 && GET_CODE (x) != CONST_DOUBLE
9956 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9958 ASM_OUTPUT_LABEL (asm_out_file, buf);
9959 fprintf (asm_out_file, "\t.long\t(");
9960 output_addr_const (asm_out_file, x);
9961 fprintf (asm_out_file, ")@fixup\n");
9962 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9963 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9964 fprintf (asm_out_file, "\t.long\t");
9965 assemble_name (asm_out_file, buf);
9966 fprintf (asm_out_file, "\n\t.previous\n");
9970 /* Remove initial .'s to turn a -mcall-aixdesc function
9971 address into the address of the descriptor, not the function
9973 else if (GET_CODE (x) == SYMBOL_REF
9974 && XSTR (x, 0)[0] == '.'
9975 && DEFAULT_ABI == ABI_AIX)
9977 const char *name = XSTR (x, 0);
9978 while (*name == '.')
9981 fprintf (asm_out_file, "\t.long\t%s\n", name);
9985 #endif /* RELOCATABLE_NEEDS_FIXUP */
9986 return default_assemble_integer (x, size, aligned_p);
9989 #ifdef HAVE_GAS_HIDDEN
9990 /* Emit an assembler directive to set symbol visibility for DECL to
9994 rs6000_assemble_visibility (tree decl, int vis)
9996 /* Functions need to have their entry point symbol visibility set as
9997 well as their descriptor symbol visibility. */
9998 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
10000 static const char * const visibility_types[] = {
10001 NULL, "internal", "hidden", "protected"
10004 const char *name, *type;
10006 name = ((* targetm.strip_name_encoding)
10007 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
10008 type = visibility_types[vis];
10010 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
10011 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
10014 default_assemble_visibility (decl, vis);
10019 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10021 /* Reversal of FP compares takes care -- an ordered compare
10022 becomes an unordered compare and vice versa. */
10023 if (mode == CCFPmode
10024 && (!flag_finite_math_only
10025 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10026 || code == UNEQ || code == LTGT))
10027 return reverse_condition_maybe_unordered (code);
10029 return reverse_condition (code);
10032 /* Generate a compare for CODE. Return a brand-new rtx that
10033 represents the result of the compare. */
10036 rs6000_generate_compare (enum rtx_code code)
10038 enum machine_mode comp_mode;
10039 rtx compare_result;
10041 if (rs6000_compare_fp_p)
10042 comp_mode = CCFPmode;
10043 else if (code == GTU || code == LTU
10044 || code == GEU || code == LEU)
10045 comp_mode = CCUNSmode;
10047 comp_mode = CCmode;
10049 /* First, the compare. */
10050 compare_result = gen_reg_rtx (comp_mode);
10052 /* SPE FP compare instructions on the GPRs. Yuck! */
10053 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10054 && rs6000_compare_fp_p)
10056 rtx cmp, or1, or2, or_result, compare_result2;
10058 /* Note: The E500 comparison instructions set the GT bit (x +
10059 1), on success. This explains the mess. */
10063 case EQ: case UNEQ: case NE: case LTGT:
10064 cmp = flag_finite_math_only
10065 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
10066 rs6000_compare_op1)
10067 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
10068 rs6000_compare_op1);
10070 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
10071 cmp = flag_finite_math_only
10072 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
10073 rs6000_compare_op1)
10074 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
10075 rs6000_compare_op1);
10077 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
10078 cmp = flag_finite_math_only
10079 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
10080 rs6000_compare_op1)
10081 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
10082 rs6000_compare_op1);
10088 /* Synthesize LE and GE from LT/GT || EQ. */
10089 if (code == LE || code == GE || code == LEU || code == GEU)
10095 case LE: code = LT; break;
10096 case GE: code = GT; break;
10097 case LEU: code = LT; break;
10098 case GEU: code = GT; break;
10102 or1 = gen_reg_rtx (SImode);
10103 or2 = gen_reg_rtx (SImode);
10104 or_result = gen_reg_rtx (CCEQmode);
10105 compare_result2 = gen_reg_rtx (CCFPmode);
10108 cmp = flag_finite_math_only
10109 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
10110 rs6000_compare_op1)
10111 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
10112 rs6000_compare_op1);
10115 or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
10116 or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);
10118 /* OR them together. */
10119 cmp = gen_rtx_SET (VOIDmode, or_result,
10120 gen_rtx_COMPARE (CCEQmode,
10121 gen_rtx_IOR (SImode, or1, or2),
10123 compare_result = or_result;
10128 if (code == NE || code == LTGT)
10137 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
10138 gen_rtx_COMPARE (comp_mode,
10139 rs6000_compare_op0,
10140 rs6000_compare_op1)));
10142 /* Some kinds of FP comparisons need an OR operation;
10143 under flag_finite_math_only we don't bother. */
10144 if (rs6000_compare_fp_p
10145 && ! flag_finite_math_only
10146 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
10147 && (code == LE || code == GE
10148 || code == UNEQ || code == LTGT
10149 || code == UNGT || code == UNLT))
10151 enum rtx_code or1, or2;
10152 rtx or1_rtx, or2_rtx, compare2_rtx;
10153 rtx or_result = gen_reg_rtx (CCEQmode);
10157 case LE: or1 = LT; or2 = EQ; break;
10158 case GE: or1 = GT; or2 = EQ; break;
10159 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
10160 case LTGT: or1 = LT; or2 = GT; break;
10161 case UNGT: or1 = UNORDERED; or2 = GT; break;
10162 case UNLT: or1 = UNORDERED; or2 = LT; break;
10165 validate_condition_mode (or1, comp_mode);
10166 validate_condition_mode (or2, comp_mode);
10167 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
10168 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
10169 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
10170 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
10172 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
10174 compare_result = or_result;
10178 validate_condition_mode (code, GET_MODE (compare_result));
10180 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
10184 /* Emit the RTL for an sCOND pattern. */
10187 rs6000_emit_sCOND (enum rtx_code code, rtx result)
10190 enum machine_mode op_mode;
10191 enum rtx_code cond_code;
10193 condition_rtx = rs6000_generate_compare (code);
10194 cond_code = GET_CODE (condition_rtx);
10196 if (TARGET_E500 && rs6000_compare_fp_p
10197 && !TARGET_FPRS && TARGET_HARD_FLOAT)
10201 PUT_MODE (condition_rtx, SImode);
10202 t = XEXP (condition_rtx, 0);
10204 if (cond_code != NE && cond_code != EQ)
10207 if (cond_code == NE)
10208 emit_insn (gen_e500_flip_gt_bit (t, t));
10210 emit_insn (gen_move_from_CR_gt_bit (result, t));
10214 if (cond_code == NE
10215 || cond_code == GE || cond_code == LE
10216 || cond_code == GEU || cond_code == LEU
10217 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
10219 rtx not_result = gen_reg_rtx (CCEQmode);
10220 rtx not_op, rev_cond_rtx;
10221 enum machine_mode cc_mode;
10223 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
10225 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
10226 SImode, XEXP (condition_rtx, 0), const0_rtx);
10227 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
10228 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
10229 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
10232 op_mode = GET_MODE (rs6000_compare_op0);
10233 if (op_mode == VOIDmode)
10234 op_mode = GET_MODE (rs6000_compare_op1);
10236 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
10238 PUT_MODE (condition_rtx, DImode);
10239 convert_move (result, condition_rtx, 0);
10243 PUT_MODE (condition_rtx, SImode);
10244 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
10248 /* Emit a branch of kind CODE to location LOC. */
10251 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10253 rtx condition_rtx, loc_ref;
10255 condition_rtx = rs6000_generate_compare (code);
10256 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10257 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10258 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10259 loc_ref, pc_rtx)));
10262 /* Return the string to output a conditional branch to LABEL, which is
10263 the operand number of the label, or -1 if the branch is really a
10264 conditional return.
10266 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10267 condition code register and its mode specifies what kind of
10268 comparison we made.
10270 REVERSED is nonzero if we should reverse the sense of the comparison.
10272 INSN is the insn. */
10275 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10277 static char string[64];
10278 enum rtx_code code = GET_CODE (op);
10279 rtx cc_reg = XEXP (op, 0);
10280 enum machine_mode mode = GET_MODE (cc_reg);
10281 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10282 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10283 int really_reversed = reversed ^ need_longbranch;
10289 validate_condition_mode (code, mode);
10291 /* Work out which way this really branches. We could use
10292 reverse_condition_maybe_unordered here always but this
10293 makes the resulting assembler clearer. */
10294 if (really_reversed)
10296 /* Reversal of FP compares takes care -- an ordered compare
10297 becomes an unordered compare and vice versa. */
10298 if (mode == CCFPmode)
10299 code = reverse_condition_maybe_unordered (code);
10301 code = reverse_condition (code);
10304 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10306 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10309 /* Opposite of GT. */
10311 else if (code == NE)
10319 /* Not all of these are actually distinct opcodes, but
10320 we distinguish them for clarity of the resulting assembler. */
10321 case NE: case LTGT:
10322 ccode = "ne"; break;
10323 case EQ: case UNEQ:
10324 ccode = "eq"; break;
10326 ccode = "ge"; break;
10327 case GT: case GTU: case UNGT:
10328 ccode = "gt"; break;
10330 ccode = "le"; break;
10331 case LT: case LTU: case UNLT:
10332 ccode = "lt"; break;
10333 case UNORDERED: ccode = "un"; break;
10334 case ORDERED: ccode = "nu"; break;
10335 case UNGE: ccode = "nl"; break;
10336 case UNLE: ccode = "ng"; break;
10341 /* Maybe we have a guess as to how likely the branch is.
10342 The old mnemonics don't have a way to specify this information. */
10344 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10345 if (note != NULL_RTX)
10347 /* PROB is the difference from 50%. */
10348 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10350 /* Only hint for highly probable/improbable branches on newer
10351 cpus as static prediction overrides processor dynamic
10352 prediction. For older cpus we may as well always hint, but
10353 assume not taken for branches that are very close to 50% as a
10354 mispredicted taken branch is more expensive than a
10355 mispredicted not-taken branch. */
10356 if (rs6000_always_hint
10357 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10359 if (abs (prob) > REG_BR_PROB_BASE / 20
10360 && ((prob > 0) ^ need_longbranch))
10368 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10370 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10372 /* We need to escape any '%' characters in the reg_names string.
10373 Assume they'd only be the first character.... */
10374 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10376 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10380 /* If the branch distance was too far, we may have to use an
10381 unconditional branch to go the distance. */
10382 if (need_longbranch)
10383 s += sprintf (s, ",$+8\n\tb %s", label);
10385 s += sprintf (s, ",%s", label);
10391 /* Return the string to flip the GT bit on a CR. */
10393 output_e500_flip_gt_bit (rtx dst, rtx src)
10395 static char string[64];
10398 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10399 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10403 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
10404 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
10406 sprintf (string, "crnot %d,%d", a, b);
10410 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10411 operands of the last comparison is nonzero/true, FALSE_COND if it
10412 is zero/false. Return 0 if the hardware has no such operation. */
10415 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10417 enum rtx_code code = GET_CODE (op);
10418 rtx op0 = rs6000_compare_op0;
10419 rtx op1 = rs6000_compare_op1;
10420 REAL_VALUE_TYPE c1;
10421 enum machine_mode compare_mode = GET_MODE (op0);
10422 enum machine_mode result_mode = GET_MODE (dest);
10425 /* These modes should always match. */
10426 if (GET_MODE (op1) != compare_mode
10427 /* In the isel case however, we can use a compare immediate, so
10428 op1 may be a small constant. */
10429 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10431 if (GET_MODE (true_cond) != result_mode)
10433 if (GET_MODE (false_cond) != result_mode)
10436 /* First, work out if the hardware can do this at all, or
10437 if it's too slow.... */
10438 if (! rs6000_compare_fp_p)
10441 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10444 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10445 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10448 /* Eliminate half of the comparisons by switching operands, this
10449 makes the remaining code simpler. */
10450 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10451 || code == LTGT || code == LT || code == UNLE)
10453 code = reverse_condition_maybe_unordered (code);
10455 true_cond = false_cond;
10459 /* UNEQ and LTGT take four instructions for a comparison with zero,
10460 it'll probably be faster to use a branch here too. */
10461 if (code == UNEQ && HONOR_NANS (compare_mode))
10464 if (GET_CODE (op1) == CONST_DOUBLE)
10465 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10467 /* We're going to try to implement comparisons by performing
10468 a subtract, then comparing against zero. Unfortunately,
10469 Inf - Inf is NaN which is not zero, and so if we don't
10470 know that the operand is finite and the comparison
10471 would treat EQ different to UNORDERED, we can't do it. */
10472 if (HONOR_INFINITIES (compare_mode)
10473 && code != GT && code != UNGE
10474 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10475 /* Constructs of the form (a OP b ? a : b) are safe. */
10476 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10477 || (! rtx_equal_p (op0, true_cond)
10478 && ! rtx_equal_p (op1, true_cond))))
10480 /* At this point we know we can use fsel. */
10482 /* Reduce the comparison to a comparison against zero. */
10483 temp = gen_reg_rtx (compare_mode);
10484 emit_insn (gen_rtx_SET (VOIDmode, temp,
10485 gen_rtx_MINUS (compare_mode, op0, op1)));
10487 op1 = CONST0_RTX (compare_mode);
10489 /* If we don't care about NaNs we can reduce some of the comparisons
10490 down to faster ones. */
10491 if (! HONOR_NANS (compare_mode))
10497 true_cond = false_cond;
10510 /* Now, reduce everything down to a GE. */
10517 temp = gen_reg_rtx (compare_mode);
10518 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10523 temp = gen_reg_rtx (compare_mode);
10524 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10529 temp = gen_reg_rtx (compare_mode);
10530 emit_insn (gen_rtx_SET (VOIDmode, temp,
10531 gen_rtx_NEG (compare_mode,
10532 gen_rtx_ABS (compare_mode, op0))));
10537 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10538 temp = gen_reg_rtx (result_mode);
10539 emit_insn (gen_rtx_SET (VOIDmode, temp,
10540 gen_rtx_IF_THEN_ELSE (result_mode,
10541 gen_rtx_GE (VOIDmode,
10543 true_cond, false_cond)));
10544 false_cond = true_cond;
10547 temp = gen_reg_rtx (compare_mode);
10548 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10553 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10554 temp = gen_reg_rtx (result_mode);
10555 emit_insn (gen_rtx_SET (VOIDmode, temp,
10556 gen_rtx_IF_THEN_ELSE (result_mode,
10557 gen_rtx_GE (VOIDmode,
10559 true_cond, false_cond)));
10560 true_cond = false_cond;
10563 temp = gen_reg_rtx (compare_mode);
10564 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10572 emit_insn (gen_rtx_SET (VOIDmode, dest,
10573 gen_rtx_IF_THEN_ELSE (result_mode,
10574 gen_rtx_GE (VOIDmode,
10576 true_cond, false_cond)));
10580 /* Same as above, but for ints (isel). */
10583 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10585 rtx condition_rtx, cr;
10587 /* All isel implementations thus far are 32-bits. */
10588 if (GET_MODE (rs6000_compare_op0) != SImode)
10591 /* We still have to do the compare, because isel doesn't do a
10592 compare, it just looks at the CRx bits set by a previous compare
10594 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10595 cr = XEXP (condition_rtx, 0);
10597 if (GET_MODE (cr) == CCmode)
10598 emit_insn (gen_isel_signed (dest, condition_rtx,
10599 true_cond, false_cond, cr));
10601 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10602 true_cond, false_cond, cr));
10608 output_isel (rtx *operands)
10610 enum rtx_code code;
10612 code = GET_CODE (operands[1]);
10613 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10615 PUT_CODE (operands[1], reverse_condition (code));
10616 return "isel %0,%3,%2,%j1";
10619 return "isel %0,%2,%3,%j1";
10623 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10625 enum machine_mode mode = GET_MODE (op0);
10629 if (code == SMAX || code == SMIN)
10634 if (code == SMAX || code == UMAX)
10635 target = emit_conditional_move (dest, c, op0, op1, mode,
10636 op0, op1, mode, 0);
10638 target = emit_conditional_move (dest, c, op0, op1, mode,
10639 op1, op0, mode, 0);
10640 if (target == NULL_RTX)
10642 if (target != dest)
10643 emit_move_insn (dest, target);
10646 /* Emit instructions to move SRC to DST. Called by splitters for
10647 multi-register moves. It will emit at most one instruction for
10648 each register that is accessed; that is, it won't emit li/lis pairs
10649 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10653 rs6000_split_multireg_move (rtx dst, rtx src)
10655 /* The register number of the first register being moved. */
10657 /* The mode that is to be moved. */
10658 enum machine_mode mode;
10659 /* The mode that the move is being done in, and its size. */
10660 enum machine_mode reg_mode;
10662 /* The number of registers that will be moved. */
10665 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10666 mode = GET_MODE (dst);
10667 nregs = HARD_REGNO_NREGS (reg, mode);
10668 if (FP_REGNO_P (reg))
10670 else if (ALTIVEC_REGNO_P (reg))
10671 reg_mode = V16QImode;
10673 reg_mode = word_mode;
10674 reg_mode_size = GET_MODE_SIZE (reg_mode);
10676 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10679 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10681 /* Move register range backwards, if we might have destructive
10684 for (i = nregs - 1; i >= 0; i--)
10685 emit_insn (gen_rtx_SET (VOIDmode,
10686 simplify_gen_subreg (reg_mode, dst, mode,
10687 i * reg_mode_size),
10688 simplify_gen_subreg (reg_mode, src, mode,
10689 i * reg_mode_size)));
10695 bool used_update = false;
10697 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10701 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10702 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10705 breg = XEXP (XEXP (src, 0), 0);
10706 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10707 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10708 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10709 emit_insn (TARGET_32BIT
10710 ? gen_addsi3 (breg, breg, delta_rtx)
10711 : gen_adddi3 (breg, breg, delta_rtx));
10712 src = gen_rtx_MEM (mode, breg);
10715 /* We have now address involving an base register only.
10716 If we use one of the registers to address memory,
10717 we have change that register last. */
10719 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10720 ? XEXP (XEXP (src, 0), 0)
10726 if (REGNO (breg) >= REGNO (dst)
10727 && REGNO (breg) < REGNO (dst) + nregs)
10728 j = REGNO (breg) - REGNO (dst);
10731 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10735 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10736 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10739 breg = XEXP (XEXP (dst, 0), 0);
10740 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10741 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10742 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10744 /* We have to update the breg before doing the store.
10745 Use store with update, if available. */
10749 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10750 emit_insn (TARGET_32BIT
10751 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10752 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10753 used_update = true;
10756 emit_insn (TARGET_32BIT
10757 ? gen_addsi3 (breg, breg, delta_rtx)
10758 : gen_adddi3 (breg, breg, delta_rtx));
10759 dst = gen_rtx_MEM (mode, breg);
10763 for (i = 0; i < nregs; i++)
10765 /* Calculate index to next subword. */
10770 /* If compiler already emited move of first word by
10771 store with update, no need to do anything. */
10772 if (j == 0 && used_update)
10775 emit_insn (gen_rtx_SET (VOIDmode,
10776 simplify_gen_subreg (reg_mode, dst, mode,
10777 j * reg_mode_size),
10778 simplify_gen_subreg (reg_mode, src, mode,
10779 j * reg_mode_size)));
10785 /* This page contains routines that are used to determine what the
10786 function prologue and epilogue code will do and write them out. */
10788 /* Return the first fixed-point register that is required to be
10789 saved. 32 if none. */
10792 first_reg_to_save (void)
10796 /* Find lowest numbered live register. */
10797 for (first_reg = 13; first_reg <= 31; first_reg++)
10798 if (regs_ever_live[first_reg]
10799 && (! call_used_regs[first_reg]
10800 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10801 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10802 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10807 && current_function_uses_pic_offset_table
10808 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10809 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10815 /* Similar, for FP regs. */
10818 first_fp_reg_to_save (void)
10822 /* Find lowest numbered live register. */
10823 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10824 if (regs_ever_live[first_reg])
10830 /* Similar, for AltiVec regs. */
10833 first_altivec_reg_to_save (void)
10837 /* Stack frame remains as is unless we are in AltiVec ABI. */
10838 if (! TARGET_ALTIVEC_ABI)
10839 return LAST_ALTIVEC_REGNO + 1;
10841 /* Find lowest numbered live register. */
10842 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10843 if (regs_ever_live[i])
10849 /* Return a 32-bit mask of the AltiVec registers we need to set in
10850 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10851 the 32-bit word is 0. */
10853 static unsigned int
10854 compute_vrsave_mask (void)
10856 unsigned int i, mask = 0;
10858 /* First, find out if we use _any_ altivec registers. */
10859 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10860 if (regs_ever_live[i])
10861 mask |= ALTIVEC_REG_BIT (i);
10866 /* Next, remove the argument registers from the set. These must
10867 be in the VRSAVE mask set by the caller, so we don't need to add
10868 them in again. More importantly, the mask we compute here is
10869 used to generate CLOBBERs in the set_vrsave insn, and we do not
10870 wish the argument registers to die. */
10871 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10872 mask &= ~ALTIVEC_REG_BIT (i);
10874 /* Similarly, remove the return value from the set. */
10877 diddle_return_value (is_altivec_return_reg, &yes);
10879 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10886 is_altivec_return_reg (rtx reg, void *xyes)
10888 bool *yes = (bool *) xyes;
10889 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10894 /* Calculate the stack information for the current function. This is
10895 complicated by having two separate calling sequences, the AIX calling
10896 sequence and the V.4 calling sequence.
10898 AIX (and Darwin/Mac OS X) stack frames look like:
10900 SP----> +---------------------------------------+
10901 | back chain to caller | 0 0
10902 +---------------------------------------+
10903 | saved CR | 4 8 (8-11)
10904 +---------------------------------------+
10906 +---------------------------------------+
10907 | reserved for compilers | 12 24
10908 +---------------------------------------+
10909 | reserved for binders | 16 32
10910 +---------------------------------------+
10911 | saved TOC pointer | 20 40
10912 +---------------------------------------+
10913 | Parameter save area (P) | 24 48
10914 +---------------------------------------+
10915 | Alloca space (A) | 24+P etc.
10916 +---------------------------------------+
10917 | Local variable space (L) | 24+P+A
10918 +---------------------------------------+
10919 | Float/int conversion temporary (X) | 24+P+A+L
10920 +---------------------------------------+
10921 | Save area for AltiVec registers (W) | 24+P+A+L+X
10922 +---------------------------------------+
10923 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10924 +---------------------------------------+
10925 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10926 +---------------------------------------+
10927 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10928 +---------------------------------------+
10929 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10930 +---------------------------------------+
10931 old SP->| back chain to caller's caller |
10932 +---------------------------------------+
10934 The required alignment for AIX configurations is two words (i.e., 8
10938 V.4 stack frames look like:
10940 SP----> +---------------------------------------+
10941 | back chain to caller | 0
10942 +---------------------------------------+
10943 | caller's saved LR | 4
10944 +---------------------------------------+
10945 | Parameter save area (P) | 8
10946 +---------------------------------------+
10947 | Alloca space (A) | 8+P
10948 +---------------------------------------+
10949 | Varargs save area (V) | 8+P+A
10950 +---------------------------------------+
10951 | Local variable space (L) | 8+P+A+V
10952 +---------------------------------------+
10953 | Float/int conversion temporary (X) | 8+P+A+V+L
10954 +---------------------------------------+
10955 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10956 +---------------------------------------+
10957 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10958 +---------------------------------------+
10959 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10960 +---------------------------------------+
10961 | SPE: area for 64-bit GP registers |
10962 +---------------------------------------+
10963 | SPE alignment padding |
10964 +---------------------------------------+
10965 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10966 +---------------------------------------+
10967 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10968 +---------------------------------------+
10969 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10970 +---------------------------------------+
10971 old SP->| back chain to caller's caller |
10972 +---------------------------------------+
10974 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10975 given. (But note below and in sysv4.h that we require only 8 and
10976 may round up the size of our stack frame anyways. The historical
10977 reason is early versions of powerpc-linux which didn't properly
10978 align the stack at program startup. A happy side-effect is that
10979 -mno-eabi libraries can be used with -meabi programs.)
10981 The EABI configuration defaults to the V.4 layout. However,
10982 the stack alignment requirements may differ. If -mno-eabi is not
10983 given, the required stack alignment is 8 bytes; if -mno-eabi is
10984 given, the required alignment is 16 bytes. (But see V.4 comment
10987 #ifndef ABI_STACK_BOUNDARY
10988 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10991 static rs6000_stack_t *
10992 rs6000_stack_info (void)
10994 static rs6000_stack_t info, zero_info;
10995 rs6000_stack_t *info_ptr = &info;
10996 int reg_size = TARGET_32BIT ? 4 : 8;
10998 HOST_WIDE_INT non_fixed_size;
11000 /* Zero all fields portably. */
11005 /* Cache value so we don't rescan instruction chain over and over. */
11006 if (cfun->machine->insn_chain_scanned_p == 0)
11008 cfun->machine->insn_chain_scanned_p = 1;
11009 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
11013 /* Select which calling sequence. */
11014 info_ptr->abi = DEFAULT_ABI;
11016 /* Calculate which registers need to be saved & save area size. */
11017 info_ptr->first_gp_reg_save = first_reg_to_save ();
11018 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11019 even if it currently looks like we won't. */
11020 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11021 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11022 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11023 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11024 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11026 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11028 /* For the SPE, we have an additional upper 32-bits on each GPR.
11029 Ideally we should save the entire 64-bits only when the upper
11030 half is used in SIMD instructions. Since we only record
11031 registers live (not the size they are used in), this proves
11032 difficult because we'd have to traverse the instruction chain at
11033 the right time, taking reload into account. This is a real pain,
11034 so we opt to save the GPRs in 64-bits always if but one register
11035 gets used in 64-bits. Otherwise, all the registers in the frame
11036 get saved in 32-bits.
11038 So... since when we save all GPRs (except the SP) in 64-bits, the
11039 traditional GP save area will be empty. */
11040 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11041 info_ptr->gp_size = 0;
11043 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11044 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11046 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11047 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11048 - info_ptr->first_altivec_reg_save);
11050 /* Does this function call anything? */
11051 info_ptr->calls_p = (! current_function_is_leaf
11052 || cfun->machine->ra_needs_full_frame);
11054 /* Determine if we need to save the link register. */
11055 if (rs6000_ra_ever_killed ()
11056 || (DEFAULT_ABI == ABI_AIX
11057 && current_function_profile
11058 && !TARGET_PROFILE_KERNEL)
11059 #ifdef TARGET_RELOCATABLE
11060 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11062 || (info_ptr->first_fp_reg_save != 64
11063 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11064 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11065 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11066 || (DEFAULT_ABI == ABI_DARWIN
11068 && current_function_uses_pic_offset_table)
11069 || info_ptr->calls_p)
11071 info_ptr->lr_save_p = 1;
11072 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11075 /* Determine if we need to save the condition code registers. */
11076 if (regs_ever_live[CR2_REGNO]
11077 || regs_ever_live[CR3_REGNO]
11078 || regs_ever_live[CR4_REGNO])
11080 info_ptr->cr_save_p = 1;
11081 if (DEFAULT_ABI == ABI_V4)
11082 info_ptr->cr_size = reg_size;
11085 /* If the current function calls __builtin_eh_return, then we need
11086 to allocate stack space for registers that will hold data for
11087 the exception handler. */
11088 if (current_function_calls_eh_return)
11091 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11094 /* SPE saves EH registers in 64-bits. */
11095 ehrd_size = i * (TARGET_SPE_ABI
11096 && info_ptr->spe_64bit_regs_used != 0
11097 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11102 /* Determine various sizes. */
11103 info_ptr->reg_size = reg_size;
11104 info_ptr->fixed_size = RS6000_SAVE_AREA;
11105 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11106 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11107 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11108 TARGET_ALTIVEC ? 16 : 8);
11110 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11111 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11113 info_ptr->spe_gp_size = 0;
11115 if (TARGET_ALTIVEC_ABI)
11116 info_ptr->vrsave_mask = compute_vrsave_mask ();
11118 info_ptr->vrsave_mask = 0;
11120 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11121 info_ptr->vrsave_size = 4;
11123 info_ptr->vrsave_size = 0;
11125 /* Calculate the offsets. */
11126 switch (DEFAULT_ABI)
11134 info_ptr->fp_save_offset = - info_ptr->fp_size;
11135 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11137 if (TARGET_ALTIVEC_ABI)
11139 info_ptr->vrsave_save_offset
11140 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11142 /* Align stack so vector save area is on a quadword boundary. */
11143 if (info_ptr->altivec_size != 0)
11144 info_ptr->altivec_padding_size
11145 = 16 - (-info_ptr->vrsave_save_offset % 16);
11147 info_ptr->altivec_padding_size = 0;
11149 info_ptr->altivec_save_offset
11150 = info_ptr->vrsave_save_offset
11151 - info_ptr->altivec_padding_size
11152 - info_ptr->altivec_size;
11154 /* Adjust for AltiVec case. */
11155 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11158 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11159 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11160 info_ptr->lr_save_offset = 2*reg_size;
11164 info_ptr->fp_save_offset = - info_ptr->fp_size;
11165 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11166 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11168 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11170 /* Align stack so SPE GPR save area is aligned on a
11171 double-word boundary. */
11172 if (info_ptr->spe_gp_size != 0)
11173 info_ptr->spe_padding_size
11174 = 8 - (-info_ptr->cr_save_offset % 8);
11176 info_ptr->spe_padding_size = 0;
11178 info_ptr->spe_gp_save_offset
11179 = info_ptr->cr_save_offset
11180 - info_ptr->spe_padding_size
11181 - info_ptr->spe_gp_size;
11183 /* Adjust for SPE case. */
11184 info_ptr->toc_save_offset
11185 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11187 else if (TARGET_ALTIVEC_ABI)
11189 info_ptr->vrsave_save_offset
11190 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11192 /* Align stack so vector save area is on a quadword boundary. */
11193 if (info_ptr->altivec_size != 0)
11194 info_ptr->altivec_padding_size
11195 = 16 - (-info_ptr->vrsave_save_offset % 16);
11197 info_ptr->altivec_padding_size = 0;
11199 info_ptr->altivec_save_offset
11200 = info_ptr->vrsave_save_offset
11201 - info_ptr->altivec_padding_size
11202 - info_ptr->altivec_size;
11204 /* Adjust for AltiVec case. */
11205 info_ptr->toc_save_offset
11206 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11209 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11210 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11211 info_ptr->lr_save_offset = reg_size;
11215 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11216 + info_ptr->gp_size
11217 + info_ptr->altivec_size
11218 + info_ptr->altivec_padding_size
11219 + info_ptr->spe_gp_size
11220 + info_ptr->spe_padding_size
11222 + info_ptr->cr_size
11223 + info_ptr->lr_size
11224 + info_ptr->vrsave_size
11225 + info_ptr->toc_size,
11226 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11229 non_fixed_size = (info_ptr->vars_size
11230 + info_ptr->parm_size
11231 + info_ptr->save_size
11232 + info_ptr->varargs_size);
11234 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11235 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11237 /* Determine if we need to allocate any stack frame:
11239 For AIX we need to push the stack if a frame pointer is needed
11240 (because the stack might be dynamically adjusted), if we are
11241 debugging, if we make calls, or if the sum of fp_save, gp_save,
11242 and local variables are more than the space needed to save all
11243 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11244 + 18*8 = 288 (GPR13 reserved).
11246 For V.4 we don't have the stack cushion that AIX uses, but assume
11247 that the debugger can handle stackless frames. */
11249 if (info_ptr->calls_p)
11250 info_ptr->push_p = 1;
11252 else if (DEFAULT_ABI == ABI_V4)
11253 info_ptr->push_p = non_fixed_size != 0;
11255 else if (frame_pointer_needed)
11256 info_ptr->push_p = 1;
11258 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11259 info_ptr->push_p = 1;
11262 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11264 /* Zero offsets if we're not saving those registers. */
11265 if (info_ptr->fp_size == 0)
11266 info_ptr->fp_save_offset = 0;
11268 if (info_ptr->gp_size == 0)
11269 info_ptr->gp_save_offset = 0;
11271 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11272 info_ptr->altivec_save_offset = 0;
11274 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11275 info_ptr->vrsave_save_offset = 0;
11277 if (! TARGET_SPE_ABI
11278 || info_ptr->spe_64bit_regs_used == 0
11279 || info_ptr->spe_gp_size == 0)
11280 info_ptr->spe_gp_save_offset = 0;
11282 if (! info_ptr->lr_save_p)
11283 info_ptr->lr_save_offset = 0;
11285 if (! info_ptr->cr_save_p)
11286 info_ptr->cr_save_offset = 0;
11288 if (! info_ptr->toc_save_p)
11289 info_ptr->toc_save_offset = 0;
11294 /* Return true if the current function uses any GPRs in 64-bit SIMD
11298 spe_func_has_64bit_regs_p (void)
11302 /* Functions that save and restore all the call-saved registers will
11303 need to save/restore the registers in 64-bits. */
11304 if (current_function_calls_eh_return
11305 || current_function_calls_setjmp
11306 || current_function_has_nonlocal_goto)
11309 insns = get_insns ();
11311 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11317 i = PATTERN (insn);
11318 if (GET_CODE (i) == SET
11319 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11328 debug_stack_info (rs6000_stack_t *info)
11330 const char *abi_string;
11333 info = rs6000_stack_info ();
11335 fprintf (stderr, "\nStack information for function %s:\n",
11336 ((current_function_decl && DECL_NAME (current_function_decl))
11337 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11342 default: abi_string = "Unknown"; break;
11343 case ABI_NONE: abi_string = "NONE"; break;
11344 case ABI_AIX: abi_string = "AIX"; break;
11345 case ABI_DARWIN: abi_string = "Darwin"; break;
11346 case ABI_V4: abi_string = "V.4"; break;
11349 fprintf (stderr, "\tABI = %5s\n", abi_string);
11351 if (TARGET_ALTIVEC_ABI)
11352 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11354 if (TARGET_SPE_ABI)
11355 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11357 if (info->first_gp_reg_save != 32)
11358 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11360 if (info->first_fp_reg_save != 64)
11361 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11363 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11364 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11365 info->first_altivec_reg_save);
11367 if (info->lr_save_p)
11368 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11370 if (info->cr_save_p)
11371 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11373 if (info->toc_save_p)
11374 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11376 if (info->vrsave_mask)
11377 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11380 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11383 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11385 if (info->gp_save_offset)
11386 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11388 if (info->fp_save_offset)
11389 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11391 if (info->altivec_save_offset)
11392 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11393 info->altivec_save_offset);
11395 if (info->spe_gp_save_offset)
11396 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11397 info->spe_gp_save_offset);
11399 if (info->vrsave_save_offset)
11400 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11401 info->vrsave_save_offset);
11403 if (info->lr_save_offset)
11404 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11406 if (info->cr_save_offset)
11407 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11409 if (info->toc_save_offset)
11410 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11412 if (info->varargs_save_offset)
11413 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11415 if (info->total_size)
11416 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11419 if (info->varargs_size)
11420 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11422 if (info->vars_size)
11423 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11426 if (info->parm_size)
11427 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11429 if (info->fixed_size)
11430 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11433 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11435 if (info->spe_gp_size)
11436 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11439 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11441 if (info->altivec_size)
11442 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11444 if (info->vrsave_size)
11445 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11447 if (info->altivec_padding_size)
11448 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11449 info->altivec_padding_size);
11451 if (info->spe_padding_size)
11452 fprintf (stderr, "\tspe_padding_size = %5d\n",
11453 info->spe_padding_size);
11456 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11459 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11461 if (info->toc_size)
11462 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11464 if (info->save_size)
11465 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11467 if (info->reg_size != 4)
11468 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11470 fprintf (stderr, "\n");
11474 rs6000_return_addr (int count, rtx frame)
11476 /* Currently we don't optimize very well between prolog and body
11477 code and for PIC code the code can be actually quite bad, so
11478 don't try to be too clever here. */
11479 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11481 cfun->machine->ra_needs_full_frame = 1;
11488 plus_constant (copy_to_reg
11489 (gen_rtx_MEM (Pmode,
11490 memory_address (Pmode, frame))),
11491 RETURN_ADDRESS_OFFSET)));
11494 cfun->machine->ra_need_lr = 1;
11495 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11498 /* Say whether a function is a candidate for sibcall handling or not.
11499 We do not allow indirect calls to be optimized into sibling calls.
11500 Also, we can't do it if there are any vector parameters; there's
11501 nowhere to put the VRsave code so it works; note that functions with
11502 vector parameters are required to have a prototype, so the argument
11503 type info must be available here. (The tail recursion case can work
11504 with vector parameters, but there's no way to distinguish here.) */
11506 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11511 if (TARGET_ALTIVEC_VRSAVE)
11513 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11514 type; type = TREE_CHAIN (type))
11516 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11520 if (DEFAULT_ABI == ABI_DARWIN
11521 || (*targetm.binds_local_p) (decl))
11523 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11525 if (!lookup_attribute ("longcall", attr_list)
11526 || lookup_attribute ("shortcall", attr_list))
11534 rs6000_ra_ever_killed (void)
11540 if (current_function_is_thunk)
11543 /* regs_ever_live has LR marked as used if any sibcalls are present,
11544 but this should not force saving and restoring in the
11545 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11546 clobbers LR, so that is inappropriate. */
11548 /* Also, the prologue can generate a store into LR that
11549 doesn't really count, like this:
11552 bcl to set PIC register
11556 When we're called from the epilogue, we need to avoid counting
11557 this as a store. */
11559 push_topmost_sequence ();
11560 top = get_insns ();
11561 pop_topmost_sequence ();
11562 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11564 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11568 if (FIND_REG_INC_NOTE (insn, reg))
11570 else if (GET_CODE (insn) == CALL_INSN
11571 && !SIBLING_CALL_P (insn))
11573 else if (set_of (reg, insn) != NULL_RTX
11574 && !prologue_epilogue_contains (insn))
11581 /* Add a REG_MAYBE_DEAD note to the insn. */
11583 rs6000_maybe_dead (rtx insn)
11585 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11590 /* Emit instructions needed to load the TOC register.
11591 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11592 a constant pool; or for SVR4 -fpic. */
11595 rs6000_emit_load_toc_table (int fromprolog)
11598 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11600 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11602 rtx temp = (fromprolog
11603 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11604 : gen_reg_rtx (Pmode));
11605 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11607 rs6000_maybe_dead (insn);
11608 insn = emit_move_insn (dest, temp);
11610 rs6000_maybe_dead (insn);
11612 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11615 rtx tempLR = (fromprolog
11616 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11617 : gen_reg_rtx (Pmode));
11618 rtx temp0 = (fromprolog
11619 ? gen_rtx_REG (Pmode, 0)
11620 : gen_reg_rtx (Pmode));
11623 /* possibly create the toc section */
11624 if (! toc_initialized)
11627 function_section (current_function_decl);
11634 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11635 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11637 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11638 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11640 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11642 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11643 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11650 static int reload_toc_labelno = 0;
11652 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11654 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11655 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11657 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11658 emit_move_insn (dest, tempLR);
11659 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11661 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11663 rs6000_maybe_dead (insn);
11665 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11667 /* This is for AIX code running in non-PIC ELF32. */
11670 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11671 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11673 insn = emit_insn (gen_elf_high (dest, realsym));
11675 rs6000_maybe_dead (insn);
11676 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11678 rs6000_maybe_dead (insn);
11680 else if (DEFAULT_ABI == ABI_AIX)
11683 insn = emit_insn (gen_load_toc_aix_si (dest));
11685 insn = emit_insn (gen_load_toc_aix_di (dest));
11687 rs6000_maybe_dead (insn);
11693 /* Emit instructions to restore the link register after determining where
11694 its value has been stored. */
11697 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11699 rs6000_stack_t *info = rs6000_stack_info ();
11702 operands[0] = source;
11703 operands[1] = scratch;
11705 if (info->lr_save_p)
11707 rtx frame_rtx = stack_pointer_rtx;
11708 HOST_WIDE_INT sp_offset = 0;
11711 if (frame_pointer_needed
11712 || current_function_calls_alloca
11713 || info->total_size > 32767)
11715 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11716 frame_rtx = operands[1];
11718 else if (info->push_p)
11719 sp_offset = info->total_size;
11721 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11722 tmp = gen_rtx_MEM (Pmode, tmp);
11723 emit_move_insn (tmp, operands[0]);
11726 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11729 static GTY(()) int set = -1;
11732 get_TOC_alias_set (void)
11735 set = new_alias_set ();
11739 /* This returns nonzero if the current function uses the TOC. This is
11740 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11741 is generated by the ABI_V4 load_toc_* patterns. */
11748 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11751 rtx pat = PATTERN (insn);
11754 if (GET_CODE (pat) == PARALLEL)
11755 for (i = 0; i < XVECLEN (pat, 0); i++)
11757 rtx sub = XVECEXP (pat, 0, i);
11758 if (GET_CODE (sub) == USE)
11760 sub = XEXP (sub, 0);
11761 if (GET_CODE (sub) == UNSPEC
11762 && XINT (sub, 1) == UNSPEC_TOC)
11772 create_TOC_reference (rtx symbol)
11774 return gen_rtx_PLUS (Pmode,
11775 gen_rtx_REG (Pmode, TOC_REGISTER),
11776 gen_rtx_CONST (Pmode,
11777 gen_rtx_MINUS (Pmode, symbol,
11778 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11781 /* If _Unwind_* has been called from within the same module,
11782 toc register is not guaranteed to be saved to 40(1) on function
11783 entry. Save it there in that case. */
11786 rs6000_aix_emit_builtin_unwind_init (void)
11789 rtx stack_top = gen_reg_rtx (Pmode);
11790 rtx opcode_addr = gen_reg_rtx (Pmode);
11791 rtx opcode = gen_reg_rtx (SImode);
11792 rtx tocompare = gen_reg_rtx (SImode);
11793 rtx no_toc_save_needed = gen_label_rtx ();
11795 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11796 emit_move_insn (stack_top, mem);
11798 mem = gen_rtx_MEM (Pmode,
11799 gen_rtx_PLUS (Pmode, stack_top,
11800 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11801 emit_move_insn (opcode_addr, mem);
11802 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11803 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11804 : 0xE8410028, SImode));
11806 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11807 SImode, NULL_RTX, NULL_RTX,
11808 no_toc_save_needed);
11810 mem = gen_rtx_MEM (Pmode,
11811 gen_rtx_PLUS (Pmode, stack_top,
11812 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11813 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11814 emit_label (no_toc_save_needed);
11817 /* This ties together stack memory (MEM with an alias set of
11818 rs6000_sr_alias_set) and the change to the stack pointer. */
11821 rs6000_emit_stack_tie (void)
11823 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11825 set_mem_alias_set (mem, rs6000_sr_alias_set);
11826 emit_insn (gen_stack_tie (mem));
11829 /* Emit the correct code for allocating stack space, as insns.
11830 If COPY_R12, make sure a copy of the old frame is left in r12.
11831 The generated code may use hard register 0 as a temporary. */
11834 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11837 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11838 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11839 rtx todec = GEN_INT (-size);
11841 if (current_function_limit_stack)
11843 if (REG_P (stack_limit_rtx)
11844 && REGNO (stack_limit_rtx) > 1
11845 && REGNO (stack_limit_rtx) <= 31)
11847 emit_insn (TARGET_32BIT
11848 ? gen_addsi3 (tmp_reg,
11851 : gen_adddi3 (tmp_reg,
11855 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11858 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11860 && DEFAULT_ABI == ABI_V4)
11862 rtx toload = gen_rtx_CONST (VOIDmode,
11863 gen_rtx_PLUS (Pmode,
11867 emit_insn (gen_elf_high (tmp_reg, toload));
11868 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11869 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11873 warning ("stack limit expression is not supported");
11876 if (copy_r12 || ! TARGET_UPDATE)
11877 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11883 /* Need a note here so that try_split doesn't get confused. */
11884 if (get_last_insn() == NULL_RTX)
11885 emit_note (NOTE_INSN_DELETED);
11886 insn = emit_move_insn (tmp_reg, todec);
11887 try_split (PATTERN (insn), insn, 0);
11891 insn = emit_insn (TARGET_32BIT
11892 ? gen_movsi_update (stack_reg, stack_reg,
11894 : gen_movdi_update (stack_reg, stack_reg,
11895 todec, stack_reg));
11899 insn = emit_insn (TARGET_32BIT
11900 ? gen_addsi3 (stack_reg, stack_reg, todec)
11901 : gen_adddi3 (stack_reg, stack_reg, todec));
11902 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11903 gen_rtx_REG (Pmode, 12));
11906 RTX_FRAME_RELATED_P (insn) = 1;
11908 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11909 gen_rtx_SET (VOIDmode, stack_reg,
11910 gen_rtx_PLUS (Pmode, stack_reg,
11915 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11916 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11917 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11918 deduce these equivalences by itself so it wasn't necessary to hold
11919 its hand so much. */
11922 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11923 rtx reg2, rtx rreg)
11927 /* copy_rtx will not make unique copies of registers, so we need to
11928 ensure we don't have unwanted sharing here. */
11930 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11933 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11935 real = copy_rtx (PATTERN (insn));
11937 if (reg2 != NULL_RTX)
11938 real = replace_rtx (real, reg2, rreg);
11940 real = replace_rtx (real, reg,
11941 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11942 STACK_POINTER_REGNUM),
11945 /* We expect that 'real' is either a SET or a PARALLEL containing
11946 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11947 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11949 if (GET_CODE (real) == SET)
11953 temp = simplify_rtx (SET_SRC (set));
11955 SET_SRC (set) = temp;
11956 temp = simplify_rtx (SET_DEST (set));
11958 SET_DEST (set) = temp;
11959 if (GET_CODE (SET_DEST (set)) == MEM)
11961 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11963 XEXP (SET_DEST (set), 0) = temp;
11966 else if (GET_CODE (real) == PARALLEL)
11969 for (i = 0; i < XVECLEN (real, 0); i++)
11970 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11972 rtx set = XVECEXP (real, 0, i);
11974 temp = simplify_rtx (SET_SRC (set));
11976 SET_SRC (set) = temp;
11977 temp = simplify_rtx (SET_DEST (set));
11979 SET_DEST (set) = temp;
11980 if (GET_CODE (SET_DEST (set)) == MEM)
11982 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11984 XEXP (SET_DEST (set), 0) = temp;
11986 RTX_FRAME_RELATED_P (set) = 1;
11993 real = spe_synthesize_frame_save (real);
11995 RTX_FRAME_RELATED_P (insn) = 1;
11996 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12001 /* Given an SPE frame note, return a PARALLEL of SETs with the
12002 original note, plus a synthetic register save. */
12005 spe_synthesize_frame_save (rtx real)
12007 rtx synth, offset, reg, real2;
12009 if (GET_CODE (real) != SET
12010 || GET_MODE (SET_SRC (real)) != V2SImode)
12013 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12014 frame related note. The parallel contains a set of the register
12015 being saved, and another set to a synthetic register (n+1200).
12016 This is so we can differentiate between 64-bit and 32-bit saves.
12017 Words cannot describe this nastiness. */
12019 if (GET_CODE (SET_DEST (real)) != MEM
12020 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
12021 || GET_CODE (SET_SRC (real)) != REG)
12025 (set (mem (plus (reg x) (const y)))
12028 (set (mem (plus (reg x) (const y+4)))
12032 real2 = copy_rtx (real);
12033 PUT_MODE (SET_DEST (real2), SImode);
12034 reg = SET_SRC (real2);
12035 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12036 synth = copy_rtx (real2);
12038 if (BYTES_BIG_ENDIAN)
12040 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12041 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12044 reg = SET_SRC (synth);
12046 synth = replace_rtx (synth, reg,
12047 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12049 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12050 synth = replace_rtx (synth, offset,
12051 GEN_INT (INTVAL (offset)
12052 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12054 RTX_FRAME_RELATED_P (synth) = 1;
12055 RTX_FRAME_RELATED_P (real2) = 1;
12056 if (BYTES_BIG_ENDIAN)
12057 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12059 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12064 /* Returns an insn that has a vrsave set operation with the
12065 appropriate CLOBBERs. */
12068 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12071 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12072 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12075 = gen_rtx_SET (VOIDmode,
12077 gen_rtx_UNSPEC_VOLATILE (SImode,
12078 gen_rtvec (2, reg, vrsave),
12083 /* We need to clobber the registers in the mask so the scheduler
12084 does not move sets to VRSAVE before sets of AltiVec registers.
12086 However, if the function receives nonlocal gotos, reload will set
12087 all call saved registers live. We will end up with:
12089 (set (reg 999) (mem))
12090 (parallel [ (set (reg vrsave) (unspec blah))
12091 (clobber (reg 999))])
12093 The clobber will cause the store into reg 999 to be dead, and
12094 flow will attempt to delete an epilogue insn. In this case, we
12095 need an unspec use/set of the register. */
12097 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12098 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12100 if (!epiloguep || call_used_regs [i])
12101 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12102 gen_rtx_REG (V4SImode, i));
12105 rtx reg = gen_rtx_REG (V4SImode, i);
12108 = gen_rtx_SET (VOIDmode,
12110 gen_rtx_UNSPEC (V4SImode,
12111 gen_rtvec (1, reg), 27));
12115 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12117 for (i = 0; i < nclobs; ++i)
12118 XVECEXP (insn, 0, i) = clobs[i];
12123 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12124 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12127 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12128 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12130 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12131 rtx replacea, replaceb;
12133 int_rtx = GEN_INT (offset);
12135 /* Some cases that need register indexed addressing. */
12136 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12138 && SPE_VECTOR_MODE (mode)
12139 && !SPE_CONST_OFFSET_OK (offset)))
12141 /* Whomever calls us must make sure r11 is available in the
12142 flow path of instructions in the prologue. */
12143 offset_rtx = gen_rtx_REG (Pmode, 11);
12144 emit_move_insn (offset_rtx, int_rtx);
12146 replacea = offset_rtx;
12147 replaceb = int_rtx;
12151 offset_rtx = int_rtx;
12152 replacea = NULL_RTX;
12153 replaceb = NULL_RTX;
12156 reg = gen_rtx_REG (mode, regno);
12157 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12158 mem = gen_rtx_MEM (mode, addr);
12159 set_mem_alias_set (mem, rs6000_sr_alias_set);
12161 insn = emit_move_insn (mem, reg);
12163 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12166 /* Emit an offset memory reference suitable for a frame store, while
12167 converting to a valid addressing mode. */
12170 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12172 rtx int_rtx, offset_rtx;
12174 int_rtx = GEN_INT (offset);
12176 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12178 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12179 emit_move_insn (offset_rtx, int_rtx);
12182 offset_rtx = int_rtx;
12184 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12187 /* Emit function prologue as insns. */
12190 rs6000_emit_prologue (void)
12192 rs6000_stack_t *info = rs6000_stack_info ();
12193 enum machine_mode reg_mode = Pmode;
12194 int reg_size = TARGET_32BIT ? 4 : 8;
12195 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12196 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12197 rtx frame_reg_rtx = sp_reg_rtx;
12198 rtx cr_save_rtx = NULL_RTX;
12200 int saving_FPRs_inline;
12201 int using_store_multiple;
12202 HOST_WIDE_INT sp_offset = 0;
12204 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12206 reg_mode = V2SImode;
12210 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12211 && (!TARGET_SPE_ABI
12212 || info->spe_64bit_regs_used == 0)
12213 && info->first_gp_reg_save < 31);
12214 saving_FPRs_inline = (info->first_fp_reg_save == 64
12215 || FP_SAVE_INLINE (info->first_fp_reg_save)
12216 || current_function_calls_eh_return
12217 || cfun->machine->ra_need_lr);
12219 /* For V.4, update stack before we do any saving and set back pointer. */
12221 && (DEFAULT_ABI == ABI_V4
12222 || current_function_calls_eh_return))
12224 if (info->total_size < 32767)
12225 sp_offset = info->total_size;
12227 frame_reg_rtx = frame_ptr_rtx;
12228 rs6000_emit_allocate_stack (info->total_size,
12229 (frame_reg_rtx != sp_reg_rtx
12230 && (info->cr_save_p
12232 || info->first_fp_reg_save < 64
12233 || info->first_gp_reg_save < 32
12235 if (frame_reg_rtx != sp_reg_rtx)
12236 rs6000_emit_stack_tie ();
12239 /* Save AltiVec registers if needed. */
12240 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12244 /* There should be a non inline version of this, for when we
12245 are saving lots of vector registers. */
12246 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12247 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12249 rtx areg, savereg, mem;
12252 offset = info->altivec_save_offset + sp_offset
12253 + 16 * (i - info->first_altivec_reg_save);
12255 savereg = gen_rtx_REG (V4SImode, i);
12257 areg = gen_rtx_REG (Pmode, 0);
12258 emit_move_insn (areg, GEN_INT (offset));
12260 /* AltiVec addressing mode is [reg+reg]. */
12261 mem = gen_rtx_MEM (V4SImode,
12262 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12264 set_mem_alias_set (mem, rs6000_sr_alias_set);
12266 insn = emit_move_insn (mem, savereg);
12268 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12269 areg, GEN_INT (offset));
12273 /* VRSAVE is a bit vector representing which AltiVec registers
12274 are used. The OS uses this to determine which vector
12275 registers to save on a context switch. We need to save
12276 VRSAVE on the stack frame, add whatever AltiVec registers we
12277 used in this function, and do the corresponding magic in the
12280 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12281 && info->vrsave_mask != 0)
12283 rtx reg, mem, vrsave;
12286 /* Get VRSAVE onto a GPR. */
12287 reg = gen_rtx_REG (SImode, 12);
12288 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12290 emit_insn (gen_get_vrsave_internal (reg));
12292 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12295 offset = info->vrsave_save_offset + sp_offset;
12297 = gen_rtx_MEM (SImode,
12298 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12299 set_mem_alias_set (mem, rs6000_sr_alias_set);
12300 insn = emit_move_insn (mem, reg);
12302 /* Include the registers in the mask. */
12303 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12305 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12308 /* If we use the link register, get it into r0. */
12309 if (info->lr_save_p)
12310 emit_move_insn (gen_rtx_REG (Pmode, 0),
12311 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12313 /* If we need to save CR, put it into r12. */
12314 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12316 cr_save_rtx = gen_rtx_REG (SImode, 12);
12317 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12320 /* Do any required saving of fpr's. If only one or two to save, do
12321 it ourselves. Otherwise, call function. */
12322 if (saving_FPRs_inline)
12325 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12326 if ((regs_ever_live[info->first_fp_reg_save+i]
12327 && ! call_used_regs[info->first_fp_reg_save+i]))
12328 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12329 info->first_fp_reg_save + i,
12330 info->fp_save_offset + sp_offset + 8 * i,
12333 else if (info->first_fp_reg_save != 64)
12337 const char *alloc_rname;
12339 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12341 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12342 gen_rtx_REG (Pmode,
12343 LINK_REGISTER_REGNUM));
12344 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12345 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12346 alloc_rname = ggc_strdup (rname);
12347 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12348 gen_rtx_SYMBOL_REF (Pmode,
12350 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12352 rtx addr, reg, mem;
12353 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12354 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12355 GEN_INT (info->fp_save_offset
12356 + sp_offset + 8*i));
12357 mem = gen_rtx_MEM (DFmode, addr);
12358 set_mem_alias_set (mem, rs6000_sr_alias_set);
12360 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12362 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12363 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12364 NULL_RTX, NULL_RTX);
12367 /* Save GPRs. This is done as a PARALLEL if we are using
12368 the store-multiple instructions. */
12369 if (using_store_multiple)
12373 p = rtvec_alloc (32 - info->first_gp_reg_save);
12374 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12376 rtx addr, reg, mem;
12377 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12378 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12379 GEN_INT (info->gp_save_offset
12382 mem = gen_rtx_MEM (reg_mode, addr);
12383 set_mem_alias_set (mem, rs6000_sr_alias_set);
12385 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12387 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12388 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12389 NULL_RTX, NULL_RTX);
12394 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12395 if ((regs_ever_live[info->first_gp_reg_save+i]
12396 && ! call_used_regs[info->first_gp_reg_save+i])
12397 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12398 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12399 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12401 rtx addr, reg, mem;
12402 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12404 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12406 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12409 if (!SPE_CONST_OFFSET_OK (offset))
12411 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12412 emit_move_insn (b, GEN_INT (offset));
12415 b = GEN_INT (offset);
12417 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12418 mem = gen_rtx_MEM (V2SImode, addr);
12419 set_mem_alias_set (mem, rs6000_sr_alias_set);
12420 insn = emit_move_insn (mem, reg);
12422 if (GET_CODE (b) == CONST_INT)
12423 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12424 NULL_RTX, NULL_RTX);
12426 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12427 b, GEN_INT (offset));
12431 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12432 GEN_INT (info->gp_save_offset
12435 mem = gen_rtx_MEM (reg_mode, addr);
12436 set_mem_alias_set (mem, rs6000_sr_alias_set);
12438 insn = emit_move_insn (mem, reg);
12439 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12440 NULL_RTX, NULL_RTX);
12445 /* ??? There's no need to emit actual instructions here, but it's the
12446 easiest way to get the frame unwind information emitted. */
12447 if (current_function_calls_eh_return)
12449 unsigned int i, regno;
12451 /* In AIX ABI we need to pretend we save r2 here. */
12454 rtx addr, reg, mem;
12456 reg = gen_rtx_REG (reg_mode, 2);
12457 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12458 GEN_INT (sp_offset + 5 * reg_size));
12459 mem = gen_rtx_MEM (reg_mode, addr);
12460 set_mem_alias_set (mem, rs6000_sr_alias_set);
12462 insn = emit_move_insn (mem, reg);
12463 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12464 NULL_RTX, NULL_RTX);
12465 PATTERN (insn) = gen_blockage ();
12470 regno = EH_RETURN_DATA_REGNO (i);
12471 if (regno == INVALID_REGNUM)
12474 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12475 info->ehrd_offset + sp_offset
12476 + reg_size * (int) i,
12481 /* Save lr if we used it. */
12482 if (info->lr_save_p)
12484 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12485 GEN_INT (info->lr_save_offset + sp_offset));
12486 rtx reg = gen_rtx_REG (Pmode, 0);
12487 rtx mem = gen_rtx_MEM (Pmode, addr);
12488 /* This should not be of rs6000_sr_alias_set, because of
12489 __builtin_return_address. */
12491 insn = emit_move_insn (mem, reg);
12492 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12493 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12496 /* Save CR if we use any that must be preserved. */
12497 if (info->cr_save_p)
12499 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12500 GEN_INT (info->cr_save_offset + sp_offset));
12501 rtx mem = gen_rtx_MEM (SImode, addr);
12503 set_mem_alias_set (mem, rs6000_sr_alias_set);
12505 /* If r12 was used to hold the original sp, copy cr into r0 now
12507 if (REGNO (frame_reg_rtx) == 12)
12509 cr_save_rtx = gen_rtx_REG (SImode, 0);
12510 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12512 insn = emit_move_insn (mem, cr_save_rtx);
12514 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12515 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12516 But that's OK. All we have to do is specify that _one_ condition
12517 code register is saved in this stack slot. The thrower's epilogue
12518 will then restore all the call-saved registers.
12519 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12520 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12521 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12524 /* Update stack and set back pointer unless this is V.4,
12525 for which it was done previously. */
12527 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12528 rs6000_emit_allocate_stack (info->total_size, FALSE);
12530 /* Set frame pointer, if needed. */
12531 if (frame_pointer_needed)
12533 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12535 RTX_FRAME_RELATED_P (insn) = 1;
12538 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12539 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12540 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12541 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12543 /* If emit_load_toc_table will use the link register, we need to save
12544 it. We use R12 for this purpose because emit_load_toc_table
12545 can use register 0. This allows us to use a plain 'blr' to return
12546 from the procedure more often. */
12547 int save_LR_around_toc_setup = (TARGET_ELF
12548 && DEFAULT_ABI != ABI_AIX
12550 && ! info->lr_save_p
12551 && EXIT_BLOCK_PTR->pred != NULL);
12552 if (save_LR_around_toc_setup)
12554 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12555 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12556 rs6000_emit_load_toc_table (TRUE);
12557 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12560 rs6000_emit_load_toc_table (TRUE);
12564 if (DEFAULT_ABI == ABI_DARWIN
12565 && flag_pic && current_function_uses_pic_offset_table)
12567 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12568 const char *picbase = machopic_function_base_name ();
12569 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12571 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12573 rs6000_maybe_dead (
12574 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12575 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12580 /* Write function prologue. */
12583 rs6000_output_function_prologue (FILE *file,
12584 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12586 rs6000_stack_t *info = rs6000_stack_info ();
12588 if (TARGET_DEBUG_STACK)
12589 debug_stack_info (info);
12591 /* Write .extern for any function we will call to save and restore
12593 if (info->first_fp_reg_save < 64
12594 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12595 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12596 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12597 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12598 RESTORE_FP_SUFFIX);
12600 /* Write .extern for AIX common mode routines, if needed. */
12601 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12603 fputs ("\t.extern __mulh\n", file);
12604 fputs ("\t.extern __mull\n", file);
12605 fputs ("\t.extern __divss\n", file);
12606 fputs ("\t.extern __divus\n", file);
12607 fputs ("\t.extern __quoss\n", file);
12608 fputs ("\t.extern __quous\n", file);
12609 common_mode_defined = 1;
12612 if (! HAVE_prologue)
12616 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12617 the "toplevel" insn chain. */
12618 emit_note (NOTE_INSN_DELETED);
12619 rs6000_emit_prologue ();
12620 emit_note (NOTE_INSN_DELETED);
12622 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12626 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12628 INSN_ADDRESSES_NEW (insn, addr);
12633 if (TARGET_DEBUG_STACK)
12634 debug_rtx_list (get_insns (), 100);
12635 final (get_insns (), file, FALSE, FALSE);
12639 rs6000_pic_labelno++;
12642 /* Emit function epilogue as insns.
12644 At present, dwarf2out_frame_debug_expr doesn't understand
12645 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12646 anywhere in the epilogue. Most of the insns below would in any case
12647 need special notes to explain where r11 is in relation to the stack. */
12650 rs6000_emit_epilogue (int sibcall)
12652 rs6000_stack_t *info;
12653 int restoring_FPRs_inline;
12654 int using_load_multiple;
12655 int using_mfcr_multiple;
12656 int use_backchain_to_restore_sp;
12658 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12659 rtx frame_reg_rtx = sp_reg_rtx;
12660 enum machine_mode reg_mode = Pmode;
12661 int reg_size = TARGET_32BIT ? 4 : 8;
12664 info = rs6000_stack_info ();
12666 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12668 reg_mode = V2SImode;
12672 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12673 && (!TARGET_SPE_ABI
12674 || info->spe_64bit_regs_used == 0)
12675 && info->first_gp_reg_save < 31);
12676 restoring_FPRs_inline = (sibcall
12677 || current_function_calls_eh_return
12678 || info->first_fp_reg_save == 64
12679 || FP_SAVE_INLINE (info->first_fp_reg_save));
12680 use_backchain_to_restore_sp = (frame_pointer_needed
12681 || current_function_calls_alloca
12682 || info->total_size > 32767);
12683 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12684 || rs6000_cpu == PROCESSOR_PPC603
12685 || rs6000_cpu == PROCESSOR_PPC750
12688 /* If we have a frame pointer, a call to alloca, or a large stack
12689 frame, restore the old stack pointer using the backchain. Otherwise,
12690 we know what size to update it with. */
12691 if (use_backchain_to_restore_sp)
12693 /* Under V.4, don't reset the stack pointer until after we're done
12694 loading the saved registers. */
12695 if (DEFAULT_ABI == ABI_V4)
12696 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12698 emit_move_insn (frame_reg_rtx,
12699 gen_rtx_MEM (Pmode, sp_reg_rtx));
12702 else if (info->push_p)
12704 if (DEFAULT_ABI == ABI_V4
12705 || current_function_calls_eh_return)
12706 sp_offset = info->total_size;
12709 emit_insn (TARGET_32BIT
12710 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12711 GEN_INT (info->total_size))
12712 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12713 GEN_INT (info->total_size)));
12717 /* Restore AltiVec registers if needed. */
12718 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12722 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12723 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12725 rtx addr, areg, mem;
12727 areg = gen_rtx_REG (Pmode, 0);
12729 (areg, GEN_INT (info->altivec_save_offset
12731 + 16 * (i - info->first_altivec_reg_save)));
12733 /* AltiVec addressing mode is [reg+reg]. */
12734 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12735 mem = gen_rtx_MEM (V4SImode, addr);
12736 set_mem_alias_set (mem, rs6000_sr_alias_set);
12738 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12742 /* Restore VRSAVE if needed. */
12743 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12744 && info->vrsave_mask != 0)
12746 rtx addr, mem, reg;
12748 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12749 GEN_INT (info->vrsave_save_offset + sp_offset));
12750 mem = gen_rtx_MEM (SImode, addr);
12751 set_mem_alias_set (mem, rs6000_sr_alias_set);
12752 reg = gen_rtx_REG (SImode, 12);
12753 emit_move_insn (reg, mem);
12755 emit_insn (generate_set_vrsave (reg, info, 1));
12758 /* Get the old lr if we saved it. */
12759 if (info->lr_save_p)
12761 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12762 info->lr_save_offset + sp_offset);
12764 set_mem_alias_set (mem, rs6000_sr_alias_set);
12766 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12769 /* Get the old cr if we saved it. */
12770 if (info->cr_save_p)
12772 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12773 GEN_INT (info->cr_save_offset + sp_offset));
12774 rtx mem = gen_rtx_MEM (SImode, addr);
12776 set_mem_alias_set (mem, rs6000_sr_alias_set);
12778 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12781 /* Set LR here to try to overlap restores below. */
12782 if (info->lr_save_p)
12783 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12784 gen_rtx_REG (Pmode, 0));
12786 /* Load exception handler data registers, if needed. */
12787 if (current_function_calls_eh_return)
12789 unsigned int i, regno;
12793 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12794 GEN_INT (sp_offset + 5 * reg_size));
12795 rtx mem = gen_rtx_MEM (reg_mode, addr);
12797 set_mem_alias_set (mem, rs6000_sr_alias_set);
12799 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12806 regno = EH_RETURN_DATA_REGNO (i);
12807 if (regno == INVALID_REGNUM)
12810 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12811 info->ehrd_offset + sp_offset
12812 + reg_size * (int) i);
12813 set_mem_alias_set (mem, rs6000_sr_alias_set);
12815 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12819 /* Restore GPRs. This is done as a PARALLEL if we are using
12820 the load-multiple instructions. */
12821 if (using_load_multiple)
12824 p = rtvec_alloc (32 - info->first_gp_reg_save);
12825 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12827 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12828 GEN_INT (info->gp_save_offset
12831 rtx mem = gen_rtx_MEM (reg_mode, addr);
12833 set_mem_alias_set (mem, rs6000_sr_alias_set);
12836 gen_rtx_SET (VOIDmode,
12837 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12840 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12843 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12844 if ((regs_ever_live[info->first_gp_reg_save+i]
12845 && ! call_used_regs[info->first_gp_reg_save+i])
12846 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12847 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12848 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12850 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12851 GEN_INT (info->gp_save_offset
12854 rtx mem = gen_rtx_MEM (reg_mode, addr);
12856 /* Restore 64-bit quantities for SPE. */
12857 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12859 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12862 if (!SPE_CONST_OFFSET_OK (offset))
12864 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12865 emit_move_insn (b, GEN_INT (offset));
12868 b = GEN_INT (offset);
12870 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12871 mem = gen_rtx_MEM (V2SImode, addr);
12874 set_mem_alias_set (mem, rs6000_sr_alias_set);
12876 emit_move_insn (gen_rtx_REG (reg_mode,
12877 info->first_gp_reg_save + i), mem);
12880 /* Restore fpr's if we need to do it without calling a function. */
12881 if (restoring_FPRs_inline)
12882 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12883 if ((regs_ever_live[info->first_fp_reg_save+i]
12884 && ! call_used_regs[info->first_fp_reg_save+i]))
12887 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12888 GEN_INT (info->fp_save_offset
12891 mem = gen_rtx_MEM (DFmode, addr);
12892 set_mem_alias_set (mem, rs6000_sr_alias_set);
12894 emit_move_insn (gen_rtx_REG (DFmode,
12895 info->first_fp_reg_save + i),
12899 /* If we saved cr, restore it here. Just those that were used. */
12900 if (info->cr_save_p)
12902 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12905 if (using_mfcr_multiple)
12907 for (i = 0; i < 8; i++)
12908 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12914 if (using_mfcr_multiple && count > 1)
12919 p = rtvec_alloc (count);
12922 for (i = 0; i < 8; i++)
12923 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12925 rtvec r = rtvec_alloc (2);
12926 RTVEC_ELT (r, 0) = r12_rtx;
12927 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12928 RTVEC_ELT (p, ndx) =
12929 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12930 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12933 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12938 for (i = 0; i < 8; i++)
12939 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12941 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12947 /* If this is V.4, unwind the stack pointer after all of the loads
12948 have been done. We need to emit a block here so that sched
12949 doesn't decide to move the sp change before the register restores
12950 (which may not have any obvious dependency on the stack). This
12951 doesn't hurt performance, because there is no scheduling that can
12952 be done after this point. */
12953 if (DEFAULT_ABI == ABI_V4
12954 || current_function_calls_eh_return)
12956 if (frame_reg_rtx != sp_reg_rtx)
12957 rs6000_emit_stack_tie ();
12959 if (use_backchain_to_restore_sp)
12961 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12963 else if (sp_offset != 0)
12965 emit_insn (TARGET_32BIT
12966 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12967 GEN_INT (sp_offset))
12968 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12969 GEN_INT (sp_offset)));
12973 if (current_function_calls_eh_return)
12975 rtx sa = EH_RETURN_STACKADJ_RTX;
12976 emit_insn (TARGET_32BIT
12977 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12978 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12984 if (! restoring_FPRs_inline)
12985 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12987 p = rtvec_alloc (2);
12989 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12990 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12991 gen_rtx_REG (Pmode,
12992 LINK_REGISTER_REGNUM));
12994 /* If we have to restore more than two FP registers, branch to the
12995 restore function. It will return to our caller. */
12996 if (! restoring_FPRs_inline)
13000 const char *alloc_rname;
13002 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
13003 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
13004 alloc_rname = ggc_strdup (rname);
13005 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
13006 gen_rtx_SYMBOL_REF (Pmode,
13009 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13012 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
13013 GEN_INT (info->fp_save_offset + 8*i));
13014 mem = gen_rtx_MEM (DFmode, addr);
13015 set_mem_alias_set (mem, rs6000_sr_alias_set);
13017 RTVEC_ELT (p, i+3) =
13018 gen_rtx_SET (VOIDmode,
13019 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
13024 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13028 /* Write function epilogue. */
13031 rs6000_output_function_epilogue (FILE *file,
13032 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13034 rs6000_stack_t *info = rs6000_stack_info ();
13036 if (! HAVE_epilogue)
13038 rtx insn = get_last_insn ();
13039 /* If the last insn was a BARRIER, we don't have to write anything except
13040 the trace table. */
13041 if (GET_CODE (insn) == NOTE)
13042 insn = prev_nonnote_insn (insn);
13043 if (insn == 0 || GET_CODE (insn) != BARRIER)
13045 /* This is slightly ugly, but at least we don't have two
13046 copies of the epilogue-emitting code. */
13049 /* A NOTE_INSN_DELETED is supposed to be at the start
13050 and end of the "toplevel" insn chain. */
13051 emit_note (NOTE_INSN_DELETED);
13052 rs6000_emit_epilogue (FALSE);
13053 emit_note (NOTE_INSN_DELETED);
13055 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13059 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13061 INSN_ADDRESSES_NEW (insn, addr);
13066 if (TARGET_DEBUG_STACK)
13067 debug_rtx_list (get_insns (), 100);
13068 final (get_insns (), file, FALSE, FALSE);
13074 macho_branch_islands ();
13075 /* Mach-O doesn't support labels at the end of objects, so if
13076 it looks like we might want one, insert a NOP. */
13078 rtx insn = get_last_insn ();
13081 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
13082 insn = PREV_INSN (insn);
13086 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
13087 fputs ("\tnop\n", file);
13091 /* Output a traceback table here. See /usr/include/sys/debug.h for info
13094 We don't output a traceback table if -finhibit-size-directive was
13095 used. The documentation for -finhibit-size-directive reads
13096 ``don't output a @code{.size} assembler directive, or anything
13097 else that would cause trouble if the function is split in the
13098 middle, and the two halves are placed at locations far apart in
13099 memory.'' The traceback table has this property, since it
13100 includes the offset from the start of the function to the
13101 traceback table itself.
13103 System V.4 Powerpc's (and the embedded ABI derived from it) use a
13104 different traceback table. */
13105 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
13106 && rs6000_traceback != traceback_none)
13108 const char *fname = NULL;
13109 const char *language_string = lang_hooks.name;
13110 int fixed_parms = 0, float_parms = 0, parm_info = 0;
13112 int optional_tbtab;
13114 if (rs6000_traceback == traceback_full)
13115 optional_tbtab = 1;
13116 else if (rs6000_traceback == traceback_part)
13117 optional_tbtab = 0;
13119 optional_tbtab = !optimize_size && !TARGET_ELF;
13121 if (optional_tbtab)
13123 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
13124 while (*fname == '.') /* V.4 encodes . in the name */
13127 /* Need label immediately before tbtab, so we can compute
13128 its offset from the function start. */
13129 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13130 ASM_OUTPUT_LABEL (file, fname);
13133 /* The .tbtab pseudo-op can only be used for the first eight
13134 expressions, since it can't handle the possibly variable
13135 length fields that follow. However, if you omit the optional
13136 fields, the assembler outputs zeros for all optional fields
13137 anyways, giving each variable length field is minimum length
13138 (as defined in sys/debug.h). Thus we can not use the .tbtab
13139 pseudo-op at all. */
13141 /* An all-zero word flags the start of the tbtab, for debuggers
13142 that have to find it by searching forward from the entry
13143 point or from the current pc. */
13144 fputs ("\t.long 0\n", file);
13146 /* Tbtab format type. Use format type 0. */
13147 fputs ("\t.byte 0,", file);
13149 /* Language type. Unfortunately, there does not seem to be any
13150 official way to discover the language being compiled, so we
13151 use language_string.
13152 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
13153 Java is 13. Objective-C is 14. */
13154 if (! strcmp (language_string, "GNU C"))
13156 else if (! strcmp (language_string, "GNU F77")
13157 || ! strcmp (language_string, "GNU F95"))
13159 else if (! strcmp (language_string, "GNU Pascal"))
13161 else if (! strcmp (language_string, "GNU Ada"))
13163 else if (! strcmp (language_string, "GNU C++"))
13165 else if (! strcmp (language_string, "GNU Java"))
13167 else if (! strcmp (language_string, "GNU Objective-C"))
13171 fprintf (file, "%d,", i);
13173 /* 8 single bit fields: global linkage (not set for C extern linkage,
13174 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13175 from start of procedure stored in tbtab, internal function, function
13176 has controlled storage, function has no toc, function uses fp,
13177 function logs/aborts fp operations. */
13178 /* Assume that fp operations are used if any fp reg must be saved. */
13179 fprintf (file, "%d,",
13180 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
13182 /* 6 bitfields: function is interrupt handler, name present in
13183 proc table, function calls alloca, on condition directives
13184 (controls stack walks, 3 bits), saves condition reg, saves
13186 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13187 set up as a frame pointer, even when there is no alloca call. */
13188 fprintf (file, "%d,",
13189 ((optional_tbtab << 6)
13190 | ((optional_tbtab & frame_pointer_needed) << 5)
13191 | (info->cr_save_p << 1)
13192 | (info->lr_save_p)));
13194 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13196 fprintf (file, "%d,",
13197 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13199 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13200 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13202 if (optional_tbtab)
13204 /* Compute the parameter info from the function decl argument
13207 int next_parm_info_bit = 31;
13209 for (decl = DECL_ARGUMENTS (current_function_decl);
13210 decl; decl = TREE_CHAIN (decl))
13212 rtx parameter = DECL_INCOMING_RTL (decl);
13213 enum machine_mode mode = GET_MODE (parameter);
13215 if (GET_CODE (parameter) == REG)
13217 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13223 if (mode == SFmode)
13225 else if (mode == DFmode || mode == TFmode)
13230 /* If only one bit will fit, don't or in this entry. */
13231 if (next_parm_info_bit > 0)
13232 parm_info |= (bits << (next_parm_info_bit - 1));
13233 next_parm_info_bit -= 2;
13237 fixed_parms += ((GET_MODE_SIZE (mode)
13238 + (UNITS_PER_WORD - 1))
13240 next_parm_info_bit -= 1;
13246 /* Number of fixed point parameters. */
13247 /* This is actually the number of words of fixed point parameters; thus
13248 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13249 fprintf (file, "%d,", fixed_parms);
13251 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13253 /* This is actually the number of fp registers that hold parameters;
13254 and thus the maximum value is 13. */
13255 /* Set parameters on stack bit if parameters are not in their original
13256 registers, regardless of whether they are on the stack? Xlc
13257 seems to set the bit when not optimizing. */
13258 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13260 if (! optional_tbtab)
13263 /* Optional fields follow. Some are variable length. */
13265 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13266 11 double float. */
13267 /* There is an entry for each parameter in a register, in the order that
13268 they occur in the parameter list. Any intervening arguments on the
13269 stack are ignored. If the list overflows a long (max possible length
13270 34 bits) then completely leave off all elements that don't fit. */
13271 /* Only emit this long if there was at least one parameter. */
13272 if (fixed_parms || float_parms)
13273 fprintf (file, "\t.long %d\n", parm_info);
13275 /* Offset from start of code to tb table. */
13276 fputs ("\t.long ", file);
13277 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13279 RS6000_OUTPUT_BASENAME (file, fname);
13281 assemble_name (file, fname);
13283 fputs ("-.", file);
13285 RS6000_OUTPUT_BASENAME (file, fname);
13287 assemble_name (file, fname);
13291 /* Interrupt handler mask. */
13292 /* Omit this long, since we never set the interrupt handler bit
13295 /* Number of CTL (controlled storage) anchors. */
13296 /* Omit this long, since the has_ctl bit is never set above. */
13298 /* Displacement into stack of each CTL anchor. */
13299 /* Omit this list of longs, because there are no CTL anchors. */
13301 /* Length of function name. */
13304 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13306 /* Function name. */
13307 assemble_string (fname, strlen (fname));
13309 /* Register for alloca automatic storage; this is always reg 31.
13310 Only emit this if the alloca bit was set above. */
13311 if (frame_pointer_needed)
13312 fputs ("\t.byte 31\n", file);
13314 fputs ("\t.align 2\n", file);
13318 /* A C compound statement that outputs the assembler code for a thunk
13319 function, used to implement C++ virtual function calls with
13320 multiple inheritance. The thunk acts as a wrapper around a virtual
13321 function, adjusting the implicit object parameter before handing
13322 control off to the real function.
13324 First, emit code to add the integer DELTA to the location that
13325 contains the incoming first argument. Assume that this argument
13326 contains a pointer, and is the one used to pass the `this' pointer
13327 in C++. This is the incoming argument *before* the function
13328 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13329 values of all other incoming arguments.
13331 After the addition, emit code to jump to FUNCTION, which is a
13332 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13333 not touch the return address. Hence returning from FUNCTION will
13334 return to whoever called the current `thunk'.
13336 The effect must be as if FUNCTION had been called directly with the
13337 adjusted first argument. This macro is responsible for emitting
13338 all of the code for a thunk function; output_function_prologue()
13339 and output_function_epilogue() are not invoked.
13341 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13342 been extracted from it.) It might possibly be useful on some
13343 targets, but probably not.
13345 If you do not define this macro, the target-independent code in the
13346 C++ frontend will generate a less efficient heavyweight thunk that
13347 calls FUNCTION instead of jumping to it. The generic approach does
13348 not support varargs. */
13351 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13352 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13355 rtx this, insn, funexp;
13357 reload_completed = 1;
13358 epilogue_completed = 1;
13359 no_new_pseudos = 1;
13361 /* Mark the end of the (empty) prologue. */
13362 emit_note (NOTE_INSN_PROLOGUE_END);
13364 /* Find the "this" pointer. If the function returns a structure,
13365 the structure return pointer is in r3. */
13366 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13367 this = gen_rtx_REG (Pmode, 4);
13369 this = gen_rtx_REG (Pmode, 3);
13371 /* Apply the constant offset, if required. */
13374 rtx delta_rtx = GEN_INT (delta);
13375 emit_insn (TARGET_32BIT
13376 ? gen_addsi3 (this, this, delta_rtx)
13377 : gen_adddi3 (this, this, delta_rtx));
13380 /* Apply the offset from the vtable, if required. */
13383 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13384 rtx tmp = gen_rtx_REG (Pmode, 12);
13386 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13387 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13389 emit_insn (TARGET_32BIT
13390 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13391 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13392 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13396 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13398 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13400 emit_insn (TARGET_32BIT
13401 ? gen_addsi3 (this, this, tmp)
13402 : gen_adddi3 (this, this, tmp));
13405 /* Generate a tail call to the target function. */
13406 if (!TREE_USED (function))
13408 assemble_external (function);
13409 TREE_USED (function) = 1;
13411 funexp = XEXP (DECL_RTL (function), 0);
13412 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13415 if (MACHOPIC_INDIRECT)
13416 funexp = machopic_indirect_call_target (funexp);
13419 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13420 generate sibcall RTL explicitly to avoid constraint abort. */
13421 insn = emit_call_insn (
13422 gen_rtx_PARALLEL (VOIDmode,
13424 gen_rtx_CALL (VOIDmode,
13425 funexp, const0_rtx),
13426 gen_rtx_USE (VOIDmode, const0_rtx),
13427 gen_rtx_USE (VOIDmode,
13428 gen_rtx_REG (SImode,
13429 LINK_REGISTER_REGNUM)),
13430 gen_rtx_RETURN (VOIDmode))));
13431 SIBLING_CALL_P (insn) = 1;
13434 /* Run just enough of rest_of_compilation to get the insns emitted.
13435 There's not really enough bulk here to make other passes such as
13436 instruction scheduling worth while. Note that use_thunk calls
13437 assemble_start_function and assemble_end_function. */
13438 insn = get_insns ();
13439 insn_locators_initialize ();
13440 shorten_branches (insn);
13441 final_start_function (insn, file, 1);
13442 final (insn, file, 1, 0);
13443 final_end_function ();
13445 reload_completed = 0;
13446 epilogue_completed = 0;
13447 no_new_pseudos = 0;
13450 /* A quick summary of the various types of 'constant-pool tables'
13453 Target Flags Name One table per
13454 AIX (none) AIX TOC object file
13455 AIX -mfull-toc AIX TOC object file
13456 AIX -mminimal-toc AIX minimal TOC translation unit
13457 SVR4/EABI (none) SVR4 SDATA object file
13458 SVR4/EABI -fpic SVR4 pic object file
13459 SVR4/EABI -fPIC SVR4 PIC translation unit
13460 SVR4/EABI -mrelocatable EABI TOC function
13461 SVR4/EABI -maix AIX TOC object file
13462 SVR4/EABI -maix -mminimal-toc
13463 AIX minimal TOC translation unit
13465 Name Reg. Set by entries contains:
13466 made by addrs? fp? sum?
13468 AIX TOC 2 crt0 as Y option option
13469 AIX minimal TOC 30 prolog gcc Y Y option
13470 SVR4 SDATA 13 crt0 gcc N Y N
13471 SVR4 pic 30 prolog ld Y not yet N
13472 SVR4 PIC 30 prolog gcc Y option option
13473 EABI TOC 30 prolog gcc Y option option
13477 /* Hash functions for the hash table. */
13480 rs6000_hash_constant (rtx k)
13482 enum rtx_code code = GET_CODE (k);
13483 enum machine_mode mode = GET_MODE (k);
13484 unsigned result = (code << 3) ^ mode;
13485 const char *format;
13488 format = GET_RTX_FORMAT (code);
13489 flen = strlen (format);
13495 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13498 if (mode != VOIDmode)
13499 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13511 for (; fidx < flen; fidx++)
13512 switch (format[fidx])
13517 const char *str = XSTR (k, fidx);
13518 len = strlen (str);
13519 result = result * 613 + len;
13520 for (i = 0; i < len; i++)
13521 result = result * 613 + (unsigned) str[i];
13526 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13530 result = result * 613 + (unsigned) XINT (k, fidx);
13533 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13534 result = result * 613 + (unsigned) XWINT (k, fidx);
13538 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13539 result = result * 613 + (unsigned) (XWINT (k, fidx)
13553 toc_hash_function (const void *hash_entry)
13555 const struct toc_hash_struct *thc =
13556 (const struct toc_hash_struct *) hash_entry;
13557 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13560 /* Compare H1 and H2 for equivalence. */
13563 toc_hash_eq (const void *h1, const void *h2)
13565 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13566 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13568 if (((const struct toc_hash_struct *) h1)->key_mode
13569 != ((const struct toc_hash_struct *) h2)->key_mode)
13572 return rtx_equal_p (r1, r2);
13575 /* These are the names given by the C++ front-end to vtables, and
13576 vtable-like objects. Ideally, this logic should not be here;
13577 instead, there should be some programmatic way of inquiring as
13578 to whether or not an object is a vtable. */
13580 #define VTABLE_NAME_P(NAME) \
13581 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13582 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13583 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13584 || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0 \
13585 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13588 rs6000_output_symbol_ref (FILE *file, rtx x)
13590 /* Currently C++ toc references to vtables can be emitted before it
13591 is decided whether the vtable is public or private. If this is
13592 the case, then the linker will eventually complain that there is
13593 a reference to an unknown section. Thus, for vtables only,
13594 we emit the TOC reference to reference the symbol and not the
13596 const char *name = XSTR (x, 0);
13598 if (VTABLE_NAME_P (name))
13600 RS6000_OUTPUT_BASENAME (file, name);
13603 assemble_name (file, name);
13606 /* Output a TOC entry. We derive the entry name from what is being
13610 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13613 const char *name = buf;
13614 const char *real_name;
13621 /* When the linker won't eliminate them, don't output duplicate
13622 TOC entries (this happens on AIX if there is any kind of TOC,
13623 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13625 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13627 struct toc_hash_struct *h;
13630 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13631 time because GGC is not initialized at that point. */
13632 if (toc_hash_table == NULL)
13633 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13634 toc_hash_eq, NULL);
13636 h = ggc_alloc (sizeof (*h));
13638 h->key_mode = mode;
13639 h->labelno = labelno;
13641 found = htab_find_slot (toc_hash_table, h, 1);
13642 if (*found == NULL)
13644 else /* This is indeed a duplicate.
13645 Set this label equal to that label. */
13647 fputs ("\t.set ", file);
13648 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13649 fprintf (file, "%d,", labelno);
13650 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13651 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13657 /* If we're going to put a double constant in the TOC, make sure it's
13658 aligned properly when strict alignment is on. */
13659 if (GET_CODE (x) == CONST_DOUBLE
13660 && STRICT_ALIGNMENT
13661 && GET_MODE_BITSIZE (mode) >= 64
13662 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13663 ASM_OUTPUT_ALIGN (file, 3);
13666 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13668 /* Handle FP constants specially. Note that if we have a minimal
13669 TOC, things we put here aren't actually in the TOC, so we can allow
13671 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13673 REAL_VALUE_TYPE rv;
13676 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13677 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13681 if (TARGET_MINIMAL_TOC)
13682 fputs (DOUBLE_INT_ASM_OP, file);
13684 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13685 k[0] & 0xffffffff, k[1] & 0xffffffff,
13686 k[2] & 0xffffffff, k[3] & 0xffffffff);
13687 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13688 k[0] & 0xffffffff, k[1] & 0xffffffff,
13689 k[2] & 0xffffffff, k[3] & 0xffffffff);
13694 if (TARGET_MINIMAL_TOC)
13695 fputs ("\t.long ", file);
13697 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13698 k[0] & 0xffffffff, k[1] & 0xffffffff,
13699 k[2] & 0xffffffff, k[3] & 0xffffffff);
13700 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13701 k[0] & 0xffffffff, k[1] & 0xffffffff,
13702 k[2] & 0xffffffff, k[3] & 0xffffffff);
13706 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13708 REAL_VALUE_TYPE rv;
13711 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13712 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13716 if (TARGET_MINIMAL_TOC)
13717 fputs (DOUBLE_INT_ASM_OP, file);
13719 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13720 k[0] & 0xffffffff, k[1] & 0xffffffff);
13721 fprintf (file, "0x%lx%08lx\n",
13722 k[0] & 0xffffffff, k[1] & 0xffffffff);
13727 if (TARGET_MINIMAL_TOC)
13728 fputs ("\t.long ", file);
13730 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13731 k[0] & 0xffffffff, k[1] & 0xffffffff);
13732 fprintf (file, "0x%lx,0x%lx\n",
13733 k[0] & 0xffffffff, k[1] & 0xffffffff);
13737 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13739 REAL_VALUE_TYPE rv;
13742 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13743 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13747 if (TARGET_MINIMAL_TOC)
13748 fputs (DOUBLE_INT_ASM_OP, file);
13750 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13751 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13756 if (TARGET_MINIMAL_TOC)
13757 fputs ("\t.long ", file);
13759 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13760 fprintf (file, "0x%lx\n", l & 0xffffffff);
13764 else if (GET_MODE (x) == VOIDmode
13765 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13767 unsigned HOST_WIDE_INT low;
13768 HOST_WIDE_INT high;
13770 if (GET_CODE (x) == CONST_DOUBLE)
13772 low = CONST_DOUBLE_LOW (x);
13773 high = CONST_DOUBLE_HIGH (x);
13776 #if HOST_BITS_PER_WIDE_INT == 32
13779 high = (low & 0x80000000) ? ~0 : 0;
13783 low = INTVAL (x) & 0xffffffff;
13784 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13788 /* TOC entries are always Pmode-sized, but since this
13789 is a bigendian machine then if we're putting smaller
13790 integer constants in the TOC we have to pad them.
13791 (This is still a win over putting the constants in
13792 a separate constant pool, because then we'd have
13793 to have both a TOC entry _and_ the actual constant.)
13795 For a 32-bit target, CONST_INT values are loaded and shifted
13796 entirely within `low' and can be stored in one TOC entry. */
13798 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13799 abort ();/* It would be easy to make this work, but it doesn't now. */
13801 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13803 #if HOST_BITS_PER_WIDE_INT == 32
13804 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13805 POINTER_SIZE, &low, &high, 0);
13808 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13809 high = (HOST_WIDE_INT) low >> 32;
13816 if (TARGET_MINIMAL_TOC)
13817 fputs (DOUBLE_INT_ASM_OP, file);
13819 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13820 (long) high & 0xffffffff, (long) low & 0xffffffff);
13821 fprintf (file, "0x%lx%08lx\n",
13822 (long) high & 0xffffffff, (long) low & 0xffffffff);
13827 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13829 if (TARGET_MINIMAL_TOC)
13830 fputs ("\t.long ", file);
13832 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13833 (long) high & 0xffffffff, (long) low & 0xffffffff);
13834 fprintf (file, "0x%lx,0x%lx\n",
13835 (long) high & 0xffffffff, (long) low & 0xffffffff);
13839 if (TARGET_MINIMAL_TOC)
13840 fputs ("\t.long ", file);
13842 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13843 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13849 if (GET_CODE (x) == CONST)
13851 if (GET_CODE (XEXP (x, 0)) != PLUS)
13854 base = XEXP (XEXP (x, 0), 0);
13855 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13858 if (GET_CODE (base) == SYMBOL_REF)
13859 name = XSTR (base, 0);
13860 else if (GET_CODE (base) == LABEL_REF)
13861 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13862 else if (GET_CODE (base) == CODE_LABEL)
13863 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13867 real_name = (*targetm.strip_name_encoding) (name);
13868 if (TARGET_MINIMAL_TOC)
13869 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13872 fprintf (file, "\t.tc %s", real_name);
13875 fprintf (file, ".N%d", - offset);
13877 fprintf (file, ".P%d", offset);
13879 fputs ("[TC],", file);
13882 /* Currently C++ toc references to vtables can be emitted before it
13883 is decided whether the vtable is public or private. If this is
13884 the case, then the linker will eventually complain that there is
13885 a TOC reference to an unknown section. Thus, for vtables only,
13886 we emit the TOC reference to reference the symbol and not the
13888 if (VTABLE_NAME_P (name))
13890 RS6000_OUTPUT_BASENAME (file, name);
13892 fprintf (file, "%d", offset);
13893 else if (offset > 0)
13894 fprintf (file, "+%d", offset);
13897 output_addr_const (file, x);
13901 /* Output an assembler pseudo-op to write an ASCII string of N characters
13902 starting at P to FILE.
13904 On the RS/6000, we have to do this using the .byte operation and
13905 write out special characters outside the quoted string.
13906 Also, the assembler is broken; very long strings are truncated,
13907 so we must artificially break them up early. */
13910 output_ascii (FILE *file, const char *p, int n)
13913 int i, count_string;
13914 const char *for_string = "\t.byte \"";
13915 const char *for_decimal = "\t.byte ";
13916 const char *to_close = NULL;
13919 for (i = 0; i < n; i++)
13922 if (c >= ' ' && c < 0177)
13925 fputs (for_string, file);
13928 /* Write two quotes to get one. */
13936 for_decimal = "\"\n\t.byte ";
13940 if (count_string >= 512)
13942 fputs (to_close, file);
13944 for_string = "\t.byte \"";
13945 for_decimal = "\t.byte ";
13953 fputs (for_decimal, file);
13954 fprintf (file, "%d", c);
13956 for_string = "\n\t.byte \"";
13957 for_decimal = ", ";
13963 /* Now close the string if we have written one. Then end the line. */
13965 fputs (to_close, file);
13968 /* Generate a unique section name for FILENAME for a section type
13969 represented by SECTION_DESC. Output goes into BUF.
13971 SECTION_DESC can be any string, as long as it is different for each
13972 possible section type.
13974 We name the section in the same manner as xlc. The name begins with an
13975 underscore followed by the filename (after stripping any leading directory
13976 names) with the last period replaced by the string SECTION_DESC. If
13977 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13981 rs6000_gen_section_name (char **buf, const char *filename,
13982 const char *section_desc)
13984 const char *q, *after_last_slash, *last_period = 0;
13988 after_last_slash = filename;
13989 for (q = filename; *q; q++)
13992 after_last_slash = q + 1;
13993 else if (*q == '.')
13997 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13998 *buf = (char *) xmalloc (len);
14003 for (q = after_last_slash; *q; q++)
14005 if (q == last_period)
14007 strcpy (p, section_desc);
14008 p += strlen (section_desc);
14012 else if (ISALNUM (*q))
14016 if (last_period == 0)
14017 strcpy (p, section_desc);
14022 /* Emit profile function. */
14025 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
14027 if (TARGET_PROFILE_KERNEL)
14030 if (DEFAULT_ABI == ABI_AIX)
14032 #ifndef NO_PROFILE_COUNTERS
14033 # define NO_PROFILE_COUNTERS 0
14035 if (NO_PROFILE_COUNTERS)
14036 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
14040 const char *label_name;
14043 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14044 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
14045 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
14047 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
14051 else if (DEFAULT_ABI == ABI_DARWIN)
14053 const char *mcount_name = RS6000_MCOUNT;
14054 int caller_addr_regno = LINK_REGISTER_REGNUM;
14056 /* Be conservative and always set this, at least for now. */
14057 current_function_uses_pic_offset_table = 1;
14060 /* For PIC code, set up a stub and collect the caller's address
14061 from r0, which is where the prologue puts it. */
14062 if (MACHOPIC_INDIRECT)
14064 mcount_name = machopic_stub_name (mcount_name);
14065 if (current_function_uses_pic_offset_table)
14066 caller_addr_regno = 0;
14069 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
14071 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
14075 /* Write function profiler code. */
14078 output_function_profiler (FILE *file, int labelno)
14083 switch (DEFAULT_ABI)
14092 warning ("no profiling of 64-bit code for this ABI");
14095 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14096 fprintf (file, "\tmflr %s\n", reg_names[0]);
14099 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
14100 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14101 reg_names[0], save_lr, reg_names[1]);
14102 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
14103 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
14104 assemble_name (file, buf);
14105 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
14107 else if (flag_pic > 1)
14109 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14110 reg_names[0], save_lr, reg_names[1]);
14111 /* Now, we need to get the address of the label. */
14112 fputs ("\tbl 1f\n\t.long ", file);
14113 assemble_name (file, buf);
14114 fputs ("-.\n1:", file);
14115 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
14116 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
14117 reg_names[0], reg_names[11]);
14118 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
14119 reg_names[0], reg_names[0], reg_names[11]);
14123 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
14124 assemble_name (file, buf);
14125 fputs ("@ha\n", file);
14126 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14127 reg_names[0], save_lr, reg_names[1]);
14128 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
14129 assemble_name (file, buf);
14130 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
14133 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
14134 fprintf (file, "\tbl %s%s\n",
14135 RS6000_MCOUNT, flag_pic ? "@plt" : "");
14140 if (!TARGET_PROFILE_KERNEL)
14142 /* Don't do anything, done in output_profile_hook (). */
14149 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
14150 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
14152 if (cfun->static_chain_decl != NULL)
14154 asm_fprintf (file, "\tstd %s,24(%s)\n",
14155 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14156 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14157 asm_fprintf (file, "\tld %s,24(%s)\n",
14158 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14161 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14169 rs6000_use_dfa_pipeline_interface (void)
14174 /* Power4 load update and store update instructions are cracked into a
14175 load or store and an integer insn which are executed in the same cycle.
14176 Branches have their own dispatch slot which does not count against the
14177 GCC issue rate, but it changes the program flow so there are no other
14178 instructions to issue in this cycle. */
14181 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14182 int verbose ATTRIBUTE_UNUSED,
14183 rtx insn, int more)
14185 if (GET_CODE (PATTERN (insn)) == USE
14186 || GET_CODE (PATTERN (insn)) == CLOBBER)
14189 if (rs6000_sched_groups)
14191 if (is_microcoded_insn (insn))
14193 else if (is_cracked_insn (insn))
14194 return more > 2 ? more - 2 : 0;
14200 /* Adjust the cost of a scheduling dependency. Return the new cost of
14201 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14204 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14207 if (! recog_memoized (insn))
14210 if (REG_NOTE_KIND (link) != 0)
14213 if (REG_NOTE_KIND (link) == 0)
14215 /* Data dependency; DEP_INSN writes a register that INSN reads
14216 some cycles later. */
14217 switch (get_attr_type (insn))
14220 /* Tell the first scheduling pass about the latency between
14221 a mtctr and bctr (and mtlr and br/blr). The first
14222 scheduling pass will not know about this latency since
14223 the mtctr instruction, which has the latency associated
14224 to it, will be generated by reload. */
14225 return TARGET_POWER ? 5 : 4;
14227 /* Leave some extra cycles between a compare and its
14228 dependent branch, to inhibit expensive mispredicts. */
14229 if ((rs6000_cpu_attr == CPU_PPC603
14230 || rs6000_cpu_attr == CPU_PPC604
14231 || rs6000_cpu_attr == CPU_PPC604E
14232 || rs6000_cpu_attr == CPU_PPC620
14233 || rs6000_cpu_attr == CPU_PPC630
14234 || rs6000_cpu_attr == CPU_PPC750
14235 || rs6000_cpu_attr == CPU_PPC7400
14236 || rs6000_cpu_attr == CPU_PPC7450
14237 || rs6000_cpu_attr == CPU_POWER4
14238 || rs6000_cpu_attr == CPU_POWER5)
14239 && recog_memoized (dep_insn)
14240 && (INSN_CODE (dep_insn) >= 0)
14241 && (get_attr_type (dep_insn) == TYPE_CMP
14242 || get_attr_type (dep_insn) == TYPE_COMPARE
14243 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14244 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14245 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14246 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14247 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14248 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14253 /* Fall out to return default cost. */
14259 /* The function returns a true if INSN is microcoded.
14260 Return false otherwise. */
14263 is_microcoded_insn (rtx insn)
14265 if (!insn || !INSN_P (insn)
14266 || GET_CODE (PATTERN (insn)) == USE
14267 || GET_CODE (PATTERN (insn)) == CLOBBER)
14270 if (rs6000_sched_groups)
14272 enum attr_type type = get_attr_type (insn);
14273 if (type == TYPE_LOAD_EXT_U
14274 || type == TYPE_LOAD_EXT_UX
14275 || type == TYPE_LOAD_UX
14276 || type == TYPE_STORE_UX
14277 || type == TYPE_MFCR)
14284 /* The function returns a nonzero value if INSN can be scheduled only
14285 as the first insn in a dispatch group ("dispatch-slot restricted").
14286 In this case, the returned value indicates how many dispatch slots
14287 the insn occupies (at the beginning of the group).
14288 Return 0 otherwise. */
14291 is_dispatch_slot_restricted (rtx insn)
14293 enum attr_type type;
14295 if (!rs6000_sched_groups)
14299 || insn == NULL_RTX
14300 || GET_CODE (insn) == NOTE
14301 || GET_CODE (PATTERN (insn)) == USE
14302 || GET_CODE (PATTERN (insn)) == CLOBBER)
14305 type = get_attr_type (insn);
14312 case TYPE_DELAYED_CR:
14313 case TYPE_CR_LOGICAL:
14321 if (rs6000_cpu == PROCESSOR_POWER5
14322 && is_cracked_insn (insn))
14328 /* The function returns true if INSN is cracked into 2 instructions
14329 by the processor (and therefore occupies 2 issue slots). */
14332 is_cracked_insn (rtx insn)
14334 if (!insn || !INSN_P (insn)
14335 || GET_CODE (PATTERN (insn)) == USE
14336 || GET_CODE (PATTERN (insn)) == CLOBBER)
14339 if (rs6000_sched_groups)
14341 enum attr_type type = get_attr_type (insn);
14342 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14343 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14344 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14345 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14346 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14347 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14348 || type == TYPE_IDIV || type == TYPE_LDIV
14349 || type == TYPE_INSERT_WORD)
14356 /* The function returns true if INSN can be issued only from
14357 the branch slot. */
14360 is_branch_slot_insn (rtx insn)
14362 if (!insn || !INSN_P (insn)
14363 || GET_CODE (PATTERN (insn)) == USE
14364 || GET_CODE (PATTERN (insn)) == CLOBBER)
14367 if (rs6000_sched_groups)
14369 enum attr_type type = get_attr_type (insn);
14370 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14378 /* A C statement (sans semicolon) to update the integer scheduling
14379 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14380 INSN earlier, reduce the priority to execute INSN later. Do not
14381 define this macro if you do not need to adjust the scheduling
14382 priorities of insns. */
14385 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14387 /* On machines (like the 750) which have asymmetric integer units,
14388 where one integer unit can do multiply and divides and the other
14389 can't, reduce the priority of multiply/divide so it is scheduled
14390 before other integer operations. */
14393 if (! INSN_P (insn))
14396 if (GET_CODE (PATTERN (insn)) == USE)
14399 switch (rs6000_cpu_attr) {
14401 switch (get_attr_type (insn))
14408 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14409 priority, priority);
14410 if (priority >= 0 && priority < 0x01000000)
14417 if (is_dispatch_slot_restricted (insn)
14418 && reload_completed
14419 && current_sched_info->sched_max_insns_priority
14420 && rs6000_sched_restricted_insns_priority)
14423 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14424 if (rs6000_sched_restricted_insns_priority == 1)
14425 /* Attach highest priority to insn. This means that in
14426 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14427 precede 'priority' (critical path) considerations. */
14428 return current_sched_info->sched_max_insns_priority;
14429 else if (rs6000_sched_restricted_insns_priority == 2)
14430 /* Increase priority of insn by a minimal amount. This means that in
14431 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14432 precede dispatch-slot restriction considerations. */
14433 return (priority + 1);
14439 /* Return how many instructions the machine can issue per cycle. */
14442 rs6000_issue_rate (void)
14444 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14445 if (!reload_completed)
14448 switch (rs6000_cpu_attr) {
14449 case CPU_RIOS1: /* ? */
14451 case CPU_PPC601: /* ? */
14474 /* Return how many instructions to look ahead for better insn
14478 rs6000_use_sched_lookahead (void)
14480 if (rs6000_cpu_attr == CPU_PPC8540)
14485 /* Determine is PAT refers to memory. */
14488 is_mem_ref (rtx pat)
14494 if (GET_CODE (pat) == MEM)
14497 /* Recursively process the pattern. */
14498 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14500 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14503 ret |= is_mem_ref (XEXP (pat, i));
14504 else if (fmt[i] == 'E')
14505 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14506 ret |= is_mem_ref (XVECEXP (pat, i, j));
14512 /* Determine if PAT is a PATTERN of a load insn. */
14515 is_load_insn1 (rtx pat)
14517 if (!pat || pat == NULL_RTX)
14520 if (GET_CODE (pat) == SET)
14521 return is_mem_ref (SET_SRC (pat));
14523 if (GET_CODE (pat) == PARALLEL)
14527 for (i = 0; i < XVECLEN (pat, 0); i++)
14528 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14535 /* Determine if INSN loads from memory. */
14538 is_load_insn (rtx insn)
14540 if (!insn || !INSN_P (insn))
14543 if (GET_CODE (insn) == CALL_INSN)
14546 return is_load_insn1 (PATTERN (insn));
14549 /* Determine if PAT is a PATTERN of a store insn. */
14552 is_store_insn1 (rtx pat)
14554 if (!pat || pat == NULL_RTX)
14557 if (GET_CODE (pat) == SET)
14558 return is_mem_ref (SET_DEST (pat));
14560 if (GET_CODE (pat) == PARALLEL)
14564 for (i = 0; i < XVECLEN (pat, 0); i++)
14565 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14572 /* Determine if INSN stores to memory. */
14575 is_store_insn (rtx insn)
14577 if (!insn || !INSN_P (insn))
14580 return is_store_insn1 (PATTERN (insn));
14583 /* Returns whether the dependence between INSN and NEXT is considered
14584 costly by the given target. */
14587 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14589 /* If the flag is not enbled - no dependence is considered costly;
14590 allow all dependent insns in the same group.
14591 This is the most aggressive option. */
14592 if (rs6000_sched_costly_dep == no_dep_costly)
14595 /* If the flag is set to 1 - a dependence is always considered costly;
14596 do not allow dependent instructions in the same group.
14597 This is the most conservative option. */
14598 if (rs6000_sched_costly_dep == all_deps_costly)
14601 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14602 && is_load_insn (next)
14603 && is_store_insn (insn))
14604 /* Prevent load after store in the same group. */
14607 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14608 && is_load_insn (next)
14609 && is_store_insn (insn)
14610 && (!link || (int) REG_NOTE_KIND (link) == 0))
14611 /* Prevent load after store in the same group if it is a true dependence. */
14614 /* The flag is set to X; dependences with latency >= X are considered costly,
14615 and will not be scheduled in the same group. */
14616 if (rs6000_sched_costly_dep <= max_dep_latency
14617 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14623 /* Return the next insn after INSN that is found before TAIL is reached,
14624 skipping any "non-active" insns - insns that will not actually occupy
14625 an issue slot. Return NULL_RTX if such an insn is not found. */
14628 get_next_active_insn (rtx insn, rtx tail)
14632 if (!insn || insn == tail)
14635 next_insn = NEXT_INSN (insn);
14638 && next_insn != tail
14639 && (GET_CODE(next_insn) == NOTE
14640 || GET_CODE (PATTERN (next_insn)) == USE
14641 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14643 next_insn = NEXT_INSN (next_insn);
14646 if (!next_insn || next_insn == tail)
14652 /* Return whether the presence of INSN causes a dispatch group termination
14653 of group WHICH_GROUP.
14655 If WHICH_GROUP == current_group, this function will return true if INSN
14656 causes the termination of the current group (i.e, the dispatch group to
14657 which INSN belongs). This means that INSN will be the last insn in the
14658 group it belongs to.
14660 If WHICH_GROUP == previous_group, this function will return true if INSN
14661 causes the termination of the previous group (i.e, the dispatch group that
14662 precedes the group to which INSN belongs). This means that INSN will be
14663 the first insn in the group it belongs to). */
14666 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14668 enum attr_type type;
14673 type = get_attr_type (insn);
14675 if (is_microcoded_insn (insn))
14678 if (which_group == current_group)
14680 if (is_branch_slot_insn (insn))
14684 else if (which_group == previous_group)
14686 if (is_dispatch_slot_restricted (insn))
14694 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14695 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14698 is_costly_group (rtx *group_insns, rtx next_insn)
14703 int issue_rate = rs6000_issue_rate ();
14705 for (i = 0; i < issue_rate; i++)
14707 rtx insn = group_insns[i];
14710 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14712 rtx next = XEXP (link, 0);
14713 if (next == next_insn)
14715 cost = insn_cost (insn, link, next_insn);
14716 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14725 /* Utility of the function redefine_groups.
14726 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14727 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14728 to keep it "far" (in a separate group) from GROUP_INSNS, following
14729 one of the following schemes, depending on the value of the flag
14730 -minsert_sched_nops = X:
14731 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14732 in order to force NEXT_INSN into a separate group.
14733 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14734 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14735 insertion (has a group just ended, how many vacant issue slots remain in the
14736 last group, and how many dispatch groups were encountered so far). */
14739 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14740 bool *group_end, int can_issue_more, int *group_count)
14744 int issue_rate = rs6000_issue_rate ();
14745 bool end = *group_end;
14748 if (next_insn == NULL_RTX)
14749 return can_issue_more;
14751 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14752 return can_issue_more;
14754 force = is_costly_group (group_insns, next_insn);
14756 return can_issue_more;
14758 if (sched_verbose > 6)
14759 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14760 *group_count ,can_issue_more);
14762 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14765 can_issue_more = 0;
14767 /* Since only a branch can be issued in the last issue_slot, it is
14768 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14769 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14770 in this case the last nop will start a new group and the branch will be
14771 forced to the new group. */
14772 if (can_issue_more && !is_branch_slot_insn (next_insn))
14775 while (can_issue_more > 0)
14778 emit_insn_before (nop, next_insn);
14786 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14788 int n_nops = rs6000_sched_insert_nops;
14790 /* Nops can't be issued from the branch slot, so the effective
14791 issue_rate for nops is 'issue_rate - 1'. */
14792 if (can_issue_more == 0)
14793 can_issue_more = issue_rate;
14795 if (can_issue_more == 0)
14797 can_issue_more = issue_rate - 1;
14800 for (i = 0; i < issue_rate; i++)
14802 group_insns[i] = 0;
14809 emit_insn_before (nop, next_insn);
14810 if (can_issue_more == issue_rate - 1) /* new group begins */
14813 if (can_issue_more == 0)
14815 can_issue_more = issue_rate - 1;
14818 for (i = 0; i < issue_rate; i++)
14820 group_insns[i] = 0;
14826 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14829 *group_end = /* Is next_insn going to start a new group? */
14831 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14832 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14833 || (can_issue_more < issue_rate &&
14834 insn_terminates_group_p (next_insn, previous_group)));
14835 if (*group_end && end)
14838 if (sched_verbose > 6)
14839 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14840 *group_count, can_issue_more);
14841 return can_issue_more;
14844 return can_issue_more;
14847 /* This function tries to synch the dispatch groups that the compiler "sees"
14848 with the dispatch groups that the processor dispatcher is expected to
14849 form in practice. It tries to achieve this synchronization by forcing the
14850 estimated processor grouping on the compiler (as opposed to the function
14851 'pad_goups' which tries to force the scheduler's grouping on the processor).
14853 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14854 examines the (estimated) dispatch groups that will be formed by the processor
14855 dispatcher. It marks these group boundaries to reflect the estimated
14856 processor grouping, overriding the grouping that the scheduler had marked.
14857 Depending on the value of the flag '-minsert-sched-nops' this function can
14858 force certain insns into separate groups or force a certain distance between
14859 them by inserting nops, for example, if there exists a "costly dependence"
14862 The function estimates the group boundaries that the processor will form as
14863 folllows: It keeps track of how many vacant issue slots are available after
14864 each insn. A subsequent insn will start a new group if one of the following
14866 - no more vacant issue slots remain in the current dispatch group.
14867 - only the last issue slot, which is the branch slot, is vacant, but the next
14868 insn is not a branch.
14869 - only the last 2 or less issue slots, including the branch slot, are vacant,
14870 which means that a cracked insn (which occupies two issue slots) can't be
14871 issued in this group.
14872 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14873 start a new group. */
14876 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14878 rtx insn, next_insn;
14880 int can_issue_more;
14883 int group_count = 0;
14887 issue_rate = rs6000_issue_rate ();
14888 group_insns = alloca (issue_rate * sizeof (rtx));
14889 for (i = 0; i < issue_rate; i++)
14891 group_insns[i] = 0;
14893 can_issue_more = issue_rate;
14895 insn = get_next_active_insn (prev_head_insn, tail);
14898 while (insn != NULL_RTX)
14900 slot = (issue_rate - can_issue_more);
14901 group_insns[slot] = insn;
14903 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14904 if (insn_terminates_group_p (insn, current_group))
14905 can_issue_more = 0;
14907 next_insn = get_next_active_insn (insn, tail);
14908 if (next_insn == NULL_RTX)
14909 return group_count + 1;
14911 group_end = /* Is next_insn going to start a new group? */
14912 (can_issue_more == 0
14913 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14914 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14915 || (can_issue_more < issue_rate &&
14916 insn_terminates_group_p (next_insn, previous_group)));
14918 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14919 next_insn, &group_end, can_issue_more, &group_count);
14924 can_issue_more = 0;
14925 for (i = 0; i < issue_rate; i++)
14927 group_insns[i] = 0;
14931 if (GET_MODE (next_insn) == TImode && can_issue_more)
14932 PUT_MODE(next_insn, VOIDmode);
14933 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14934 PUT_MODE (next_insn, TImode);
14937 if (can_issue_more == 0)
14938 can_issue_more = issue_rate;
14941 return group_count;
14944 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14945 dispatch group boundaries that the scheduler had marked. Pad with nops
14946 any dispatch groups which have vacant issue slots, in order to force the
14947 scheduler's grouping on the processor dispatcher. The function
14948 returns the number of dispatch groups found. */
14951 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14953 rtx insn, next_insn;
14956 int can_issue_more;
14958 int group_count = 0;
14960 /* Initialize issue_rate. */
14961 issue_rate = rs6000_issue_rate ();
14962 can_issue_more = issue_rate;
14964 insn = get_next_active_insn (prev_head_insn, tail);
14965 next_insn = get_next_active_insn (insn, tail);
14967 while (insn != NULL_RTX)
14970 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14972 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14974 if (next_insn == NULL_RTX)
14979 /* If the scheduler had marked group termination at this location
14980 (between insn and next_indn), and neither insn nor next_insn will
14981 force group termination, pad the group with nops to force group
14984 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14985 && !insn_terminates_group_p (insn, current_group)
14986 && !insn_terminates_group_p (next_insn, previous_group))
14988 if (!is_branch_slot_insn(next_insn))
14991 while (can_issue_more)
14994 emit_insn_before (nop, next_insn);
14999 can_issue_more = issue_rate;
15004 next_insn = get_next_active_insn (insn, tail);
15007 return group_count;
15010 /* The following function is called at the end of scheduling BB.
15011 After reload, it inserts nops at insn group bundling. */
15014 rs6000_sched_finish (FILE *dump, int sched_verbose)
15019 fprintf (dump, "=== Finishing schedule.\n");
15021 if (reload_completed && rs6000_sched_groups)
15023 if (rs6000_sched_insert_nops == sched_finish_none)
15026 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
15027 n_groups = pad_groups (dump, sched_verbose,
15028 current_sched_info->prev_head,
15029 current_sched_info->next_tail);
15031 n_groups = redefine_groups (dump, sched_verbose,
15032 current_sched_info->prev_head,
15033 current_sched_info->next_tail);
15035 if (sched_verbose >= 6)
15037 fprintf (dump, "ngroups = %d\n", n_groups);
15038 print_rtl (dump, current_sched_info->prev_head);
15039 fprintf (dump, "Done finish_sched\n");
15044 /* Length in units of the trampoline for entering a nested function. */
15047 rs6000_trampoline_size (void)
15051 switch (DEFAULT_ABI)
15057 ret = (TARGET_32BIT) ? 12 : 24;
15062 ret = (TARGET_32BIT) ? 40 : 48;
15069 /* Emit RTL insns to initialize the variable parts of a trampoline.
15070 FNADDR is an RTX for the address of the function's pure code.
15071 CXT is an RTX for the static chain value for the function. */
15074 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
15076 enum machine_mode pmode = Pmode;
15077 int regsize = (TARGET_32BIT) ? 4 : 8;
15078 rtx ctx_reg = force_reg (pmode, cxt);
15080 switch (DEFAULT_ABI)
15085 /* Macros to shorten the code expansions below. */
15086 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
15087 #define MEM_PLUS(addr,offset) \
15088 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
15090 /* Under AIX, just build the 3 word function descriptor */
15093 rtx fn_reg = gen_reg_rtx (pmode);
15094 rtx toc_reg = gen_reg_rtx (pmode);
15095 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
15096 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
15097 emit_move_insn (MEM_DEREF (addr), fn_reg);
15098 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
15099 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
15103 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
15106 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
15107 FALSE, VOIDmode, 4,
15109 GEN_INT (rs6000_trampoline_size ()), SImode,
15119 /* Table of valid machine attributes. */
15121 const struct attribute_spec rs6000_attribute_table[] =
15123 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
15124 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
15125 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15126 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
15127 { NULL, 0, 0, false, false, false, NULL }
15130 /* Handle the "altivec" attribute. The attribute may have
15131 arguments as follows:
15133 __attribute__((altivec(vector__)))
15134 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
15135 __attribute__((altivec(bool__))) (always followed by 'unsigned')
15137 and may appear more than once (e.g., 'vector bool char') in a
15138 given declaration. */
15141 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
15142 int flags ATTRIBUTE_UNUSED,
15143 bool *no_add_attrs)
15145 tree type = *node, result = NULL_TREE;
15146 enum machine_mode mode;
15149 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
15150 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
15151 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
15154 while (POINTER_TYPE_P (type)
15155 || TREE_CODE (type) == FUNCTION_TYPE
15156 || TREE_CODE (type) == METHOD_TYPE
15157 || TREE_CODE (type) == ARRAY_TYPE)
15158 type = TREE_TYPE (type);
15160 mode = TYPE_MODE (type);
15162 if (rs6000_warn_altivec_long
15163 && (type == long_unsigned_type_node || type == long_integer_type_node))
15164 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
15166 switch (altivec_type)
15169 unsigned_p = TYPE_UNSIGNED (type);
15173 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
15176 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
15179 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
15181 case SFmode: result = V4SF_type_node; break;
15182 /* If the user says 'vector int bool', we may be handed the 'bool'
15183 attribute _before_ the 'vector' attribute, and so select the proper
15184 type in the 'b' case below. */
15185 case V4SImode: case V8HImode: case V16QImode: result = type;
15192 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
15193 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
15194 case QImode: case V16QImode: result = bool_V16QI_type_node;
15201 case V8HImode: result = pixel_V8HI_type_node;
15207 if (result && result != type && TYPE_READONLY (type))
15208 result = build_qualified_type (result, TYPE_QUAL_CONST);
15210 *no_add_attrs = true; /* No need to hang on to the attribute. */
15213 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
15215 *node = reconstruct_complex_type (*node, result);
15220 /* AltiVec defines four built-in scalar types that serve as vector
15221 elements; we must teach the compiler how to mangle them. */
15223 static const char *
15224 rs6000_mangle_fundamental_type (tree type)
15226 if (type == bool_char_type_node) return "U6__boolc";
15227 if (type == bool_short_type_node) return "U6__bools";
15228 if (type == pixel_type_node) return "u7__pixel";
15229 if (type == bool_int_type_node) return "U6__booli";
15231 /* For all other types, use normal C++ mangling. */
15235 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15236 struct attribute_spec.handler. */
15239 rs6000_handle_longcall_attribute (tree *node, tree name,
15240 tree args ATTRIBUTE_UNUSED,
15241 int flags ATTRIBUTE_UNUSED,
15242 bool *no_add_attrs)
15244 if (TREE_CODE (*node) != FUNCTION_TYPE
15245 && TREE_CODE (*node) != FIELD_DECL
15246 && TREE_CODE (*node) != TYPE_DECL)
15248 warning ("`%s' attribute only applies to functions",
15249 IDENTIFIER_POINTER (name));
15250 *no_add_attrs = true;
15256 /* Set longcall attributes on all functions declared when
15257 rs6000_default_long_calls is true. */
15259 rs6000_set_default_type_attributes (tree type)
15261 if (rs6000_default_long_calls
15262 && (TREE_CODE (type) == FUNCTION_TYPE
15263 || TREE_CODE (type) == METHOD_TYPE))
15264 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15266 TYPE_ATTRIBUTES (type));
15269 /* Return a reference suitable for calling a function with the
15270 longcall attribute. */
15273 rs6000_longcall_ref (rtx call_ref)
15275 const char *call_name;
15278 if (GET_CODE (call_ref) != SYMBOL_REF)
15281 /* System V adds '.' to the internal name, so skip them. */
15282 call_name = XSTR (call_ref, 0);
15283 if (*call_name == '.')
15285 while (*call_name == '.')
15288 node = get_identifier (call_name);
15289 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15292 return force_reg (Pmode, call_ref);
15295 #ifdef USING_ELFOS_H
15297 /* A C statement or statements to switch to the appropriate section
15298 for output of RTX in mode MODE. You can assume that RTX is some
15299 kind of constant in RTL. The argument MODE is redundant except in
15300 the case of a `const_int' rtx. Select the section by calling
15301 `text_section' or one of the alternatives for other sections.
15303 Do not define this macro if you put all constants in the read-only
15307 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15308 unsigned HOST_WIDE_INT align)
15310 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15313 default_elf_select_rtx_section (mode, x, align);
15316 /* A C statement or statements to switch to the appropriate
15317 section for output of DECL. DECL is either a `VAR_DECL' node
15318 or a constant of some sort. RELOC indicates whether forming
15319 the initial value of DECL requires link-time relocations. */
15322 rs6000_elf_select_section (tree decl, int reloc,
15323 unsigned HOST_WIDE_INT align)
15325 /* Pretend that we're always building for a shared library when
15326 ABI_AIX, because otherwise we end up with dynamic relocations
15327 in read-only sections. This happens for function pointers,
15328 references to vtables in typeinfo, and probably other cases. */
15329 default_elf_select_section_1 (decl, reloc, align,
15330 flag_pic || DEFAULT_ABI == ABI_AIX);
15333 /* A C statement to build up a unique section name, expressed as a
15334 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15335 RELOC indicates whether the initial value of EXP requires
15336 link-time relocations. If you do not define this macro, GCC will use
15337 the symbol name prefixed by `.' as the section name. Note - this
15338 macro can now be called for uninitialized data items as well as
15339 initialized data and functions. */
15342 rs6000_elf_unique_section (tree decl, int reloc)
15344 /* As above, pretend that we're always building for a shared library
15345 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15346 default_unique_section_1 (decl, reloc,
15347 flag_pic || DEFAULT_ABI == ABI_AIX);
15350 /* For a SYMBOL_REF, set generic flags and then perform some
15351 target-specific processing.
15353 When the AIX ABI is requested on a non-AIX system, replace the
15354 function name with the real name (with a leading .) rather than the
15355 function descriptor name. This saves a lot of overriding code to
15356 read the prefixes. */
15359 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15361 default_encode_section_info (decl, rtl, first);
15364 && TREE_CODE (decl) == FUNCTION_DECL
15366 && DEFAULT_ABI == ABI_AIX)
15368 rtx sym_ref = XEXP (rtl, 0);
15369 size_t len = strlen (XSTR (sym_ref, 0));
15370 char *str = alloca (len + 2);
15372 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15373 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15378 rs6000_elf_in_small_data_p (tree decl)
15380 if (rs6000_sdata == SDATA_NONE)
15383 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15385 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15386 if (strcmp (section, ".sdata") == 0
15387 || strcmp (section, ".sdata2") == 0
15388 || strcmp (section, ".sbss") == 0
15389 || strcmp (section, ".sbss2") == 0
15390 || strcmp (section, ".PPC.EMB.sdata0") == 0
15391 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15396 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15399 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15400 /* If it's not public, and we're not going to reference it there,
15401 there's no need to put it in the small data section. */
15402 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15409 #endif /* USING_ELFOS_H */
15412 /* Return a REG that occurs in ADDR with coefficient 1.
15413 ADDR can be effectively incremented by incrementing REG.
15415 r0 is special and we must not select it as an address
15416 register by this routine since our caller will try to
15417 increment the returned register via an "la" instruction. */
15420 find_addr_reg (rtx addr)
15422 while (GET_CODE (addr) == PLUS)
15424 if (GET_CODE (XEXP (addr, 0)) == REG
15425 && REGNO (XEXP (addr, 0)) != 0)
15426 addr = XEXP (addr, 0);
15427 else if (GET_CODE (XEXP (addr, 1)) == REG
15428 && REGNO (XEXP (addr, 1)) != 0)
15429 addr = XEXP (addr, 1);
15430 else if (CONSTANT_P (XEXP (addr, 0)))
15431 addr = XEXP (addr, 1);
15432 else if (CONSTANT_P (XEXP (addr, 1)))
15433 addr = XEXP (addr, 0);
15437 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15443 rs6000_fatal_bad_address (rtx op)
15445 fatal_insn ("bad address", op);
15451 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15452 reference and a constant. */
15455 symbolic_operand (rtx op)
15457 switch (GET_CODE (op))
15464 return (GET_CODE (op) == SYMBOL_REF ||
15465 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15466 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15467 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15476 static tree branch_island_list = 0;
15478 /* Remember to generate a branch island for far calls to the given
15482 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15484 tree branch_island = build_tree_list (function_name, label_name);
15485 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15486 TREE_CHAIN (branch_island) = branch_island_list;
15487 branch_island_list = branch_island;
15490 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15491 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15492 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15493 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15495 /* Generate far-jump branch islands for everything on the
15496 branch_island_list. Invoked immediately after the last instruction
15497 of the epilogue has been emitted; the branch-islands must be
15498 appended to, and contiguous with, the function body. Mach-O stubs
15499 are generated in machopic_output_stub(). */
15502 macho_branch_islands (void)
15505 tree branch_island;
15507 for (branch_island = branch_island_list;
15509 branch_island = TREE_CHAIN (branch_island))
15511 const char *label =
15512 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15514 darwin_strip_name_encoding (
15515 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15516 char name_buf[512];
15517 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15518 if (name[0] == '*' || name[0] == '&')
15519 strcpy (name_buf, name+1);
15523 strcpy (name_buf+1, name);
15525 strcpy (tmp_buf, "\n");
15526 strcat (tmp_buf, label);
15527 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15528 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15529 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15530 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15531 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15534 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15535 strcat (tmp_buf, label);
15536 strcat (tmp_buf, "_pic\n");
15537 strcat (tmp_buf, label);
15538 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15540 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15541 strcat (tmp_buf, name_buf);
15542 strcat (tmp_buf, " - ");
15543 strcat (tmp_buf, label);
15544 strcat (tmp_buf, "_pic)\n");
15546 strcat (tmp_buf, "\tmtlr r0\n");
15548 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15549 strcat (tmp_buf, name_buf);
15550 strcat (tmp_buf, " - ");
15551 strcat (tmp_buf, label);
15552 strcat (tmp_buf, "_pic)\n");
15554 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15558 strcat (tmp_buf, ":\nlis r12,hi16(");
15559 strcat (tmp_buf, name_buf);
15560 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15561 strcat (tmp_buf, name_buf);
15562 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15564 output_asm_insn (tmp_buf, 0);
15565 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15566 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15567 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15568 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15569 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15572 branch_island_list = 0;
15575 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15576 already there or not. */
15579 no_previous_def (tree function_name)
15581 tree branch_island;
15582 for (branch_island = branch_island_list;
15584 branch_island = TREE_CHAIN (branch_island))
15585 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15590 /* GET_PREV_LABEL gets the label name from the previous definition of
15594 get_prev_label (tree function_name)
15596 tree branch_island;
15597 for (branch_island = branch_island_list;
15599 branch_island = TREE_CHAIN (branch_island))
15600 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15601 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15605 /* INSN is either a function call or a millicode call. It may have an
15606 unconditional jump in its delay slot.
15608 CALL_DEST is the routine we are calling. */
15611 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15613 static char buf[256];
15614 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15615 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15618 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15620 if (no_previous_def (funname))
15622 int line_number = 0;
15623 rtx label_rtx = gen_label_rtx ();
15624 char *label_buf, temp_buf[256];
15625 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15626 CODE_LABEL_NUMBER (label_rtx));
15627 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15628 labelname = get_identifier (label_buf);
15629 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15631 line_number = NOTE_LINE_NUMBER (insn);
15632 add_compiler_branch_island (labelname, funname, line_number);
15635 labelname = get_prev_label (funname);
15637 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15638 instruction will reach 'foo', otherwise link as 'bl L42'".
15639 "L42" should be a 'branch island', that will do a far jump to
15640 'foo'. Branch islands are generated in
15641 macho_branch_islands(). */
15642 sprintf (buf, "jbsr %%z%d,%.246s",
15643 dest_operand_number, IDENTIFIER_POINTER (labelname));
15646 sprintf (buf, "bl %%z%d", dest_operand_number);
15650 #endif /* TARGET_MACHO */
15652 /* Generate PIC and indirect symbol stubs. */
15655 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15657 unsigned int length;
15658 char *symbol_name, *lazy_ptr_name;
15659 char *local_label_0;
15660 static int label = 0;
15662 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15663 symb = (*targetm.strip_name_encoding) (symb);
15666 length = strlen (symb);
15667 symbol_name = alloca (length + 32);
15668 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15670 lazy_ptr_name = alloca (length + 32);
15671 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15674 machopic_picsymbol_stub1_section ();
15676 machopic_symbol_stub1_section ();
15677 fprintf (file, "\t.align 2\n");
15679 fprintf (file, "%s:\n", stub);
15680 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15685 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15686 sprintf (local_label_0, "\"L%011d$spb\"", label);
15688 fprintf (file, "\tmflr r0\n");
15689 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15690 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15691 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15692 lazy_ptr_name, local_label_0);
15693 fprintf (file, "\tmtlr r0\n");
15694 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15695 lazy_ptr_name, local_label_0);
15696 fprintf (file, "\tmtctr r12\n");
15697 fprintf (file, "\tbctr\n");
15701 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15702 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15703 fprintf (file, "\tmtctr r12\n");
15704 fprintf (file, "\tbctr\n");
15707 machopic_lazy_symbol_ptr_section ();
15708 fprintf (file, "%s:\n", lazy_ptr_name);
15709 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15710 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15713 /* Legitimize PIC addresses. If the address is already
15714 position-independent, we return ORIG. Newly generated
15715 position-independent addresses go into a reg. This is REG if non
15716 zero, otherwise we allocate register(s) as necessary. */
15718 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15721 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15726 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15727 reg = gen_reg_rtx (Pmode);
15729 if (GET_CODE (orig) == CONST)
15731 if (GET_CODE (XEXP (orig, 0)) == PLUS
15732 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15735 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15737 /* Use a different reg for the intermediate value, as
15738 it will be marked UNCHANGING. */
15739 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15742 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15745 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15751 if (GET_CODE (offset) == CONST_INT)
15753 if (SMALL_INT (offset))
15754 return plus_constant (base, INTVAL (offset));
15755 else if (! reload_in_progress && ! reload_completed)
15756 offset = force_reg (Pmode, offset);
15759 rtx mem = force_const_mem (Pmode, orig);
15760 return machopic_legitimize_pic_address (mem, Pmode, reg);
15763 return gen_rtx_PLUS (Pmode, base, offset);
15766 /* Fall back on generic machopic code. */
15767 return machopic_legitimize_pic_address (orig, mode, reg);
15770 /* This is just a placeholder to make linking work without having to
15771 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15772 ever needed for Darwin (not too likely!) this would have to get a
15773 real definition. */
15780 #endif /* TARGET_MACHO */
15783 static unsigned int
15784 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15786 return default_section_type_flags_1 (decl, name, reloc,
15787 flag_pic || DEFAULT_ABI == ABI_AIX);
15790 /* Record an element in the table of global constructors. SYMBOL is
15791 a SYMBOL_REF of the function to be called; PRIORITY is a number
15792 between 0 and MAX_INIT_PRIORITY.
15794 This differs from default_named_section_asm_out_constructor in
15795 that we have special handling for -mrelocatable. */
15798 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15800 const char *section = ".ctors";
15803 if (priority != DEFAULT_INIT_PRIORITY)
15805 sprintf (buf, ".ctors.%.5u",
15806 /* Invert the numbering so the linker puts us in the proper
15807 order; constructors are run from right to left, and the
15808 linker sorts in increasing order. */
15809 MAX_INIT_PRIORITY - priority);
15813 named_section_flags (section, SECTION_WRITE);
15814 assemble_align (POINTER_SIZE);
15816 if (TARGET_RELOCATABLE)
15818 fputs ("\t.long (", asm_out_file);
15819 output_addr_const (asm_out_file, symbol);
15820 fputs (")@fixup\n", asm_out_file);
15823 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15827 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15829 const char *section = ".dtors";
15832 if (priority != DEFAULT_INIT_PRIORITY)
15834 sprintf (buf, ".dtors.%.5u",
15835 /* Invert the numbering so the linker puts us in the proper
15836 order; constructors are run from right to left, and the
15837 linker sorts in increasing order. */
15838 MAX_INIT_PRIORITY - priority);
15842 named_section_flags (section, SECTION_WRITE);
15843 assemble_align (POINTER_SIZE);
15845 if (TARGET_RELOCATABLE)
15847 fputs ("\t.long (", asm_out_file);
15848 output_addr_const (asm_out_file, symbol);
15849 fputs (")@fixup\n", asm_out_file);
15852 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15856 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15860 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15861 ASM_OUTPUT_LABEL (file, name);
15862 fputs (DOUBLE_INT_ASM_OP, file);
15864 assemble_name (file, name);
15865 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15866 assemble_name (file, name);
15867 fputs (",24\n\t.type\t.", file);
15868 assemble_name (file, name);
15869 fputs (",@function\n", file);
15870 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15872 fputs ("\t.globl\t.", file);
15873 assemble_name (file, name);
15876 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15878 ASM_OUTPUT_LABEL (file, name);
15882 if (TARGET_RELOCATABLE
15883 && (get_pool_size () != 0 || current_function_profile)
15888 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15890 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15891 fprintf (file, "\t.long ");
15892 assemble_name (file, buf);
15894 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15895 assemble_name (file, buf);
15899 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15900 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15902 if (DEFAULT_ABI == ABI_AIX)
15904 const char *desc_name, *orig_name;
15906 orig_name = (*targetm.strip_name_encoding) (name);
15907 desc_name = orig_name;
15908 while (*desc_name == '.')
15911 if (TREE_PUBLIC (decl))
15912 fprintf (file, "\t.globl %s\n", desc_name);
15914 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15915 fprintf (file, "%s:\n", desc_name);
15916 fprintf (file, "\t.long %s\n", orig_name);
15917 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15918 if (DEFAULT_ABI == ABI_AIX)
15919 fputs ("\t.long 0\n", file);
15920 fprintf (file, "\t.previous\n");
15922 ASM_OUTPUT_LABEL (file, name);
15928 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15930 fputs (GLOBAL_ASM_OP, stream);
15931 RS6000_OUTPUT_BASENAME (stream, name);
15932 putc ('\n', stream);
15936 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15939 static const char * const suffix[3] = { "PR", "RO", "RW" };
15941 if (flags & SECTION_CODE)
15943 else if (flags & SECTION_WRITE)
15948 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15949 (flags & SECTION_CODE) ? "." : "",
15950 name, suffix[smclass], flags & SECTION_ENTSIZE);
15954 rs6000_xcoff_select_section (tree decl, int reloc,
15955 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15957 if (decl_readonly_section_1 (decl, reloc, 1))
15959 if (TREE_PUBLIC (decl))
15960 read_only_data_section ();
15962 read_only_private_data_section ();
15966 if (TREE_PUBLIC (decl))
15969 private_data_section ();
15974 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15978 /* Use select_section for private and uninitialized data. */
15979 if (!TREE_PUBLIC (decl)
15980 || DECL_COMMON (decl)
15981 || DECL_INITIAL (decl) == NULL_TREE
15982 || DECL_INITIAL (decl) == error_mark_node
15983 || (flag_zero_initialized_in_bss
15984 && initializer_zerop (DECL_INITIAL (decl))))
15987 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15988 name = (*targetm.strip_name_encoding) (name);
15989 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15992 /* Select section for constant in constant pool.
15994 On RS/6000, all constants are in the private read-only data area.
15995 However, if this is being placed in the TOC it must be output as a
15999 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
16000 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16002 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
16005 read_only_private_data_section ();
16008 /* Remove any trailing [DS] or the like from the symbol name. */
16010 static const char *
16011 rs6000_xcoff_strip_name_encoding (const char *name)
16016 len = strlen (name);
16017 if (name[len - 1] == ']')
16018 return ggc_alloc_string (name, len - 4);
16023 /* Section attributes. AIX is always PIC. */
16025 static unsigned int
16026 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
16028 unsigned int align;
16029 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
16031 /* Align to at least UNIT size. */
16032 if (flags & SECTION_CODE)
16033 align = MIN_UNITS_PER_WORD;
16035 /* Increase alignment of large objects if not already stricter. */
16036 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
16037 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
16038 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
16040 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
16043 /* Output at beginning of assembler file.
16045 Initialize the section names for the RS/6000 at this point.
16047 Specify filename, including full path, to assembler.
16049 We want to go into the TOC section so at least one .toc will be emitted.
16050 Also, in order to output proper .bs/.es pairs, we need at least one static
16051 [RW] section emitted.
16053 Finally, declare mcount when profiling to make the assembler happy. */
16056 rs6000_xcoff_file_start (void)
16058 rs6000_gen_section_name (&xcoff_bss_section_name,
16059 main_input_filename, ".bss_");
16060 rs6000_gen_section_name (&xcoff_private_data_section_name,
16061 main_input_filename, ".rw_");
16062 rs6000_gen_section_name (&xcoff_read_only_section_name,
16063 main_input_filename, ".ro_");
16065 fputs ("\t.file\t", asm_out_file);
16066 output_quoted_string (asm_out_file, main_input_filename);
16067 fputc ('\n', asm_out_file);
16069 if (write_symbols != NO_DEBUG)
16070 private_data_section ();
16073 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
16074 rs6000_file_start ();
16077 /* Output at end of assembler file.
16078 On the RS/6000, referencing data should automatically pull in text. */
16081 rs6000_xcoff_file_end (void)
16084 fputs ("_section_.text:\n", asm_out_file);
16086 fputs (TARGET_32BIT
16087 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
16090 #endif /* TARGET_XCOFF */
16093 /* Cross-module name binding. Darwin does not support overriding
16094 functions at dynamic-link time. */
16097 rs6000_binds_local_p (tree decl)
16099 return default_binds_local_p_1 (decl, 0);
16103 /* Compute a (partial) cost for rtx X. Return true if the complete
16104 cost has been computed, and false if subexpressions should be
16105 scanned. In either case, *TOTAL contains the cost result. */
16108 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
16113 /* On the RS/6000, if it is valid in the insn, it is free.
16114 So this always returns 0. */
16125 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16126 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
16127 + 0x8000) >= 0x10000)
16128 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16129 ? COSTS_N_INSNS (2)
16130 : COSTS_N_INSNS (1));
16136 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
16137 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
16138 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
16139 ? COSTS_N_INSNS (2)
16140 : COSTS_N_INSNS (1));
16146 *total = COSTS_N_INSNS (2);
16149 switch (rs6000_cpu)
16151 case PROCESSOR_RIOS1:
16152 case PROCESSOR_PPC405:
16153 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16154 ? COSTS_N_INSNS (5)
16155 : (INTVAL (XEXP (x, 1)) >= -256
16156 && INTVAL (XEXP (x, 1)) <= 255)
16157 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16160 case PROCESSOR_PPC440:
16161 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16162 ? COSTS_N_INSNS (3)
16163 : COSTS_N_INSNS (2));
16166 case PROCESSOR_RS64A:
16167 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16168 ? GET_MODE (XEXP (x, 1)) != DImode
16169 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
16170 : (INTVAL (XEXP (x, 1)) >= -256
16171 && INTVAL (XEXP (x, 1)) <= 255)
16172 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
16175 case PROCESSOR_RIOS2:
16176 case PROCESSOR_MPCCORE:
16177 case PROCESSOR_PPC604e:
16178 *total = COSTS_N_INSNS (2);
16181 case PROCESSOR_PPC601:
16182 *total = COSTS_N_INSNS (5);
16185 case PROCESSOR_PPC603:
16186 case PROCESSOR_PPC7400:
16187 case PROCESSOR_PPC750:
16188 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16189 ? COSTS_N_INSNS (5)
16190 : (INTVAL (XEXP (x, 1)) >= -256
16191 && INTVAL (XEXP (x, 1)) <= 255)
16192 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
16195 case PROCESSOR_PPC7450:
16196 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16197 ? COSTS_N_INSNS (4)
16198 : COSTS_N_INSNS (3));
16201 case PROCESSOR_PPC403:
16202 case PROCESSOR_PPC604:
16203 case PROCESSOR_PPC8540:
16204 *total = COSTS_N_INSNS (4);
16207 case PROCESSOR_PPC620:
16208 case PROCESSOR_PPC630:
16209 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16210 ? GET_MODE (XEXP (x, 1)) != DImode
16211 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
16212 : (INTVAL (XEXP (x, 1)) >= -256
16213 && INTVAL (XEXP (x, 1)) <= 255)
16214 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
16217 case PROCESSOR_POWER4:
16218 case PROCESSOR_POWER5:
16219 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
16220 ? GET_MODE (XEXP (x, 1)) != DImode
16221 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
16222 : COSTS_N_INSNS (2));
16231 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16232 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16234 *total = COSTS_N_INSNS (2);
16241 switch (rs6000_cpu)
16243 case PROCESSOR_RIOS1:
16244 *total = COSTS_N_INSNS (19);
16247 case PROCESSOR_RIOS2:
16248 *total = COSTS_N_INSNS (13);
16251 case PROCESSOR_RS64A:
16252 *total = (GET_MODE (XEXP (x, 1)) != DImode
16253 ? COSTS_N_INSNS (65)
16254 : COSTS_N_INSNS (67));
16257 case PROCESSOR_MPCCORE:
16258 *total = COSTS_N_INSNS (6);
16261 case PROCESSOR_PPC403:
16262 *total = COSTS_N_INSNS (33);
16265 case PROCESSOR_PPC405:
16266 *total = COSTS_N_INSNS (35);
16269 case PROCESSOR_PPC440:
16270 *total = COSTS_N_INSNS (34);
16273 case PROCESSOR_PPC601:
16274 *total = COSTS_N_INSNS (36);
16277 case PROCESSOR_PPC603:
16278 *total = COSTS_N_INSNS (37);
16281 case PROCESSOR_PPC604:
16282 case PROCESSOR_PPC604e:
16283 *total = COSTS_N_INSNS (20);
16286 case PROCESSOR_PPC620:
16287 case PROCESSOR_PPC630:
16288 *total = (GET_MODE (XEXP (x, 1)) != DImode
16289 ? COSTS_N_INSNS (21)
16290 : COSTS_N_INSNS (37));
16293 case PROCESSOR_PPC750:
16294 case PROCESSOR_PPC8540:
16295 case PROCESSOR_PPC7400:
16296 *total = COSTS_N_INSNS (19);
16299 case PROCESSOR_PPC7450:
16300 *total = COSTS_N_INSNS (23);
16303 case PROCESSOR_POWER4:
16304 case PROCESSOR_POWER5:
16305 *total = (GET_MODE (XEXP (x, 1)) != DImode
16306 ? COSTS_N_INSNS (18)
16307 : COSTS_N_INSNS (34));
16315 *total = COSTS_N_INSNS (4);
16319 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16328 /* A C expression returning the cost of moving data from a register of class
16329 CLASS1 to one of CLASS2. */
16332 rs6000_register_move_cost (enum machine_mode mode,
16333 enum reg_class from, enum reg_class to)
16335 /* Moves from/to GENERAL_REGS. */
16336 if (reg_classes_intersect_p (to, GENERAL_REGS)
16337 || reg_classes_intersect_p (from, GENERAL_REGS))
16339 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16342 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16343 return (rs6000_memory_move_cost (mode, from, 0)
16344 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16346 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16347 else if (from == CR_REGS)
16351 /* A move will cost one instruction per GPR moved. */
16352 return 2 * HARD_REGNO_NREGS (0, mode);
16355 /* Moving between two similar registers is just one instruction. */
16356 else if (reg_classes_intersect_p (to, from))
16357 return mode == TFmode ? 4 : 2;
16359 /* Everything else has to go through GENERAL_REGS. */
16361 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16362 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16365 /* A C expressions returning the cost of moving data of MODE from a register to
16369 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16370 int in ATTRIBUTE_UNUSED)
16372 if (reg_classes_intersect_p (class, GENERAL_REGS))
16373 return 4 * HARD_REGNO_NREGS (0, mode);
16374 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16375 return 4 * HARD_REGNO_NREGS (32, mode);
16376 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16377 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16379 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16382 /* Return an RTX representing where to find the function value of a
16383 function returning MODE. */
16385 rs6000_complex_function_value (enum machine_mode mode)
16387 unsigned int regno;
16389 enum machine_mode inner = GET_MODE_INNER (mode);
16390 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16392 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16393 regno = FP_ARG_RETURN;
16396 regno = GP_ARG_RETURN;
16398 /* 32-bit is OK since it'll go in r3/r4. */
16399 if (TARGET_32BIT && inner_bytes >= 4)
16400 return gen_rtx_REG (mode, regno);
16403 if (inner_bytes >= 8)
16404 return gen_rtx_REG (mode, regno);
16406 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16408 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16409 GEN_INT (inner_bytes));
16410 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16413 /* Define how to find the value returned by a function.
16414 VALTYPE is the data type of the value (as a tree).
16415 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16416 otherwise, FUNC is 0.
16418 On the SPE, both FPs and vectors are returned in r3.
16420 On RS/6000 an integer value is in r3 and a floating-point value is in
16421 fp1, unless -msoft-float. */
16424 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16426 enum machine_mode mode;
16427 unsigned int regno;
16429 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16431 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16432 return gen_rtx_PARALLEL (DImode,
16434 gen_rtx_EXPR_LIST (VOIDmode,
16435 gen_rtx_REG (SImode, GP_ARG_RETURN),
16437 gen_rtx_EXPR_LIST (VOIDmode,
16438 gen_rtx_REG (SImode,
16439 GP_ARG_RETURN + 1),
16443 if ((INTEGRAL_TYPE_P (valtype)
16444 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16445 || POINTER_TYPE_P (valtype))
16446 mode = TARGET_32BIT ? SImode : DImode;
16448 mode = TYPE_MODE (valtype);
16450 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
16451 regno = FP_ARG_RETURN;
16452 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16453 && targetm.calls.split_complex_arg)
16454 return rs6000_complex_function_value (mode);
16455 else if (TREE_CODE (valtype) == VECTOR_TYPE
16456 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16457 regno = ALTIVEC_ARG_RETURN;
16459 regno = GP_ARG_RETURN;
16461 return gen_rtx_REG (mode, regno);
16464 /* Define how to find the value returned by a library function
16465 assuming the value has mode MODE. */
16467 rs6000_libcall_value (enum machine_mode mode)
16469 unsigned int regno;
16471 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16472 && TARGET_HARD_FLOAT && TARGET_FPRS)
16473 regno = FP_ARG_RETURN;
16474 else if (ALTIVEC_VECTOR_MODE (mode)
16475 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
16476 regno = ALTIVEC_ARG_RETURN;
16477 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16478 return rs6000_complex_function_value (mode);
16480 regno = GP_ARG_RETURN;
16482 return gen_rtx_REG (mode, regno);
16485 /* Define the offset between two registers, FROM to be eliminated and its
16486 replacement TO, at the start of a routine. */
16488 rs6000_initial_elimination_offset (int from, int to)
16490 rs6000_stack_t *info = rs6000_stack_info ();
16491 HOST_WIDE_INT offset;
16493 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16494 offset = info->push_p ? 0 : -info->total_size;
16495 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16496 offset = info->total_size;
16497 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16498 offset = info->push_p ? info->total_size : 0;
16499 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16507 /* Return true if TYPE is of type __ev64_opaque__. */
16510 is_ev64_opaque_type (tree type)
16513 && (type == opaque_V2SI_type_node
16514 || type == opaque_V2SF_type_node
16515 || type == opaque_p_V2SI_type_node));
16519 rs6000_dwarf_register_span (rtx reg)
16523 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16526 regno = REGNO (reg);
16528 /* The duality of the SPE register size wreaks all kinds of havoc.
16529 This is a way of distinguishing r0 in 32-bits from r0 in
16532 gen_rtx_PARALLEL (VOIDmode,
16535 gen_rtx_REG (SImode, regno + 1200),
16536 gen_rtx_REG (SImode, regno))
16538 gen_rtx_REG (SImode, regno),
16539 gen_rtx_REG (SImode, regno + 1200)));
16542 /* Map internal gcc register numbers to DWARF2 register numbers. */
16545 rs6000_dbx_register_number (unsigned int regno)
16547 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16549 if (regno == MQ_REGNO)
16551 if (regno == LINK_REGISTER_REGNUM)
16553 if (regno == COUNT_REGISTER_REGNUM)
16555 if (CR_REGNO_P (regno))
16556 return regno - CR0_REGNO + 86;
16557 if (regno == XER_REGNO)
16559 if (ALTIVEC_REGNO_P (regno))
16560 return regno - FIRST_ALTIVEC_REGNO + 1124;
16561 if (regno == VRSAVE_REGNO)
16563 if (regno == VSCR_REGNO)
16565 if (regno == SPE_ACC_REGNO)
16567 if (regno == SPEFSCR_REGNO)
16569 /* SPE high reg number. We get these values of regno from
16570 rs6000_dwarf_register_span. */
16571 if (regno >= 1200 && regno < 1232)
16577 #include "gt-rs6000.h"