1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
219 static GTY(()) tree V16QI_type_node;
220 static GTY(()) tree V2SI_type_node;
221 static GTY(()) tree V2SF_type_node;
222 static GTY(()) tree V4HI_type_node;
223 static GTY(()) tree V4SI_type_node;
224 static GTY(()) tree V4SF_type_node;
225 static GTY(()) tree V8HI_type_node;
226 static GTY(()) tree unsigned_V16QI_type_node;
227 static GTY(()) tree unsigned_V8HI_type_node;
228 static GTY(()) tree unsigned_V4SI_type_node;
229 static GTY(()) tree bool_char_type_node; /* __bool char */
230 static GTY(()) tree bool_short_type_node; /* __bool short */
231 static GTY(()) tree bool_int_type_node; /* __bool int */
232 static GTY(()) tree pixel_type_node; /* __pixel */
233 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
234 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
235 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
236 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
238 int rs6000_warn_altivec_long = 1; /* On by default. */
239 const char *rs6000_warn_altivec_long_switch;
241 const char *rs6000_traceback_name;
243 traceback_default = 0,
249 /* Flag to say the TOC is initialized */
251 char toc_label_name[10];
253 /* Alias set for saves and restores from the rs6000 stack. */
254 static GTY(()) int rs6000_sr_alias_set;
256 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
257 The only place that looks at this is rs6000_set_default_type_attributes;
258 everywhere else should rely on the presence or absence of a longcall
259 attribute on the function declaration. */
260 int rs6000_default_long_calls;
261 const char *rs6000_longcall_switch;
263 /* Control alignment for fields within structures. */
264 /* String from -malign-XXXXX. */
265 const char *rs6000_alignment_string;
266 int rs6000_alignment_flags;
268 struct builtin_description
270 /* mask is not const because we're going to alter it below. This
271 nonsense will go away when we rewrite the -march infrastructure
272 to give us more target flag bits. */
274 const enum insn_code icode;
275 const char *const name;
276 const enum rs6000_builtins code;
279 static bool rs6000_function_ok_for_sibcall (tree, tree);
280 static int num_insns_constant_wide (HOST_WIDE_INT);
281 static void validate_condition_mode (enum rtx_code, enum machine_mode);
282 static rtx rs6000_generate_compare (enum rtx_code);
283 static void rs6000_maybe_dead (rtx);
284 static void rs6000_emit_stack_tie (void);
285 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
286 static rtx spe_synthesize_frame_save (rtx);
287 static bool spe_func_has_64bit_regs_p (void);
288 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
290 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
291 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
292 static unsigned rs6000_hash_constant (rtx);
293 static unsigned toc_hash_function (const void *);
294 static int toc_hash_eq (const void *, const void *);
295 static int constant_pool_expr_1 (rtx, int *, int *);
296 static bool constant_pool_expr_p (rtx);
297 static bool toc_relative_expr_p (rtx);
298 static bool legitimate_small_data_p (enum machine_mode, rtx);
299 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
300 static bool legitimate_indexed_address_p (rtx, int);
301 static bool legitimate_indirect_address_p (rtx, int);
302 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
303 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
304 static struct machine_function * rs6000_init_machine_status (void);
305 static bool rs6000_assemble_integer (rtx, unsigned int, int);
306 #ifdef HAVE_GAS_HIDDEN
307 static void rs6000_assemble_visibility (tree, int);
309 static int rs6000_ra_ever_killed (void);
310 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
311 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
312 extern const struct attribute_spec rs6000_attribute_table[];
313 static void rs6000_set_default_type_attributes (tree);
314 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
315 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
316 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
318 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
319 static bool rs6000_return_in_memory (tree, tree);
320 static void rs6000_file_start (void);
322 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
323 static void rs6000_elf_asm_out_constructor (rtx, int);
324 static void rs6000_elf_asm_out_destructor (rtx, int);
325 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
326 static void rs6000_elf_unique_section (tree, int);
327 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
328 unsigned HOST_WIDE_INT);
329 static void rs6000_elf_encode_section_info (tree, rtx, int)
331 static bool rs6000_elf_in_small_data_p (tree);
334 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
335 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
336 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
337 static void rs6000_xcoff_unique_section (tree, int);
338 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
339 unsigned HOST_WIDE_INT);
340 static const char * rs6000_xcoff_strip_name_encoding (const char *);
341 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
342 static void rs6000_xcoff_file_start (void);
343 static void rs6000_xcoff_file_end (void);
346 static bool rs6000_binds_local_p (tree);
348 static int rs6000_use_dfa_pipeline_interface (void);
349 static int rs6000_variable_issue (FILE *, int, rtx, int);
350 static bool rs6000_rtx_costs (rtx, int, int, int *);
351 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
352 static bool is_microcoded_insn (rtx);
353 static int is_dispatch_slot_restricted (rtx);
354 static bool is_cracked_insn (rtx);
355 static bool is_branch_slot_insn (rtx);
356 static int rs6000_adjust_priority (rtx, int);
357 static int rs6000_issue_rate (void);
358 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
359 static rtx get_next_active_insn (rtx, rtx);
360 static bool insn_terminates_group_p (rtx , enum group_termination);
361 static bool is_costly_group (rtx *, rtx);
362 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
363 static int redefine_groups (FILE *, int, rtx, rtx);
364 static int pad_groups (FILE *, int, rtx, rtx);
365 static void rs6000_sched_finish (FILE *, int);
366 static int rs6000_use_sched_lookahead (void);
368 static void rs6000_init_builtins (void);
369 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
370 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
371 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
372 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
373 static void altivec_init_builtins (void);
374 static void rs6000_common_init_builtins (void);
375 static void rs6000_init_libfuncs (void);
377 static void enable_mask_for_builtins (struct builtin_description *, int,
378 enum rs6000_builtins,
379 enum rs6000_builtins);
380 static void spe_init_builtins (void);
381 static rtx spe_expand_builtin (tree, rtx, bool *);
382 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
383 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
384 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
385 static rs6000_stack_t *rs6000_stack_info (void);
386 static void debug_stack_info (rs6000_stack_t *);
388 static rtx altivec_expand_builtin (tree, rtx, bool *);
389 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
390 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
391 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
392 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
393 static rtx altivec_expand_predicate_builtin (enum insn_code,
394 const char *, tree, rtx);
395 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
396 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
397 static void rs6000_parse_abi_options (void);
398 static void rs6000_parse_alignment_option (void);
399 static void rs6000_parse_tls_size_option (void);
400 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
401 static int first_altivec_reg_to_save (void);
402 static unsigned int compute_vrsave_mask (void);
403 static void is_altivec_return_reg (rtx, void *);
404 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
405 int easy_vector_constant (rtx, enum machine_mode);
406 static int easy_vector_same (rtx, enum machine_mode);
407 static bool is_ev64_opaque_type (tree);
408 static rtx rs6000_dwarf_register_span (rtx);
409 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
410 static rtx rs6000_tls_get_addr (void);
411 static rtx rs6000_got_sym (void);
412 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
413 static const char *rs6000_get_some_local_dynamic_name (void);
414 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
415 static rtx rs6000_complex_function_value (enum machine_mode);
416 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
417 enum machine_mode, tree);
418 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
419 enum machine_mode, tree, int);
420 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
421 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
422 enum machine_mode, tree,
425 static void macho_branch_islands (void);
426 static void add_compiler_branch_island (tree, tree, int);
427 static int no_previous_def (tree function_name);
428 static tree get_prev_label (tree function_name);
431 static tree rs6000_build_builtin_va_list (void);
433 /* Hash table stuff for keeping track of TOC entries. */
435 struct toc_hash_struct GTY(())
437 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
438 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
440 enum machine_mode key_mode;
444 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
446 /* Default register names. */
447 char rs6000_reg_names[][8] =
449 "0", "1", "2", "3", "4", "5", "6", "7",
450 "8", "9", "10", "11", "12", "13", "14", "15",
451 "16", "17", "18", "19", "20", "21", "22", "23",
452 "24", "25", "26", "27", "28", "29", "30", "31",
453 "0", "1", "2", "3", "4", "5", "6", "7",
454 "8", "9", "10", "11", "12", "13", "14", "15",
455 "16", "17", "18", "19", "20", "21", "22", "23",
456 "24", "25", "26", "27", "28", "29", "30", "31",
457 "mq", "lr", "ctr","ap",
458 "0", "1", "2", "3", "4", "5", "6", "7",
460 /* AltiVec registers. */
461 "0", "1", "2", "3", "4", "5", "6", "7",
462 "8", "9", "10", "11", "12", "13", "14", "15",
463 "16", "17", "18", "19", "20", "21", "22", "23",
464 "24", "25", "26", "27", "28", "29", "30", "31",
470 #ifdef TARGET_REGNAMES
471 static const char alt_reg_names[][8] =
473 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
474 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
475 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
476 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
477 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
478 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
479 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
480 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
481 "mq", "lr", "ctr", "ap",
482 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
484 /* AltiVec registers. */
485 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
486 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
487 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
488 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
495 #ifndef MASK_STRICT_ALIGN
496 #define MASK_STRICT_ALIGN 0
498 #ifndef TARGET_PROFILE_KERNEL
499 #define TARGET_PROFILE_KERNEL 0
502 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
503 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
505 /* Return 1 for a symbol ref for a thread-local storage symbol. */
506 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
507 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
509 /* Initialize the GCC target structure. */
510 #undef TARGET_ATTRIBUTE_TABLE
511 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
512 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
513 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
515 #undef TARGET_ASM_ALIGNED_DI_OP
516 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
518 /* Default unaligned ops are only provided for ELF. Find the ops needed
519 for non-ELF systems. */
520 #ifndef OBJECT_FORMAT_ELF
522 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
524 #undef TARGET_ASM_UNALIGNED_HI_OP
525 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
526 #undef TARGET_ASM_UNALIGNED_SI_OP
527 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
528 #undef TARGET_ASM_UNALIGNED_DI_OP
529 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
532 #undef TARGET_ASM_UNALIGNED_HI_OP
533 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
534 #undef TARGET_ASM_UNALIGNED_SI_OP
535 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
539 /* This hook deals with fixups for relocatable code and DI-mode objects
541 #undef TARGET_ASM_INTEGER
542 #define TARGET_ASM_INTEGER rs6000_assemble_integer
544 #ifdef HAVE_GAS_HIDDEN
545 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
546 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
549 #undef TARGET_HAVE_TLS
550 #define TARGET_HAVE_TLS HAVE_AS_TLS
552 #undef TARGET_CANNOT_FORCE_CONST_MEM
553 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
555 #undef TARGET_ASM_FUNCTION_PROLOGUE
556 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
557 #undef TARGET_ASM_FUNCTION_EPILOGUE
558 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
560 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
561 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
562 #undef TARGET_SCHED_VARIABLE_ISSUE
563 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
565 #undef TARGET_SCHED_ISSUE_RATE
566 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
567 #undef TARGET_SCHED_ADJUST_COST
568 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
569 #undef TARGET_SCHED_ADJUST_PRIORITY
570 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
571 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
572 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
573 #undef TARGET_SCHED_FINISH
574 #define TARGET_SCHED_FINISH rs6000_sched_finish
576 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
577 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
579 #undef TARGET_INIT_BUILTINS
580 #define TARGET_INIT_BUILTINS rs6000_init_builtins
582 #undef TARGET_EXPAND_BUILTIN
583 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
585 #undef TARGET_INIT_LIBFUNCS
586 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
589 #undef TARGET_BINDS_LOCAL_P
590 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
593 #undef TARGET_ASM_OUTPUT_MI_THUNK
594 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
596 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
597 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
599 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
600 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
602 #undef TARGET_RTX_COSTS
603 #define TARGET_RTX_COSTS rs6000_rtx_costs
604 #undef TARGET_ADDRESS_COST
605 #define TARGET_ADDRESS_COST hook_int_rtx_0
607 #undef TARGET_VECTOR_OPAQUE_P
608 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
610 #undef TARGET_DWARF_REGISTER_SPAN
611 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
613 /* On rs6000, function arguments are promoted, as are function return
615 #undef TARGET_PROMOTE_FUNCTION_ARGS
616 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
617 #undef TARGET_PROMOTE_FUNCTION_RETURN
618 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
620 #undef TARGET_RETURN_IN_MEMORY
621 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
623 #undef TARGET_SETUP_INCOMING_VARARGS
624 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
626 /* Always strict argument naming on rs6000. */
627 #undef TARGET_STRICT_ARGUMENT_NAMING
628 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
629 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
630 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
631 #undef TARGET_SPLIT_COMPLEX_ARG
632 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
634 #undef TARGET_BUILD_BUILTIN_VA_LIST
635 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
637 struct gcc_target targetm = TARGET_INITIALIZER;
639 /* Override command line options. Mostly we process the processor
640 type and sometimes adjust other TARGET_ options. */
643 rs6000_override_options (const char *default_cpu)
646 struct rs6000_cpu_select *ptr;
649 /* Simplifications for entries below. */
652 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
653 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
656 /* This table occasionally claims that a processor does not support
657 a particular feature even though it does, but the feature is slower
658 than the alternative. Thus, it shouldn't be relied on as a
659 complete description of the processor's support.
661 Please keep this list in order, and don't forget to update the
662 documentation in invoke.texi when adding a new processor or
666 const char *const name; /* Canonical processor name. */
667 const enum processor_type processor; /* Processor type enum value. */
668 const int target_enable; /* Target flags to enable. */
669 } const processor_target_table[]
670 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
671 {"403", PROCESSOR_PPC403,
672 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
673 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
674 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
675 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
676 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
677 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
678 {"601", PROCESSOR_PPC601,
679 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
680 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
681 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
682 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
683 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
684 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
685 {"620", PROCESSOR_PPC620,
686 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
687 {"630", PROCESSOR_PPC630,
688 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
689 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
691 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
692 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
693 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
694 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
695 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
696 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
697 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
698 {"970", PROCESSOR_POWER4,
699 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
700 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
701 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
702 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
703 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
704 {"G5", PROCESSOR_POWER4,
705 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
706 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
707 {"power2", PROCESSOR_POWER,
708 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
709 {"power3", PROCESSOR_PPC630,
710 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
711 {"power4", PROCESSOR_POWER4,
712 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
713 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
714 {"powerpc64", PROCESSOR_POWERPC64,
715 POWERPC_BASE_MASK | MASK_POWERPC64},
716 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
717 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
718 {"rios2", PROCESSOR_RIOS2,
719 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
720 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
721 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
722 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
725 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
727 /* Some OSs don't support saving the high part of 64-bit registers on
728 context switch. Other OSs don't support saving Altivec registers.
729 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
730 settings; if the user wants either, the user must explicitly specify
731 them and we won't interfere with the user's specification. */
734 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
735 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
736 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
739 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
740 #ifdef OS_MISSING_POWERPC64
741 if (OS_MISSING_POWERPC64)
742 set_masks &= ~MASK_POWERPC64;
744 #ifdef OS_MISSING_ALTIVEC
745 if (OS_MISSING_ALTIVEC)
746 set_masks &= ~MASK_ALTIVEC;
749 /* Don't override these by the processor default if given explicitly. */
750 set_masks &= ~(target_flags_explicit
751 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
753 /* Identify the processor type. */
754 rs6000_select[0].string = default_cpu;
755 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
757 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
759 ptr = &rs6000_select[i];
760 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
762 for (j = 0; j < ptt_size; j++)
763 if (! strcmp (ptr->string, processor_target_table[j].name))
766 rs6000_cpu = processor_target_table[j].processor;
770 target_flags &= ~set_masks;
771 target_flags |= (processor_target_table[j].target_enable
778 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
785 /* If we are optimizing big endian systems for space, use the load/store
786 multiple and string instructions. */
787 if (BYTES_BIG_ENDIAN && optimize_size)
788 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
790 /* Don't allow -mmultiple or -mstring on little endian systems
791 unless the cpu is a 750, because the hardware doesn't support the
792 instructions used in little endian mode, and causes an alignment
793 trap. The 750 does not cause an alignment trap (except when the
794 target is unaligned). */
796 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
800 target_flags &= ~MASK_MULTIPLE;
801 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
802 warning ("-mmultiple is not supported on little endian systems");
807 target_flags &= ~MASK_STRING;
808 if ((target_flags_explicit & MASK_STRING) != 0)
809 warning ("-mstring is not supported on little endian systems");
813 /* Set debug flags */
814 if (rs6000_debug_name)
816 if (! strcmp (rs6000_debug_name, "all"))
817 rs6000_debug_stack = rs6000_debug_arg = 1;
818 else if (! strcmp (rs6000_debug_name, "stack"))
819 rs6000_debug_stack = 1;
820 else if (! strcmp (rs6000_debug_name, "arg"))
821 rs6000_debug_arg = 1;
823 error ("unknown -mdebug-%s switch", rs6000_debug_name);
826 if (rs6000_traceback_name)
828 if (! strncmp (rs6000_traceback_name, "full", 4))
829 rs6000_traceback = traceback_full;
830 else if (! strncmp (rs6000_traceback_name, "part", 4))
831 rs6000_traceback = traceback_part;
832 else if (! strncmp (rs6000_traceback_name, "no", 2))
833 rs6000_traceback = traceback_none;
835 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
836 rs6000_traceback_name);
839 /* Set size of long double */
840 rs6000_long_double_type_size = 64;
841 if (rs6000_long_double_size_string)
844 int size = strtol (rs6000_long_double_size_string, &tail, 10);
845 if (*tail != '\0' || (size != 64 && size != 128))
846 error ("Unknown switch -mlong-double-%s",
847 rs6000_long_double_size_string);
849 rs6000_long_double_type_size = size;
852 /* Set Altivec ABI as default for powerpc64 linux. */
853 if (TARGET_ELF && TARGET_64BIT)
855 rs6000_altivec_abi = 1;
856 rs6000_altivec_vrsave = 1;
859 /* Handle -mabi= options. */
860 rs6000_parse_abi_options ();
862 /* Handle -malign-XXXXX option. */
863 rs6000_parse_alignment_option ();
865 /* Handle generic -mFOO=YES/NO options. */
866 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
867 &rs6000_altivec_vrsave);
868 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
870 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
871 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
874 /* Handle -mtls-size option. */
875 rs6000_parse_tls_size_option ();
877 #ifdef SUBTARGET_OVERRIDE_OPTIONS
878 SUBTARGET_OVERRIDE_OPTIONS;
880 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
881 SUBSUBTARGET_OVERRIDE_OPTIONS;
886 /* The e500 does not have string instructions, and we set
887 MASK_STRING above when optimizing for size. */
888 if ((target_flags & MASK_STRING) != 0)
889 target_flags = target_flags & ~MASK_STRING;
891 /* No SPE means 64-bit long doubles, even if an E500. */
892 if (rs6000_spe_string != 0
893 && !strcmp (rs6000_spe_string, "no"))
894 rs6000_long_double_type_size = 64;
896 else if (rs6000_select[1].string != NULL)
898 /* For the powerpc-eabispe configuration, we set all these by
899 default, so let's unset them if we manually set another
900 CPU that is not the E500. */
901 if (rs6000_abi_string == 0)
903 if (rs6000_spe_string == 0)
905 if (rs6000_float_gprs_string == 0)
906 rs6000_float_gprs = 0;
907 if (rs6000_isel_string == 0)
909 if (rs6000_long_double_size_string == 0)
910 rs6000_long_double_type_size = 64;
913 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
914 using TARGET_OPTIONS to handle a toggle switch, but we're out of
915 bits in target_flags so TARGET_SWITCHES cannot be used.
916 Assumption here is that rs6000_longcall_switch points into the
917 text of the complete option, rather than being a copy, so we can
918 scan back for the presence or absence of the no- modifier. */
919 if (rs6000_longcall_switch)
921 const char *base = rs6000_longcall_switch;
922 while (base[-1] != 'm') base--;
924 if (*rs6000_longcall_switch != '\0')
925 error ("invalid option `%s'", base);
926 rs6000_default_long_calls = (base[0] != 'n');
929 /* Handle -m(no-)warn-altivec-long similarly. */
930 if (rs6000_warn_altivec_long_switch)
932 const char *base = rs6000_warn_altivec_long_switch;
933 while (base[-1] != 'm') base--;
935 if (*rs6000_warn_altivec_long_switch != '\0')
936 error ("invalid option `%s'", base);
937 rs6000_warn_altivec_long = (base[0] != 'n');
940 /* Handle -mprioritize-restricted-insns option. */
941 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
942 if (rs6000_sched_restricted_insns_priority_str)
943 rs6000_sched_restricted_insns_priority =
944 atoi (rs6000_sched_restricted_insns_priority_str);
946 /* Handle -msched-costly-dep option. */
947 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
948 if (rs6000_sched_costly_dep_str)
950 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
951 rs6000_sched_costly_dep = no_dep_costly;
952 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
953 rs6000_sched_costly_dep = all_deps_costly;
954 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
955 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
956 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
957 rs6000_sched_costly_dep = store_to_load_dep_costly;
959 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
962 /* Handle -minsert-sched-nops option. */
963 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
964 if (rs6000_sched_insert_nops_str)
966 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
967 rs6000_sched_insert_nops = sched_finish_none;
968 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
969 rs6000_sched_insert_nops = sched_finish_pad_groups;
970 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
971 rs6000_sched_insert_nops = sched_finish_regroup_exact;
973 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
976 #ifdef TARGET_REGNAMES
977 /* If the user desires alternate register names, copy in the
978 alternate names now. */
980 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
983 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
984 If -maix-struct-return or -msvr4-struct-return was explicitly
985 used, don't override with the ABI default. */
986 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
988 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
989 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
991 target_flags |= MASK_AIX_STRUCT_RET;
994 if (TARGET_LONG_DOUBLE_128
995 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
996 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
998 /* Allocate an alias set for register saves & restores from stack. */
999 rs6000_sr_alias_set = new_alias_set ();
1002 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1004 /* We can only guarantee the availability of DI pseudo-ops when
1005 assembling for 64-bit targets. */
1008 targetm.asm_out.aligned_op.di = NULL;
1009 targetm.asm_out.unaligned_op.di = NULL;
1012 /* Set maximum branch target alignment at two instructions, eight bytes. */
1013 align_jumps_max_skip = 8;
1014 align_loops_max_skip = 8;
1016 /* Arrange to save and restore machine status around nested functions. */
1017 init_machine_status = rs6000_init_machine_status;
1019 /* We should always be splitting complex arguments, but we can't break
1020 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1021 if (DEFAULT_ABI != ABI_AIX)
1022 targetm.calls.split_complex_arg = NULL;
1025 /* Handle generic options of the form -mfoo=yes/no.
1026 NAME is the option name.
1027 VALUE is the option value.
1028 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1029 whether the option value is 'yes' or 'no' respectively. */
1031 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1035 else if (!strcmp (value, "yes"))
1037 else if (!strcmp (value, "no"))
1040 error ("unknown -m%s= option specified: '%s'", name, value);
1043 /* Handle -mabi= options. */
1045 rs6000_parse_abi_options (void)
1047 if (rs6000_abi_string == 0)
1049 else if (! strcmp (rs6000_abi_string, "altivec"))
1051 rs6000_altivec_abi = 1;
1054 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1055 rs6000_altivec_abi = 0;
1056 else if (! strcmp (rs6000_abi_string, "spe"))
1059 rs6000_altivec_abi = 0;
1060 if (!TARGET_SPE_ABI)
1061 error ("not configured for ABI: '%s'", rs6000_abi_string);
1064 else if (! strcmp (rs6000_abi_string, "no-spe"))
1067 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1070 /* Handle -malign-XXXXXX options. */
1072 rs6000_parse_alignment_option (void)
1074 if (rs6000_alignment_string == 0)
1076 else if (! strcmp (rs6000_alignment_string, "power"))
1077 rs6000_alignment_flags = MASK_ALIGN_POWER;
1078 else if (! strcmp (rs6000_alignment_string, "natural"))
1079 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1081 error ("unknown -malign-XXXXX option specified: '%s'",
1082 rs6000_alignment_string);
1085 /* Validate and record the size specified with the -mtls-size option. */
1088 rs6000_parse_tls_size_option (void)
1090 if (rs6000_tls_size_string == 0)
1092 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1093 rs6000_tls_size = 16;
1094 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1095 rs6000_tls_size = 32;
1096 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1097 rs6000_tls_size = 64;
1099 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1103 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1107 /* Do anything needed at the start of the asm file. */
1110 rs6000_file_start (void)
1114 const char *start = buffer;
1115 struct rs6000_cpu_select *ptr;
1116 const char *default_cpu = TARGET_CPU_DEFAULT;
1117 FILE *file = asm_out_file;
1119 default_file_start ();
1121 #ifdef TARGET_BI_ARCH
1122 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1126 if (flag_verbose_asm)
1128 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1129 rs6000_select[0].string = default_cpu;
1131 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1133 ptr = &rs6000_select[i];
1134 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1136 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1141 #ifdef USING_ELFOS_H
1142 switch (rs6000_sdata)
1144 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1145 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1146 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1147 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1150 if (rs6000_sdata && g_switch_value)
1152 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1163 /* Return nonzero if this function is known to have a null epilogue. */
1166 direct_return (void)
1168 if (reload_completed)
1170 rs6000_stack_t *info = rs6000_stack_info ();
1172 if (info->first_gp_reg_save == 32
1173 && info->first_fp_reg_save == 64
1174 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1175 && ! info->lr_save_p
1176 && ! info->cr_save_p
1177 && info->vrsave_mask == 0
1185 /* Returns 1 always. */
1188 any_operand (rtx op ATTRIBUTE_UNUSED,
1189 enum machine_mode mode ATTRIBUTE_UNUSED)
1194 /* Returns 1 if op is the count register. */
1196 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1198 if (GET_CODE (op) != REG)
1201 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1204 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1210 /* Returns 1 if op is an altivec register. */
1212 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1215 return (register_operand (op, mode)
1216 && (GET_CODE (op) != REG
1217 || REGNO (op) > FIRST_PSEUDO_REGISTER
1218 || ALTIVEC_REGNO_P (REGNO (op))));
1222 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1224 if (GET_CODE (op) != REG)
1227 if (XER_REGNO_P (REGNO (op)))
1233 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1234 by such constants completes more quickly. */
1237 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1239 return ( GET_CODE (op) == CONST_INT
1240 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1243 /* Return 1 if OP is a constant that can fit in a D field. */
1246 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1248 return (GET_CODE (op) == CONST_INT
1249 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1252 /* Similar for an unsigned D field. */
1255 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1257 return (GET_CODE (op) == CONST_INT
1258 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1261 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1264 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1266 return (GET_CODE (op) == CONST_INT
1267 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1270 /* Returns 1 if OP is a CONST_INT that is a positive value
1271 and an exact power of 2. */
1274 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1276 return (GET_CODE (op) == CONST_INT
1278 && exact_log2 (INTVAL (op)) >= 0);
1281 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1285 gpc_reg_operand (rtx op, enum machine_mode mode)
1287 return (register_operand (op, mode)
1288 && (GET_CODE (op) != REG
1289 || (REGNO (op) >= ARG_POINTER_REGNUM
1290 && !XER_REGNO_P (REGNO (op)))
1291 || REGNO (op) < MQ_REGNO));
1294 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1298 cc_reg_operand (rtx op, enum machine_mode mode)
1300 return (register_operand (op, mode)
1301 && (GET_CODE (op) != REG
1302 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1303 || CR_REGNO_P (REGNO (op))));
1306 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1307 CR field that isn't CR0. */
1310 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1312 return (register_operand (op, mode)
1313 && (GET_CODE (op) != REG
1314 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1315 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1318 /* Returns 1 if OP is either a constant integer valid for a D-field or
1319 a non-special register. If a register, it must be in the proper
1320 mode unless MODE is VOIDmode. */
1323 reg_or_short_operand (rtx op, enum machine_mode mode)
1325 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1328 /* Similar, except check if the negation of the constant would be
1329 valid for a D-field. */
1332 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1334 if (GET_CODE (op) == CONST_INT)
1335 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1337 return gpc_reg_operand (op, mode);
1340 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1341 a non-special register. If a register, it must be in the proper
1342 mode unless MODE is VOIDmode. */
1345 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1347 if (gpc_reg_operand (op, mode))
1349 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1356 /* Return 1 if the operand is either a register or an integer whose
1357 high-order 16 bits are zero. */
1360 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1362 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1365 /* Return 1 is the operand is either a non-special register or ANY
1366 constant integer. */
1369 reg_or_cint_operand (rtx op, enum machine_mode mode)
1371 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1374 /* Return 1 is the operand is either a non-special register or ANY
1375 32-bit signed constant integer. */
1378 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1380 return (gpc_reg_operand (op, mode)
1381 || (GET_CODE (op) == CONST_INT
1382 #if HOST_BITS_PER_WIDE_INT != 32
1383 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1384 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1389 /* Return 1 is the operand is either a non-special register or a 32-bit
1390 signed constant integer valid for 64-bit addition. */
1393 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1395 return (gpc_reg_operand (op, mode)
1396 || (GET_CODE (op) == CONST_INT
1397 #if HOST_BITS_PER_WIDE_INT == 32
1398 && INTVAL (op) < 0x7fff8000
1400 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1406 /* Return 1 is the operand is either a non-special register or a 32-bit
1407 signed constant integer valid for 64-bit subtraction. */
1410 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1412 return (gpc_reg_operand (op, mode)
1413 || (GET_CODE (op) == CONST_INT
1414 #if HOST_BITS_PER_WIDE_INT == 32
1415 && (- INTVAL (op)) < 0x7fff8000
1417 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1423 /* Return 1 is the operand is either a non-special register or ANY
1424 32-bit unsigned constant integer. */
1427 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1429 if (GET_CODE (op) == CONST_INT)
1431 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1433 if (GET_MODE_BITSIZE (mode) <= 32)
1436 if (INTVAL (op) < 0)
1440 return ((INTVAL (op) & GET_MODE_MASK (mode)
1441 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1443 else if (GET_CODE (op) == CONST_DOUBLE)
1445 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1449 return CONST_DOUBLE_HIGH (op) == 0;
1452 return gpc_reg_operand (op, mode);
1455 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1458 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1460 return (GET_CODE (op) == SYMBOL_REF
1461 || GET_CODE (op) == CONST
1462 || GET_CODE (op) == LABEL_REF);
1465 /* Return 1 if the operand is a simple references that can be loaded via
1466 the GOT (labels involving addition aren't allowed). */
1469 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1471 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1474 /* Return the number of instructions it takes to form a constant in an
1475 integer register. */
1478 num_insns_constant_wide (HOST_WIDE_INT value)
1480 /* signed constant loadable with {cal|addi} */
1481 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1484 /* constant loadable with {cau|addis} */
1485 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1488 #if HOST_BITS_PER_WIDE_INT == 64
1489 else if (TARGET_POWERPC64)
1491 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1492 HOST_WIDE_INT high = value >> 31;
1494 if (high == 0 || high == -1)
1500 return num_insns_constant_wide (high) + 1;
1502 return (num_insns_constant_wide (high)
1503 + num_insns_constant_wide (low) + 1);
1512 num_insns_constant (rtx op, enum machine_mode mode)
1514 if (GET_CODE (op) == CONST_INT)
1516 #if HOST_BITS_PER_WIDE_INT == 64
1517 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1518 && mask64_operand (op, mode))
1522 return num_insns_constant_wide (INTVAL (op));
1525 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1530 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1531 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1532 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1535 else if (GET_CODE (op) == CONST_DOUBLE)
1541 int endian = (WORDS_BIG_ENDIAN == 0);
1543 if (mode == VOIDmode || mode == DImode)
1545 high = CONST_DOUBLE_HIGH (op);
1546 low = CONST_DOUBLE_LOW (op);
1550 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1551 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1553 low = l[1 - endian];
1557 return (num_insns_constant_wide (low)
1558 + num_insns_constant_wide (high));
1562 if (high == 0 && low >= 0)
1563 return num_insns_constant_wide (low);
1565 else if (high == -1 && low < 0)
1566 return num_insns_constant_wide (low);
1568 else if (mask64_operand (op, mode))
1572 return num_insns_constant_wide (high) + 1;
1575 return (num_insns_constant_wide (high)
1576 + num_insns_constant_wide (low) + 1);
1584 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1585 register with one instruction per word. We only do this if we can
1586 safely read CONST_DOUBLE_{LOW,HIGH}. */
1589 easy_fp_constant (rtx op, enum machine_mode mode)
1591 if (GET_CODE (op) != CONST_DOUBLE
1592 || GET_MODE (op) != mode
1593 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1596 /* Consider all constants with -msoft-float to be easy. */
1597 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1601 /* If we are using V.4 style PIC, consider all constants to be hard. */
1602 if (flag_pic && DEFAULT_ABI == ABI_V4)
1605 #ifdef TARGET_RELOCATABLE
1606 /* Similarly if we are using -mrelocatable, consider all constants
1608 if (TARGET_RELOCATABLE)
1617 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1618 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1620 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1621 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1622 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1623 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1626 else if (mode == DFmode)
1631 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1632 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1634 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1635 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1638 else if (mode == SFmode)
1643 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1644 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1646 return num_insns_constant_wide (l) == 1;
1649 else if (mode == DImode)
1650 return ((TARGET_POWERPC64
1651 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1652 || (num_insns_constant (op, DImode) <= 2));
1654 else if (mode == SImode)
1660 /* Return nonzero if all elements of a vector have the same value. */
1663 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1667 units = CONST_VECTOR_NUNITS (op);
1669 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1670 for (i = 1; i < units; ++i)
1671 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1678 /* Return 1 if the operand is a CONST_INT and can be put into a
1679 register without using memory. */
1682 easy_vector_constant (rtx op, enum machine_mode mode)
1686 if (GET_CODE (op) != CONST_VECTOR
1691 if (zero_constant (op, mode)
1692 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1693 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1696 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1699 if (TARGET_SPE && mode == V1DImode)
1702 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1703 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1705 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1707 evmergelo r0, r0, r0
1710 I don't know how efficient it would be to allow bigger constants,
1711 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1712 instructions is better than a 64-bit memory load, but I don't
1713 have the e500 timing specs. */
1714 if (TARGET_SPE && mode == V2SImode
1715 && cst >= -0x7fff && cst <= 0x7fff
1716 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1723 if (EASY_VECTOR_15 (cst, op, mode))
1725 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1729 if (EASY_VECTOR_15 (cst, op, mode))
1731 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1735 if (EASY_VECTOR_15 (cst, op, mode))
1741 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1747 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1750 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1754 if (!easy_vector_constant (op, mode))
1757 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1759 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1763 output_vec_const_move (rtx *operands)
1766 enum machine_mode mode;
1772 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1773 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1774 mode = GET_MODE (dest);
1778 if (zero_constant (vec, mode))
1779 return "vxor %0,%0,%0";
1780 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1782 else if (easy_vector_constant (vec, mode))
1784 operands[1] = GEN_INT (cst);
1788 if (EASY_VECTOR_15 (cst, vec, mode))
1790 operands[1] = GEN_INT (cst);
1791 return "vspltisw %0,%1";
1795 if (EASY_VECTOR_15 (cst, vec, mode))
1797 operands[1] = GEN_INT (cst);
1798 return "vspltish %0,%1";
1802 if (EASY_VECTOR_15 (cst, vec, mode))
1804 operands[1] = GEN_INT (cst);
1805 return "vspltisb %0,%1";
1817 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1818 pattern of V1DI, V4HI, and V2SF.
1820 FIXME: We should probably return # and add post reload
1821 splitters for these, but this way is so easy ;-).
1823 operands[1] = GEN_INT (cst);
1824 operands[2] = GEN_INT (cst2);
1826 return "li %0,%1\n\tevmergelo %0,%0,%0";
1828 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1834 /* Return 1 if the operand is the constant 0. This works for scalars
1835 as well as vectors. */
1837 zero_constant (rtx op, enum machine_mode mode)
1839 return op == CONST0_RTX (mode);
1842 /* Return 1 if the operand is 0.0. */
1844 zero_fp_constant (rtx op, enum machine_mode mode)
1846 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1849 /* Return 1 if the operand is in volatile memory. Note that during
1850 the RTL generation phase, memory_operand does not return TRUE for
1851 volatile memory references. So this function allows us to
1852 recognize volatile references where its safe. */
1855 volatile_mem_operand (rtx op, enum machine_mode mode)
1857 if (GET_CODE (op) != MEM)
1860 if (!MEM_VOLATILE_P (op))
1863 if (mode != GET_MODE (op))
1866 if (reload_completed)
1867 return memory_operand (op, mode);
1869 if (reload_in_progress)
1870 return strict_memory_address_p (mode, XEXP (op, 0));
1872 return memory_address_p (mode, XEXP (op, 0));
1875 /* Return 1 if the operand is an offsettable memory operand. */
1878 offsettable_mem_operand (rtx op, enum machine_mode mode)
1880 return ((GET_CODE (op) == MEM)
1881 && offsettable_address_p (reload_completed || reload_in_progress,
1882 mode, XEXP (op, 0)));
1885 /* Return 1 if the operand is either an easy FP constant (see above) or
1889 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1891 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1894 /* Return 1 if the operand is either a non-special register or an item
1895 that can be used as the operand of a `mode' add insn. */
1898 add_operand (rtx op, enum machine_mode mode)
1900 if (GET_CODE (op) == CONST_INT)
1901 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1902 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1904 return gpc_reg_operand (op, mode);
1907 /* Return 1 if OP is a constant but not a valid add_operand. */
1910 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1912 return (GET_CODE (op) == CONST_INT
1913 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1914 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1917 /* Return 1 if the operand is a non-special register or a constant that
1918 can be used as the operand of an OR or XOR insn on the RS/6000. */
1921 logical_operand (rtx op, enum machine_mode mode)
1923 HOST_WIDE_INT opl, oph;
1925 if (gpc_reg_operand (op, mode))
1928 if (GET_CODE (op) == CONST_INT)
1930 opl = INTVAL (op) & GET_MODE_MASK (mode);
1932 #if HOST_BITS_PER_WIDE_INT <= 32
1933 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1937 else if (GET_CODE (op) == CONST_DOUBLE)
1939 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1942 opl = CONST_DOUBLE_LOW (op);
1943 oph = CONST_DOUBLE_HIGH (op);
1950 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1951 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1954 /* Return 1 if C is a constant that is not a logical operand (as
1955 above), but could be split into one. */
1958 non_logical_cint_operand (rtx op, enum machine_mode mode)
1960 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1961 && ! logical_operand (op, mode)
1962 && reg_or_logical_cint_operand (op, mode));
1965 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1966 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1967 Reject all ones and all zeros, since these should have been optimized
1968 away and confuse the making of MB and ME. */
1971 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1973 HOST_WIDE_INT c, lsb;
1975 if (GET_CODE (op) != CONST_INT)
1980 /* Fail in 64-bit mode if the mask wraps around because the upper
1981 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1982 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1985 /* We don't change the number of transitions by inverting,
1986 so make sure we start with the LS bit zero. */
1990 /* Reject all zeros or all ones. */
1994 /* Find the first transition. */
1997 /* Invert to look for a second transition. */
2000 /* Erase first transition. */
2003 /* Find the second transition (if any). */
2006 /* Match if all the bits above are 1's (or c is zero). */
2010 /* Return 1 for the PowerPC64 rlwinm corner case. */
2013 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2015 HOST_WIDE_INT c, lsb;
2017 if (GET_CODE (op) != CONST_INT)
2022 if ((c & 0x80000001) != 0x80000001)
2036 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2037 It is if there are no more than one 1->0 or 0->1 transitions.
2038 Reject all zeros, since zero should have been optimized away and
2039 confuses the making of MB and ME. */
2042 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2044 if (GET_CODE (op) == CONST_INT)
2046 HOST_WIDE_INT c, lsb;
2050 /* Reject all zeros. */
2054 /* We don't change the number of transitions by inverting,
2055 so make sure we start with the LS bit zero. */
2059 /* Find the transition, and check that all bits above are 1's. */
2062 /* Match if all the bits above are 1's (or c is zero). */
2068 /* Like mask64_operand, but allow up to three transitions. This
2069 predicate is used by insn patterns that generate two rldicl or
2070 rldicr machine insns. */
2073 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2075 if (GET_CODE (op) == CONST_INT)
2077 HOST_WIDE_INT c, lsb;
2081 /* Disallow all zeros. */
2085 /* We don't change the number of transitions by inverting,
2086 so make sure we start with the LS bit zero. */
2090 /* Find the first transition. */
2093 /* Invert to look for a second transition. */
2096 /* Erase first transition. */
2099 /* Find the second transition. */
2102 /* Invert to look for a third transition. */
2105 /* Erase second transition. */
2108 /* Find the third transition (if any). */
2111 /* Match if all the bits above are 1's (or c is zero). */
2117 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2118 implement ANDing by the mask IN. */
2120 build_mask64_2_operands (rtx in, rtx *out)
2122 #if HOST_BITS_PER_WIDE_INT >= 64
2123 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2126 if (GET_CODE (in) != CONST_INT)
2132 /* Assume c initially something like 0x00fff000000fffff. The idea
2133 is to rotate the word so that the middle ^^^^^^ group of zeros
2134 is at the MS end and can be cleared with an rldicl mask. We then
2135 rotate back and clear off the MS ^^ group of zeros with a
2137 c = ~c; /* c == 0xff000ffffff00000 */
2138 lsb = c & -c; /* lsb == 0x0000000000100000 */
2139 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2140 c = ~c; /* c == 0x00fff000000fffff */
2141 c &= -lsb; /* c == 0x00fff00000000000 */
2142 lsb = c & -c; /* lsb == 0x0000100000000000 */
2143 c = ~c; /* c == 0xff000fffffffffff */
2144 c &= -lsb; /* c == 0xff00000000000000 */
2146 while ((lsb >>= 1) != 0)
2147 shift++; /* shift == 44 on exit from loop */
2148 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2149 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2150 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2154 /* Assume c initially something like 0xff000f0000000000. The idea
2155 is to rotate the word so that the ^^^ middle group of zeros
2156 is at the LS end and can be cleared with an rldicr mask. We then
2157 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2159 lsb = c & -c; /* lsb == 0x0000010000000000 */
2160 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2161 c = ~c; /* c == 0x00fff0ffffffffff */
2162 c &= -lsb; /* c == 0x00fff00000000000 */
2163 lsb = c & -c; /* lsb == 0x0000100000000000 */
2164 c = ~c; /* c == 0xff000fffffffffff */
2165 c &= -lsb; /* c == 0xff00000000000000 */
2167 while ((lsb >>= 1) != 0)
2168 shift++; /* shift == 44 on exit from loop */
2169 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2170 m1 >>= shift; /* m1 == 0x0000000000000fff */
2171 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2174 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2175 masks will be all 1's. We are guaranteed more than one transition. */
2176 out[0] = GEN_INT (64 - shift);
2177 out[1] = GEN_INT (m1);
2178 out[2] = GEN_INT (shift);
2179 out[3] = GEN_INT (m2);
2187 /* Return 1 if the operand is either a non-special register or a constant
2188 that can be used as the operand of a PowerPC64 logical AND insn. */
2191 and64_operand (rtx op, enum machine_mode mode)
2193 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2194 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2196 return (logical_operand (op, mode) || mask64_operand (op, mode));
2199 /* Like the above, but also match constants that can be implemented
2200 with two rldicl or rldicr insns. */
2203 and64_2_operand (rtx op, enum machine_mode mode)
2205 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2206 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2208 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2211 /* Return 1 if the operand is either a non-special register or a
2212 constant that can be used as the operand of an RS/6000 logical AND insn. */
2215 and_operand (rtx op, enum machine_mode mode)
2217 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2218 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2220 return (logical_operand (op, mode) || mask_operand (op, mode));
2223 /* Return 1 if the operand is a general register or memory operand. */
2226 reg_or_mem_operand (rtx op, enum machine_mode mode)
2228 return (gpc_reg_operand (op, mode)
2229 || memory_operand (op, mode)
2230 || macho_lo_sum_memory_operand (op, mode)
2231 || volatile_mem_operand (op, mode));
2234 /* Return 1 if the operand is a general register or memory operand without
2235 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2239 lwa_operand (rtx op, enum machine_mode mode)
2243 if (reload_completed && GET_CODE (inner) == SUBREG)
2244 inner = SUBREG_REG (inner);
2246 return gpc_reg_operand (inner, mode)
2247 || (memory_operand (inner, mode)
2248 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2249 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2250 && (GET_CODE (XEXP (inner, 0)) != PLUS
2251 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2252 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2255 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2258 symbol_ref_operand (rtx op, enum machine_mode mode)
2260 if (mode != VOIDmode && GET_MODE (op) != mode)
2263 return (GET_CODE (op) == SYMBOL_REF
2264 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2267 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2268 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2271 call_operand (rtx op, enum machine_mode mode)
2273 if (mode != VOIDmode && GET_MODE (op) != mode)
2276 return (GET_CODE (op) == SYMBOL_REF
2277 || (GET_CODE (op) == REG
2278 && (REGNO (op) == LINK_REGISTER_REGNUM
2279 || REGNO (op) == COUNT_REGISTER_REGNUM
2280 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2283 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2287 current_file_function_operand (rtx op,
2288 enum machine_mode mode ATTRIBUTE_UNUSED)
2290 return (GET_CODE (op) == SYMBOL_REF
2291 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2292 && (SYMBOL_REF_LOCAL_P (op)
2293 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2296 /* Return 1 if this operand is a valid input for a move insn. */
2299 input_operand (rtx op, enum machine_mode mode)
2301 /* Memory is always valid. */
2302 if (memory_operand (op, mode))
2305 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2306 if (GET_CODE (op) == CONSTANT_P_RTX)
2309 /* For floating-point, easy constants are valid. */
2310 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2312 && easy_fp_constant (op, mode))
2315 /* Allow any integer constant. */
2316 if (GET_MODE_CLASS (mode) == MODE_INT
2317 && (GET_CODE (op) == CONST_INT
2318 || GET_CODE (op) == CONST_DOUBLE))
2321 /* Allow easy vector constants. */
2322 if (GET_CODE (op) == CONST_VECTOR
2323 && easy_vector_constant (op, mode))
2326 /* For floating-point or multi-word mode, the only remaining valid type
2328 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2329 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2330 return register_operand (op, mode);
2332 /* The only cases left are integral modes one word or smaller (we
2333 do not get called for MODE_CC values). These can be in any
2335 if (register_operand (op, mode))
2338 /* A SYMBOL_REF referring to the TOC is valid. */
2339 if (legitimate_constant_pool_address_p (op))
2342 /* A constant pool expression (relative to the TOC) is valid */
2343 if (toc_relative_expr_p (op))
2346 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2348 if (DEFAULT_ABI == ABI_V4
2349 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2350 && small_data_operand (op, Pmode))
2357 /* Darwin, AIX increases natural record alignment to doubleword if the first
2358 field is an FP double while the FP fields remain word aligned. */
2361 rs6000_special_round_type_align (tree type, int computed, int specified)
2363 tree field = TYPE_FIELDS (type);
2365 /* Skip all the static variables only if ABI is greater than
2367 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2368 field = TREE_CHAIN (field);
2370 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2371 return MAX (computed, specified);
2373 return MAX (MAX (computed, specified), 64);
2376 /* Return 1 for an operand in small memory on V.4/eabi. */
2379 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2380 enum machine_mode mode ATTRIBUTE_UNUSED)
2385 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2388 if (DEFAULT_ABI != ABI_V4)
2391 if (GET_CODE (op) == SYMBOL_REF)
2394 else if (GET_CODE (op) != CONST
2395 || GET_CODE (XEXP (op, 0)) != PLUS
2396 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2397 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2402 rtx sum = XEXP (op, 0);
2403 HOST_WIDE_INT summand;
2405 /* We have to be careful here, because it is the referenced address
2406 that must be 32k from _SDA_BASE_, not just the symbol. */
2407 summand = INTVAL (XEXP (sum, 1));
2408 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2411 sym_ref = XEXP (sum, 0);
2414 return SYMBOL_REF_SMALL_P (sym_ref);
2420 /* Return true, if operand is a memory operand and has a
2421 displacement divisible by 4. */
2424 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2429 if (!memory_operand (op, mode))
2432 addr = XEXP (op, 0);
2433 if (GET_CODE (addr) == PLUS
2434 && GET_CODE (XEXP (addr, 0)) == REG
2435 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2436 off = INTVAL (XEXP (addr, 1));
2438 return (off % 4) == 0;
2441 /* Return true if either operand is a general purpose register. */
2444 gpr_or_gpr_p (rtx op0, rtx op1)
2446 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2447 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2451 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2454 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2456 switch (GET_CODE(op))
2459 if (RS6000_SYMBOL_REF_TLS_P (op))
2461 else if (CONSTANT_POOL_ADDRESS_P (op))
2463 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2471 else if (! strcmp (XSTR (op, 0), toc_label_name))
2480 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2481 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2483 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2492 constant_pool_expr_p (rtx op)
2496 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2500 toc_relative_expr_p (rtx op)
2504 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2507 /* SPE offset addressing is limited to 5-bits worth of double words. */
2508 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2511 legitimate_constant_pool_address_p (rtx x)
2514 && GET_CODE (x) == PLUS
2515 && GET_CODE (XEXP (x, 0)) == REG
2516 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2517 && constant_pool_expr_p (XEXP (x, 1)));
2521 legitimate_small_data_p (enum machine_mode mode, rtx x)
2523 return (DEFAULT_ABI == ABI_V4
2524 && !flag_pic && !TARGET_TOC
2525 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2526 && small_data_operand (x, mode));
2530 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2532 unsigned HOST_WIDE_INT offset, extra;
2534 if (GET_CODE (x) != PLUS)
2536 if (GET_CODE (XEXP (x, 0)) != REG)
2538 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2540 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2543 offset = INTVAL (XEXP (x, 1));
2551 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2552 which leaves the only valid constant offset of zero, which by
2553 canonicalization rules is also invalid. */
2560 /* SPE vector modes. */
2561 return SPE_CONST_OFFSET_OK (offset);
2565 if (mode == DFmode || !TARGET_POWERPC64)
2567 else if (offset & 3)
2573 if (mode == TFmode || !TARGET_POWERPC64)
2575 else if (offset & 3)
2586 return (offset < 0x10000) && (offset + extra < 0x10000);
2590 legitimate_indexed_address_p (rtx x, int strict)
2594 if (GET_CODE (x) != PLUS)
2599 if (!REG_P (op0) || !REG_P (op1))
2602 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2603 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2604 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2605 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2609 legitimate_indirect_address_p (rtx x, int strict)
2611 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2615 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2617 if (!TARGET_MACHO || !flag_pic
2618 || mode != SImode || GET_CODE(x) != MEM)
2622 if (GET_CODE (x) != LO_SUM)
2624 if (GET_CODE (XEXP (x, 0)) != REG)
2626 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2630 return CONSTANT_P (x);
2634 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2636 if (GET_CODE (x) != LO_SUM)
2638 if (GET_CODE (XEXP (x, 0)) != REG)
2640 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2644 if (TARGET_ELF || TARGET_MACHO)
2646 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2650 if (GET_MODE_NUNITS (mode) != 1)
2652 if (GET_MODE_BITSIZE (mode) > 32
2653 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2656 return CONSTANT_P (x);
2663 /* Try machine-dependent ways of modifying an illegitimate address
2664 to be legitimate. If we find one, return the new, valid address.
2665 This is used from only one place: `memory_address' in explow.c.
2667 OLDX is the address as it was before break_out_memory_refs was
2668 called. In some cases it is useful to look at this to decide what
2671 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2673 It is always safe for this function to do nothing. It exists to
2674 recognize opportunities to optimize the output.
2676 On RS/6000, first check for the sum of a register with a constant
2677 integer that is out of range. If so, generate code to add the
2678 constant with the low-order 16 bits masked to the register and force
2679 this result into another register (this can be done with `cau').
2680 Then generate an address of REG+(CONST&0xffff), allowing for the
2681 possibility of bit 16 being a one.
2683 Then check for the sum of a register and something not constant, try to
2684 load the other things into a register and return the sum. */
2687 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2688 enum machine_mode mode)
2690 if (GET_CODE (x) == SYMBOL_REF)
2692 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2694 return rs6000_legitimize_tls_address (x, model);
2697 if (GET_CODE (x) == PLUS
2698 && GET_CODE (XEXP (x, 0)) == REG
2699 && GET_CODE (XEXP (x, 1)) == CONST_INT
2700 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2702 HOST_WIDE_INT high_int, low_int;
2704 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2705 high_int = INTVAL (XEXP (x, 1)) - low_int;
2706 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2707 GEN_INT (high_int)), 0);
2708 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2710 else if (GET_CODE (x) == PLUS
2711 && GET_CODE (XEXP (x, 0)) == REG
2712 && GET_CODE (XEXP (x, 1)) != CONST_INT
2713 && GET_MODE_NUNITS (mode) == 1
2714 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2716 || (mode != DFmode && mode != TFmode))
2717 && (TARGET_POWERPC64 || mode != DImode)
2720 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2721 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2723 else if (ALTIVEC_VECTOR_MODE (mode))
2727 /* Make sure both operands are registers. */
2728 if (GET_CODE (x) == PLUS)
2729 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2730 force_reg (Pmode, XEXP (x, 1)));
2732 reg = force_reg (Pmode, x);
2735 else if (SPE_VECTOR_MODE (mode))
2737 /* We accept [reg + reg] and [reg + OFFSET]. */
2739 if (GET_CODE (x) == PLUS)
2741 rtx op1 = XEXP (x, 0);
2742 rtx op2 = XEXP (x, 1);
2744 op1 = force_reg (Pmode, op1);
2746 if (GET_CODE (op2) != REG
2747 && (GET_CODE (op2) != CONST_INT
2748 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2749 op2 = force_reg (Pmode, op2);
2751 return gen_rtx_PLUS (Pmode, op1, op2);
2754 return force_reg (Pmode, x);
2760 && GET_CODE (x) != CONST_INT
2761 && GET_CODE (x) != CONST_DOUBLE
2763 && GET_MODE_NUNITS (mode) == 1
2764 && (GET_MODE_BITSIZE (mode) <= 32
2765 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2767 rtx reg = gen_reg_rtx (Pmode);
2768 emit_insn (gen_elf_high (reg, x));
2769 return gen_rtx_LO_SUM (Pmode, reg, x);
2771 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2774 && ! MACHO_DYNAMIC_NO_PIC_P
2776 && GET_CODE (x) != CONST_INT
2777 && GET_CODE (x) != CONST_DOUBLE
2779 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2783 rtx reg = gen_reg_rtx (Pmode);
2784 emit_insn (gen_macho_high (reg, x));
2785 return gen_rtx_LO_SUM (Pmode, reg, x);
2788 && constant_pool_expr_p (x)
2789 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2791 return create_TOC_reference (x);
2797 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2798 We need to emit DTP-relative relocations. */
2801 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2806 fputs ("\t.long\t", file);
2809 fputs (DOUBLE_INT_ASM_OP, file);
2814 output_addr_const (file, x);
2815 fputs ("@dtprel+0x8000", file);
2818 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2820 static GTY(()) rtx rs6000_tls_symbol;
2822 rs6000_tls_get_addr (void)
2824 if (!rs6000_tls_symbol)
2825 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2827 return rs6000_tls_symbol;
2830 /* Construct the SYMBOL_REF for TLS GOT references. */
2832 static GTY(()) rtx rs6000_got_symbol;
2834 rs6000_got_sym (void)
2836 if (!rs6000_got_symbol)
2838 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2839 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2840 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2843 return rs6000_got_symbol;
2846 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2847 this (thread-local) address. */
2850 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2854 dest = gen_reg_rtx (Pmode);
2855 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2861 tlsreg = gen_rtx_REG (Pmode, 13);
2862 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2866 tlsreg = gen_rtx_REG (Pmode, 2);
2867 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2871 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2875 tmp = gen_reg_rtx (Pmode);
2878 tlsreg = gen_rtx_REG (Pmode, 13);
2879 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2883 tlsreg = gen_rtx_REG (Pmode, 2);
2884 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2888 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2890 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2895 rtx r3, got, tga, tmp1, tmp2, eqv;
2898 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2902 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2905 rtx gsym = rs6000_got_sym ();
2906 got = gen_reg_rtx (Pmode);
2908 rs6000_emit_move (got, gsym, Pmode);
2912 static int tls_got_labelno = 0;
2913 rtx tempLR, lab, tmp3, mem;
2916 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2917 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2918 tempLR = gen_reg_rtx (Pmode);
2919 tmp1 = gen_reg_rtx (Pmode);
2920 tmp2 = gen_reg_rtx (Pmode);
2921 tmp3 = gen_reg_rtx (Pmode);
2922 mem = gen_rtx_MEM (Pmode, tmp1);
2923 RTX_UNCHANGING_P (mem) = 1;
2925 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2927 emit_move_insn (tmp1, tempLR);
2928 emit_move_insn (tmp2, mem);
2929 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2930 last = emit_move_insn (got, tmp3);
2931 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2933 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2935 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2941 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2943 r3 = gen_rtx_REG (Pmode, 3);
2945 insn = gen_tls_gd_64 (r3, got, addr);
2947 insn = gen_tls_gd_32 (r3, got, addr);
2950 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2951 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2952 insn = emit_call_insn (insn);
2953 CONST_OR_PURE_CALL_P (insn) = 1;
2954 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2955 insn = get_insns ();
2957 emit_libcall_block (insn, dest, r3, addr);
2959 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2961 r3 = gen_rtx_REG (Pmode, 3);
2963 insn = gen_tls_ld_64 (r3, got);
2965 insn = gen_tls_ld_32 (r3, got);
2968 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2969 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2970 insn = emit_call_insn (insn);
2971 CONST_OR_PURE_CALL_P (insn) = 1;
2972 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2973 insn = get_insns ();
2975 tmp1 = gen_reg_rtx (Pmode);
2976 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2978 emit_libcall_block (insn, tmp1, r3, eqv);
2979 if (rs6000_tls_size == 16)
2982 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2984 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2986 else if (rs6000_tls_size == 32)
2988 tmp2 = gen_reg_rtx (Pmode);
2990 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2992 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2995 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2997 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3001 tmp2 = gen_reg_rtx (Pmode);
3003 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3005 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3007 insn = gen_rtx_SET (Pmode, dest,
3008 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3014 /* IE, or 64 bit offset LE. */
3015 tmp2 = gen_reg_rtx (Pmode);
3017 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3019 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3022 insn = gen_tls_tls_64 (dest, tmp2, addr);
3024 insn = gen_tls_tls_32 (dest, tmp2, addr);
3032 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3033 instruction definitions. */
3036 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3038 return RS6000_SYMBOL_REF_TLS_P (x);
3041 /* Return 1 if X contains a thread-local symbol. */
3044 rs6000_tls_referenced_p (rtx x)
3046 if (! TARGET_HAVE_TLS)
3049 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3052 /* Return 1 if *X is a thread-local symbol. This is the same as
3053 rs6000_tls_symbol_ref except for the type of the unused argument. */
3056 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3058 return RS6000_SYMBOL_REF_TLS_P (*x);
3061 /* The convention appears to be to define this wherever it is used.
3062 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3063 is now used here. */
3064 #ifndef REG_MODE_OK_FOR_BASE_P
3065 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3068 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3069 replace the input X, or the original X if no replacement is called for.
3070 The output parameter *WIN is 1 if the calling macro should goto WIN,
3073 For RS/6000, we wish to handle large displacements off a base
3074 register by splitting the addend across an addiu/addis and the mem insn.
3075 This cuts number of extra insns needed from 3 to 1.
3077 On Darwin, we use this to generate code for floating point constants.
3078 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3079 The Darwin code is inside #if TARGET_MACHO because only then is
3080 machopic_function_base_name() defined. */
3082 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3083 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3085 /* We must recognize output that we have already generated ourselves. */
3086 if (GET_CODE (x) == PLUS
3087 && GET_CODE (XEXP (x, 0)) == PLUS
3088 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3089 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3090 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3092 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3093 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3094 opnum, (enum reload_type)type);
3100 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3101 && GET_CODE (x) == LO_SUM
3102 && GET_CODE (XEXP (x, 0)) == PLUS
3103 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3104 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3105 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3106 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3107 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3108 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3109 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3111 /* Result of previous invocation of this function on Darwin
3112 floating point constant. */
3113 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3114 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3115 opnum, (enum reload_type)type);
3120 if (GET_CODE (x) == PLUS
3121 && GET_CODE (XEXP (x, 0)) == REG
3122 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3123 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3124 && GET_CODE (XEXP (x, 1)) == CONST_INT
3125 && !SPE_VECTOR_MODE (mode)
3126 && !ALTIVEC_VECTOR_MODE (mode))
3128 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3129 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3131 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3133 /* Check for 32-bit overflow. */
3134 if (high + low != val)
3140 /* Reload the high part into a base reg; leave the low part
3141 in the mem directly. */
3143 x = gen_rtx_PLUS (GET_MODE (x),
3144 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3148 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3149 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3150 opnum, (enum reload_type)type);
3155 if (GET_CODE (x) == SYMBOL_REF
3156 && DEFAULT_ABI == ABI_DARWIN
3157 && !ALTIVEC_VECTOR_MODE (mode)
3158 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3159 /* Don't do this for TFmode, since the result isn't offsettable. */
3164 rtx offset = gen_rtx_CONST (Pmode,
3165 gen_rtx_MINUS (Pmode, x,
3166 gen_rtx_SYMBOL_REF (Pmode,
3167 machopic_function_base_name ())));
3168 x = gen_rtx_LO_SUM (GET_MODE (x),
3169 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3170 gen_rtx_HIGH (Pmode, offset)), offset);
3173 x = gen_rtx_LO_SUM (GET_MODE (x),
3174 gen_rtx_HIGH (Pmode, x), x);
3176 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3177 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3178 opnum, (enum reload_type)type);
3184 && constant_pool_expr_p (x)
3185 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3187 (x) = create_TOC_reference (x);
3195 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3196 that is a valid memory address for an instruction.
3197 The MODE argument is the machine mode for the MEM expression
3198 that wants to use this address.
3200 On the RS/6000, there are four valid address: a SYMBOL_REF that
3201 refers to a constant pool entry of an address (or the sum of it
3202 plus a constant), a short (16-bit signed) constant plus a register,
3203 the sum of two registers, or a register indirect, possibly with an
3204 auto-increment. For DFmode and DImode with a constant plus register,
3205 we must ensure that both words are addressable or PowerPC64 with offset
3208 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3209 32-bit DImode, TImode), indexed addressing cannot be used because
3210 adjacent memory cells are accessed by adding word-sized offsets
3211 during assembly output. */
3213 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3215 if (RS6000_SYMBOL_REF_TLS_P (x))
3217 if (legitimate_indirect_address_p (x, reg_ok_strict))
3219 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3220 && !ALTIVEC_VECTOR_MODE (mode)
3221 && !SPE_VECTOR_MODE (mode)
3223 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3225 if (legitimate_small_data_p (mode, x))
3227 if (legitimate_constant_pool_address_p (x))
3229 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3231 && GET_CODE (x) == PLUS
3232 && GET_CODE (XEXP (x, 0)) == REG
3233 && XEXP (x, 0) == virtual_stack_vars_rtx
3234 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3236 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3239 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3241 || (mode != DFmode && mode != TFmode))
3242 && (TARGET_POWERPC64 || mode != DImode)
3243 && legitimate_indexed_address_p (x, reg_ok_strict))
3245 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3250 /* Go to LABEL if ADDR (a legitimate address expression)
3251 has an effect that depends on the machine mode it is used for.
3253 On the RS/6000 this is true of all integral offsets (since AltiVec
3254 modes don't allow them) or is a pre-increment or decrement.
3256 ??? Except that due to conceptual problems in offsettable_address_p
3257 we can't really report the problems of integral offsets. So leave
3258 this assuming that the adjustable offset must be valid for the
3259 sub-words of a TFmode operand, which is what we had before. */
3262 rs6000_mode_dependent_address (rtx addr)
3264 switch (GET_CODE (addr))
3267 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3269 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3270 return val + 12 + 0x8000 >= 0x10000;
3279 return TARGET_UPDATE;
3288 /* Try to output insns to set TARGET equal to the constant C if it can
3289 be done in less than N insns. Do all computations in MODE.
3290 Returns the place where the output has been placed if it can be
3291 done and the insns have been emitted. If it would take more than N
3292 insns, zero is returned and no insns and emitted. */
3295 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3296 rtx source, int n ATTRIBUTE_UNUSED)
3298 rtx result, insn, set;
3299 HOST_WIDE_INT c0, c1;
3301 if (mode == QImode || mode == HImode)
3304 dest = gen_reg_rtx (mode);
3305 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3308 else if (mode == SImode)
3310 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3312 emit_insn (gen_rtx_SET (VOIDmode, result,
3313 GEN_INT (INTVAL (source)
3314 & (~ (HOST_WIDE_INT) 0xffff))));
3315 emit_insn (gen_rtx_SET (VOIDmode, dest,
3316 gen_rtx_IOR (SImode, result,
3317 GEN_INT (INTVAL (source) & 0xffff))));
3320 else if (mode == DImode)
3322 if (GET_CODE (source) == CONST_INT)
3324 c0 = INTVAL (source);
3327 else if (GET_CODE (source) == CONST_DOUBLE)
3329 #if HOST_BITS_PER_WIDE_INT >= 64
3330 c0 = CONST_DOUBLE_LOW (source);
3333 c0 = CONST_DOUBLE_LOW (source);
3334 c1 = CONST_DOUBLE_HIGH (source);
3340 result = rs6000_emit_set_long_const (dest, c0, c1);
3345 insn = get_last_insn ();
3346 set = single_set (insn);
3347 if (! CONSTANT_P (SET_SRC (set)))
3348 set_unique_reg_note (insn, REG_EQUAL, source);
3353 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3354 fall back to a straight forward decomposition. We do this to avoid
3355 exponential run times encountered when looking for longer sequences
3356 with rs6000_emit_set_const. */
3358 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3360 if (!TARGET_POWERPC64)
3362 rtx operand1, operand2;
3364 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3366 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3368 emit_move_insn (operand1, GEN_INT (c1));
3369 emit_move_insn (operand2, GEN_INT (c2));
3373 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3376 ud2 = (c1 & 0xffff0000) >> 16;
3377 #if HOST_BITS_PER_WIDE_INT >= 64
3381 ud4 = (c2 & 0xffff0000) >> 16;
3383 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3384 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3387 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3389 emit_move_insn (dest, GEN_INT (ud1));
3392 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3393 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3396 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3399 emit_move_insn (dest, GEN_INT (ud2 << 16));
3401 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3403 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3404 || (ud4 == 0 && ! (ud3 & 0x8000)))
3407 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3410 emit_move_insn (dest, GEN_INT (ud3 << 16));
3413 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3414 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3416 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3421 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3424 emit_move_insn (dest, GEN_INT (ud4 << 16));
3427 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3429 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3431 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3432 GEN_INT (ud2 << 16)));
3434 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3440 /* Emit a move from SOURCE to DEST in mode MODE. */
3442 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3446 operands[1] = source;
3448 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3449 if (GET_CODE (operands[1]) == CONST_DOUBLE
3450 && ! FLOAT_MODE_P (mode)
3451 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3453 /* FIXME. This should never happen. */
3454 /* Since it seems that it does, do the safe thing and convert
3456 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3458 if (GET_CODE (operands[1]) == CONST_DOUBLE
3459 && ! FLOAT_MODE_P (mode)
3460 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3461 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3462 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3463 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3466 /* Check if GCC is setting up a block move that will end up using FP
3467 registers as temporaries. We must make sure this is acceptable. */
3468 if (GET_CODE (operands[0]) == MEM
3469 && GET_CODE (operands[1]) == MEM
3471 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3472 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3473 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3474 ? 32 : MEM_ALIGN (operands[0])))
3475 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3477 : MEM_ALIGN (operands[1]))))
3478 && ! MEM_VOLATILE_P (operands [0])
3479 && ! MEM_VOLATILE_P (operands [1]))
3481 emit_move_insn (adjust_address (operands[0], SImode, 0),
3482 adjust_address (operands[1], SImode, 0));
3483 emit_move_insn (adjust_address (operands[0], SImode, 4),
3484 adjust_address (operands[1], SImode, 4));
3488 if (!no_new_pseudos)
3490 if (GET_CODE (operands[1]) == MEM && optimize > 0
3491 && (mode == QImode || mode == HImode || mode == SImode)
3492 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3494 rtx reg = gen_reg_rtx (word_mode);
3496 emit_insn (gen_rtx_SET (word_mode, reg,
3497 gen_rtx_ZERO_EXTEND (word_mode,
3499 operands[1] = gen_lowpart (mode, reg);
3501 if (GET_CODE (operands[0]) != REG)
3502 operands[1] = force_reg (mode, operands[1]);
3505 if (mode == SFmode && ! TARGET_POWERPC
3506 && TARGET_HARD_FLOAT && TARGET_FPRS
3507 && GET_CODE (operands[0]) == MEM)
3511 if (reload_in_progress || reload_completed)
3512 regnum = true_regnum (operands[1]);
3513 else if (GET_CODE (operands[1]) == REG)
3514 regnum = REGNO (operands[1]);
3518 /* If operands[1] is a register, on POWER it may have
3519 double-precision data in it, so truncate it to single
3521 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3524 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3525 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3526 operands[1] = newreg;
3530 /* Recognize the case where operand[1] is a reference to thread-local
3531 data and load its address to a register. */
3532 if (GET_CODE (operands[1]) == SYMBOL_REF)
3534 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3536 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3539 /* Handle the case where reload calls us with an invalid address. */
3540 if (reload_in_progress && mode == Pmode
3541 && (! general_operand (operands[1], mode)
3542 || ! nonimmediate_operand (operands[0], mode)))
3545 /* Handle the case of CONSTANT_P_RTX. */
3546 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3549 /* 128-bit constant floating-point values on Darwin should really be
3550 loaded as two parts. */
3551 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3552 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3553 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3555 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3556 know how to get a DFmode SUBREG of a TFmode. */
3557 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3558 simplify_gen_subreg (DImode, operands[1], mode, 0),
3560 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3561 GET_MODE_SIZE (DImode)),
3562 simplify_gen_subreg (DImode, operands[1], mode,
3563 GET_MODE_SIZE (DImode)),
3568 /* FIXME: In the long term, this switch statement should go away
3569 and be replaced by a sequence of tests based on things like
3575 if (CONSTANT_P (operands[1])
3576 && GET_CODE (operands[1]) != CONST_INT)
3577 operands[1] = force_const_mem (mode, operands[1]);
3583 if (CONSTANT_P (operands[1])
3584 && ! easy_fp_constant (operands[1], mode))
3585 operands[1] = force_const_mem (mode, operands[1]);
3596 if (CONSTANT_P (operands[1])
3597 && !easy_vector_constant (operands[1], mode))
3598 operands[1] = force_const_mem (mode, operands[1]);
3603 /* Use default pattern for address of ELF small data */
3606 && DEFAULT_ABI == ABI_V4
3607 && (GET_CODE (operands[1]) == SYMBOL_REF
3608 || GET_CODE (operands[1]) == CONST)
3609 && small_data_operand (operands[1], mode))
3611 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3615 if (DEFAULT_ABI == ABI_V4
3616 && mode == Pmode && mode == SImode
3617 && flag_pic == 1 && got_operand (operands[1], mode))
3619 emit_insn (gen_movsi_got (operands[0], operands[1]));
3623 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3627 && CONSTANT_P (operands[1])
3628 && GET_CODE (operands[1]) != HIGH
3629 && GET_CODE (operands[1]) != CONST_INT)
3631 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3633 /* If this is a function address on -mcall-aixdesc,
3634 convert it to the address of the descriptor. */
3635 if (DEFAULT_ABI == ABI_AIX
3636 && GET_CODE (operands[1]) == SYMBOL_REF
3637 && XSTR (operands[1], 0)[0] == '.')
3639 const char *name = XSTR (operands[1], 0);
3641 while (*name == '.')
3643 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3644 CONSTANT_POOL_ADDRESS_P (new_ref)
3645 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3646 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3647 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3648 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3649 operands[1] = new_ref;
3652 if (DEFAULT_ABI == ABI_DARWIN)
3655 if (MACHO_DYNAMIC_NO_PIC_P)
3657 /* Take care of any required data indirection. */
3658 operands[1] = rs6000_machopic_legitimize_pic_address (
3659 operands[1], mode, operands[0]);
3660 if (operands[0] != operands[1])
3661 emit_insn (gen_rtx_SET (VOIDmode,
3662 operands[0], operands[1]));
3666 emit_insn (gen_macho_high (target, operands[1]));
3667 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3671 emit_insn (gen_elf_high (target, operands[1]));
3672 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3676 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3677 and we have put it in the TOC, we just need to make a TOC-relative
3680 && GET_CODE (operands[1]) == SYMBOL_REF
3681 && constant_pool_expr_p (operands[1])
3682 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3683 get_pool_mode (operands[1])))
3685 operands[1] = create_TOC_reference (operands[1]);
3687 else if (mode == Pmode
3688 && CONSTANT_P (operands[1])
3689 && ((GET_CODE (operands[1]) != CONST_INT
3690 && ! easy_fp_constant (operands[1], mode))
3691 || (GET_CODE (operands[1]) == CONST_INT
3692 && num_insns_constant (operands[1], mode) > 2)
3693 || (GET_CODE (operands[0]) == REG
3694 && FP_REGNO_P (REGNO (operands[0]))))
3695 && GET_CODE (operands[1]) != HIGH
3696 && ! legitimate_constant_pool_address_p (operands[1])
3697 && ! toc_relative_expr_p (operands[1]))
3699 /* Emit a USE operation so that the constant isn't deleted if
3700 expensive optimizations are turned on because nobody
3701 references it. This should only be done for operands that
3702 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3703 This should not be done for operands that contain LABEL_REFs.
3704 For now, we just handle the obvious case. */
3705 if (GET_CODE (operands[1]) != LABEL_REF)
3706 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3709 /* Darwin uses a special PIC legitimizer. */
3710 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3713 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3715 if (operands[0] != operands[1])
3716 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3721 /* If we are to limit the number of things we put in the TOC and
3722 this is a symbol plus a constant we can add in one insn,
3723 just put the symbol in the TOC and add the constant. Don't do
3724 this if reload is in progress. */
3725 if (GET_CODE (operands[1]) == CONST
3726 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3727 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3728 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3729 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3730 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3731 && ! side_effects_p (operands[0]))
3734 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3735 rtx other = XEXP (XEXP (operands[1], 0), 1);
3737 sym = force_reg (mode, sym);
3739 emit_insn (gen_addsi3 (operands[0], sym, other));
3741 emit_insn (gen_adddi3 (operands[0], sym, other));
3745 operands[1] = force_const_mem (mode, operands[1]);
3748 && constant_pool_expr_p (XEXP (operands[1], 0))
3749 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3750 get_pool_constant (XEXP (operands[1], 0)),
3751 get_pool_mode (XEXP (operands[1], 0))))
3754 = gen_rtx_MEM (mode,
3755 create_TOC_reference (XEXP (operands[1], 0)));
3756 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3757 RTX_UNCHANGING_P (operands[1]) = 1;
3763 if (GET_CODE (operands[0]) == MEM
3764 && GET_CODE (XEXP (operands[0], 0)) != REG
3765 && ! reload_in_progress)
3767 = replace_equiv_address (operands[0],
3768 copy_addr_to_reg (XEXP (operands[0], 0)));
3770 if (GET_CODE (operands[1]) == MEM
3771 && GET_CODE (XEXP (operands[1], 0)) != REG
3772 && ! reload_in_progress)
3774 = replace_equiv_address (operands[1],
3775 copy_addr_to_reg (XEXP (operands[1], 0)));
3778 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3780 gen_rtx_SET (VOIDmode,
3781 operands[0], operands[1]),
3782 gen_rtx_CLOBBER (VOIDmode,
3783 gen_rtx_SCRATCH (SImode)))));
3792 /* Above, we may have called force_const_mem which may have returned
3793 an invalid address. If we can, fix this up; otherwise, reload will
3794 have to deal with it. */
3795 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3796 operands[1] = validize_mem (operands[1]);
3799 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3802 /* Nonzero if we can use a floating-point register to pass this arg. */
3803 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3804 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3805 && (CUM)->fregno <= FP_ARG_MAX_REG \
3806 && TARGET_HARD_FLOAT && TARGET_FPRS)
3808 /* Nonzero if we can use an AltiVec register to pass this arg. */
3809 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3810 (ALTIVEC_VECTOR_MODE (MODE) \
3811 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3812 && TARGET_ALTIVEC_ABI \
3815 /* Return a nonzero value to say to return the function value in
3816 memory, just as large structures are always returned. TYPE will be
3817 the data type of the value, and FNTYPE will be the type of the
3818 function doing the returning, or @code{NULL} for libcalls.
3820 The AIX ABI for the RS/6000 specifies that all structures are
3821 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3822 specifies that structures <= 8 bytes are returned in r3/r4, but a
3823 draft put them in memory, and GCC used to implement the draft
3824 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3825 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3826 compatibility can change DRAFT_V4_STRUCT_RET to override the
3827 default, and -m switches get the final word. See
3828 rs6000_override_options for more details.
3830 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3831 long double support is enabled. These values are returned in memory.
3833 int_size_in_bytes returns -1 for variable size objects, which go in
3834 memory always. The cast to unsigned makes -1 > 8. */
3837 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3839 if (AGGREGATE_TYPE_P (type)
3840 && (TARGET_AIX_STRUCT_RET
3841 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3843 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3848 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3849 for a call to a function whose data type is FNTYPE.
3850 For a library call, FNTYPE is 0.
3852 For incoming args we set the number of arguments in the prototype large
3853 so we never return a PARALLEL. */
3856 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3857 rtx libname ATTRIBUTE_UNUSED, int incoming,
3858 int libcall, int n_named_args)
3860 static CUMULATIVE_ARGS zero_cumulative;
3862 *cum = zero_cumulative;
3864 cum->fregno = FP_ARG_MIN_REG;
3865 cum->vregno = ALTIVEC_ARG_MIN_REG;
3866 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3867 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3868 ? CALL_LIBCALL : CALL_NORMAL);
3869 cum->sysv_gregno = GP_ARG_MIN_REG;
3870 cum->stdarg = fntype
3871 && (TYPE_ARG_TYPES (fntype) != 0
3872 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3873 != void_type_node));
3875 cum->nargs_prototype = 0;
3876 if (incoming || cum->prototype)
3877 cum->nargs_prototype = n_named_args;
3879 /* Check for a longcall attribute. */
3881 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3882 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3883 cum->call_cookie = CALL_LONG;
3885 if (TARGET_DEBUG_ARG)
3887 fprintf (stderr, "\ninit_cumulative_args:");
3890 tree ret_type = TREE_TYPE (fntype);
3891 fprintf (stderr, " ret code = %s,",
3892 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3895 if (cum->call_cookie & CALL_LONG)
3896 fprintf (stderr, " longcall,");
3898 fprintf (stderr, " proto = %d, nargs = %d\n",
3899 cum->prototype, cum->nargs_prototype);
3904 && TARGET_ALTIVEC_ABI
3905 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3907 error ("Cannot return value in vector register because"
3908 " altivec instructions are disabled, use -maltivec"
3909 " to enable them.");
3913 /* If defined, a C expression which determines whether, and in which
3914 direction, to pad out an argument with extra space. The value
3915 should be of type `enum direction': either `upward' to pad above
3916 the argument, `downward' to pad below, or `none' to inhibit
3919 For the AIX ABI structs are always stored left shifted in their
3923 function_arg_padding (enum machine_mode mode, tree type)
3925 #ifndef AGGREGATE_PADDING_FIXED
3926 #define AGGREGATE_PADDING_FIXED 0
3928 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3929 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3932 if (!AGGREGATE_PADDING_FIXED)
3934 /* GCC used to pass structures of the same size as integer types as
3935 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3936 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3937 passed padded downward, except that -mstrict-align further
3938 muddied the water in that multi-component structures of 2 and 4
3939 bytes in size were passed padded upward.
3941 The following arranges for best compatibility with previous
3942 versions of gcc, but removes the -mstrict-align dependency. */
3943 if (BYTES_BIG_ENDIAN)
3945 HOST_WIDE_INT size = 0;
3947 if (mode == BLKmode)
3949 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3950 size = int_size_in_bytes (type);
3953 size = GET_MODE_SIZE (mode);
3955 if (size == 1 || size == 2 || size == 4)
3961 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3963 if (type != 0 && AGGREGATE_TYPE_P (type))
3967 /* Fall back to the default. */
3968 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3971 /* If defined, a C expression that gives the alignment boundary, in bits,
3972 of an argument with the specified mode and type. If it is not defined,
3973 PARM_BOUNDARY is used for all arguments.
3975 V.4 wants long longs to be double word aligned. */
3978 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3980 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3982 else if (SPE_VECTOR_MODE (mode))
3984 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3987 return PARM_BOUNDARY;
3990 /* Compute the size (in words) of a function argument. */
3992 static unsigned long
3993 rs6000_arg_size (enum machine_mode mode, tree type)
3997 if (mode != BLKmode)
3998 size = GET_MODE_SIZE (mode);
4000 size = int_size_in_bytes (type);
4003 return (size + 3) >> 2;
4005 return (size + 7) >> 3;
4008 /* Update the data in CUM to advance over an argument
4009 of mode MODE and data type TYPE.
4010 (TYPE is null for libcalls where that information may not be available.) */
4013 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4014 tree type, int named)
4016 cum->nargs_prototype--;
4018 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4020 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4023 if (!TARGET_ALTIVEC)
4024 error ("Cannot pass argument in vector register because"
4025 " altivec instructions are disabled, use -maltivec"
4026 " to enable them.");
4028 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
4029 even if it is going to be passed in a vector register.
4030 Darwin does the same for variable-argument functions. */
4031 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4032 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4036 /* Vector parameters must be 16-byte aligned. This places
4037 them at 2 mod 4 in terms of words in 32-bit mode, since
4038 the parameter save area starts at offset 24 from the
4039 stack. In 64-bit mode, they just have to start on an
4040 even word, since the parameter save area is 16-byte
4041 aligned. Space for GPRs is reserved even if the argument
4042 will be passed in memory. */
4044 align = ((6 - (cum->words & 3)) & 3);
4046 align = cum->words & 1;
4047 cum->words += align + rs6000_arg_size (mode, type);
4049 if (TARGET_DEBUG_ARG)
4051 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4053 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4054 cum->nargs_prototype, cum->prototype,
4055 GET_MODE_NAME (mode));
4059 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4061 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4063 else if (DEFAULT_ABI == ABI_V4)
4065 if (TARGET_HARD_FLOAT && TARGET_FPRS
4066 && (mode == SFmode || mode == DFmode))
4068 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4073 cum->words += cum->words & 1;
4074 cum->words += rs6000_arg_size (mode, type);
4080 int gregno = cum->sysv_gregno;
4082 /* Aggregates and IEEE quad get passed by reference. */
4083 if ((type && AGGREGATE_TYPE_P (type))
4087 n_words = rs6000_arg_size (mode, type);
4089 /* Long long and SPE vectors are put in odd registers. */
4090 if (n_words == 2 && (gregno & 1) == 0)
4093 /* Long long and SPE vectors are not split between registers
4095 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4097 /* Long long is aligned on the stack. */
4099 cum->words += cum->words & 1;
4100 cum->words += n_words;
4103 /* Note: continuing to accumulate gregno past when we've started
4104 spilling to the stack indicates the fact that we've started
4105 spilling to the stack to expand_builtin_saveregs. */
4106 cum->sysv_gregno = gregno + n_words;
4109 if (TARGET_DEBUG_ARG)
4111 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4112 cum->words, cum->fregno);
4113 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4114 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4115 fprintf (stderr, "mode = %4s, named = %d\n",
4116 GET_MODE_NAME (mode), named);
4121 int align = (TARGET_32BIT && (cum->words & 1) != 0
4122 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4124 cum->words += align + rs6000_arg_size (mode, type);
4126 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4127 && TARGET_HARD_FLOAT && TARGET_FPRS)
4128 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4130 if (TARGET_DEBUG_ARG)
4132 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4133 cum->words, cum->fregno);
4134 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4135 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4136 fprintf (stderr, "named = %d, align = %d\n", named, align);
4141 /* Determine where to put a SIMD argument on the SPE. */
4144 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4149 int gregno = cum->sysv_gregno;
4150 int n_words = rs6000_arg_size (mode, type);
4152 /* SPE vectors are put in odd registers. */
4153 if (n_words == 2 && (gregno & 1) == 0)
4156 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4159 enum machine_mode m = SImode;
4161 r1 = gen_rtx_REG (m, gregno);
4162 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4163 r2 = gen_rtx_REG (m, gregno + 1);
4164 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4165 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4172 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4173 return gen_rtx_REG (mode, cum->sysv_gregno);
4179 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4182 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4183 tree type, int align_words)
4187 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4188 in vararg list into zero, one or two GPRs */
4189 if (align_words >= GP_ARG_NUM_REG)
4190 return gen_rtx_PARALLEL (DFmode,
4192 gen_rtx_EXPR_LIST (VOIDmode,
4193 NULL_RTX, const0_rtx),
4194 gen_rtx_EXPR_LIST (VOIDmode,
4198 else if (align_words + rs6000_arg_size (mode, type)
4200 /* If this is partially on the stack, then we only
4201 include the portion actually in registers here. */
4202 return gen_rtx_PARALLEL (DFmode,
4204 gen_rtx_EXPR_LIST (VOIDmode,
4205 gen_rtx_REG (SImode,
4209 gen_rtx_EXPR_LIST (VOIDmode,
4214 /* split a DFmode arg into two GPRs */
4215 return gen_rtx_PARALLEL (DFmode,
4217 gen_rtx_EXPR_LIST (VOIDmode,
4218 gen_rtx_REG (SImode,
4222 gen_rtx_EXPR_LIST (VOIDmode,
4223 gen_rtx_REG (SImode,
4227 gen_rtx_EXPR_LIST (VOIDmode,
4228 gen_rtx_REG (mode, cum->fregno),
4231 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4233 else if (mode == DImode)
4235 if (align_words < GP_ARG_NUM_REG - 1)
4236 return gen_rtx_PARALLEL (DImode,
4238 gen_rtx_EXPR_LIST (VOIDmode,
4239 gen_rtx_REG (SImode,
4243 gen_rtx_EXPR_LIST (VOIDmode,
4244 gen_rtx_REG (SImode,
4248 else if (align_words == GP_ARG_NUM_REG - 1)
4249 return gen_rtx_PARALLEL (DImode,
4251 gen_rtx_EXPR_LIST (VOIDmode,
4252 NULL_RTX, const0_rtx),
4253 gen_rtx_EXPR_LIST (VOIDmode,
4254 gen_rtx_REG (SImode,
4259 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4262 int size = int_size_in_bytes (type);
4263 int no_units = ((size - 1) / 4) + 1;
4264 int max_no_words = GP_ARG_NUM_REG - align_words;
4265 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4266 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4268 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4270 for (k=0; k < rtlvec_len; k++)
4271 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4272 gen_rtx_REG (SImode,
4275 k == 0 ? const0_rtx : GEN_INT (k*4));
4277 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4283 /* Determine where to put an argument to a function.
4284 Value is zero to push the argument on the stack,
4285 or a hard register in which to store the argument.
4287 MODE is the argument's machine mode.
4288 TYPE is the data type of the argument (as a tree).
4289 This is null for libcalls where that information may
4291 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4292 the preceding args and about the function being called.
4293 NAMED is nonzero if this argument is a named parameter
4294 (otherwise it is an extra parameter matching an ellipsis).
4296 On RS/6000 the first eight words of non-FP are normally in registers
4297 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4298 Under V.4, the first 8 FP args are in registers.
4300 If this is floating-point and no prototype is specified, we use
4301 both an FP and integer register (or possibly FP reg and stack). Library
4302 functions (when CALL_LIBCALL is set) always have the proper types for args,
4303 so we can pass the FP value just in one register. emit_library_function
4304 doesn't support PARALLEL anyway. */
4307 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4308 tree type, int named)
4310 enum rs6000_abi abi = DEFAULT_ABI;
4312 /* Return a marker to indicate whether CR1 needs to set or clear the
4313 bit that V.4 uses to say fp args were passed in registers.
4314 Assume that we don't need the marker for software floating point,
4315 or compiler generated library calls. */
4316 if (mode == VOIDmode)
4319 && cum->nargs_prototype < 0
4320 && (cum->call_cookie & CALL_LIBCALL) == 0
4321 && (cum->prototype || TARGET_NO_PROTOTYPE))
4323 /* For the SPE, we need to crxor CR6 always. */
4325 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4326 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4327 return GEN_INT (cum->call_cookie
4328 | ((cum->fregno == FP_ARG_MIN_REG)
4329 ? CALL_V4_SET_FP_ARGS
4330 : CALL_V4_CLEAR_FP_ARGS));
4333 return GEN_INT (cum->call_cookie);
4336 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4337 if (TARGET_64BIT && ! cum->prototype)
4339 /* Vector parameters get passed in vector register
4340 and also in GPRs or memory, in absence of prototype. */
4343 align_words = (cum->words + 1) & ~1;
4345 if (align_words >= GP_ARG_NUM_REG)
4351 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4353 return gen_rtx_PARALLEL (mode,
4355 gen_rtx_EXPR_LIST (VOIDmode,
4357 gen_rtx_EXPR_LIST (VOIDmode,
4358 gen_rtx_REG (mode, cum->vregno),
4362 return gen_rtx_REG (mode, cum->vregno);
4363 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4365 if (named || abi == ABI_V4)
4369 /* Vector parameters to varargs functions under AIX or Darwin
4370 get passed in memory and possibly also in GPRs. */
4371 int align, align_words;
4372 enum machine_mode part_mode = mode;
4374 /* Vector parameters must be 16-byte aligned. This places them at
4375 2 mod 4 in terms of words in 32-bit mode, since the parameter
4376 save area starts at offset 24 from the stack. In 64-bit mode,
4377 they just have to start on an even word, since the parameter
4378 save area is 16-byte aligned. */
4380 align = ((6 - (cum->words & 3)) & 3);
4382 align = cum->words & 1;
4383 align_words = cum->words + align;
4385 /* Out of registers? Memory, then. */
4386 if (align_words >= GP_ARG_NUM_REG)
4389 /* The vector value goes in GPRs. Only the part of the
4390 value in GPRs is reported here. */
4391 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4393 /* Fortunately, there are only two possibilities, the value
4394 is either wholly in GPRs or half in GPRs and half not. */
4397 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4400 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4401 return rs6000_spe_function_arg (cum, mode, type);
4402 else if (abi == ABI_V4)
4404 if (TARGET_HARD_FLOAT && TARGET_FPRS
4405 && (mode == SFmode || mode == DFmode))
4407 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4408 return gen_rtx_REG (mode, cum->fregno);
4415 int gregno = cum->sysv_gregno;
4417 /* Aggregates and IEEE quad get passed by reference. */
4418 if ((type && AGGREGATE_TYPE_P (type))
4422 n_words = rs6000_arg_size (mode, type);
4424 /* Long long and SPE vectors are put in odd registers. */
4425 if (n_words == 2 && (gregno & 1) == 0)
4428 /* Long long does not split between registers and stack. */
4429 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4430 return gen_rtx_REG (mode, gregno);
4437 int align = (TARGET_32BIT && (cum->words & 1) != 0
4438 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4439 int align_words = cum->words + align;
4441 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4444 if (TARGET_32BIT && TARGET_POWERPC64
4445 && (mode == DImode || mode == BLKmode))
4446 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4448 if (USE_FP_FOR_ARG_P (cum, mode, type))
4453 enum machine_mode fmode = mode;
4455 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4457 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4459 /* Long double split over regs and memory. */
4460 if (fmode == TFmode)
4463 /* Currently, we only ever need one reg here because complex
4464 doubles are split. */
4465 if (cum->fregno != FP_ARG_MAX_REG - 1)
4468 fpr[1] = gen_rtx_REG (fmode, cum->fregno);
4470 /* Do we also need to pass this arg in the parameter save
4473 && (cum->nargs_prototype <= 0
4474 || (DEFAULT_ABI == ABI_AIX
4476 && align_words >= GP_ARG_NUM_REG)));
4478 if (!needs_psave && mode == fmode)
4481 if (TARGET_32BIT && TARGET_POWERPC64
4482 && mode == DFmode && cum->stdarg)
4483 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4485 /* Describe where this piece goes. */
4487 *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
4492 /* Now describe the part that goes in gprs or the stack.
4493 This piece must come first, before the fprs. */
4495 if (align_words < GP_ARG_NUM_REG)
4497 unsigned long n_words = rs6000_arg_size (mode, type);
4498 enum machine_mode rmode = mode;
4500 if (align_words + n_words > GP_ARG_NUM_REG)
4501 /* If this is partially on the stack, then we only
4502 include the portion actually in registers here.
4503 We know this can only be one register because
4504 complex doubles are splt. */
4506 reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
4508 *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4512 return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
4514 else if (align_words < GP_ARG_NUM_REG)
4515 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4521 /* For an arg passed partly in registers and partly in memory,
4522 this is the number of registers used.
4523 For args passed entirely in registers or entirely in memory, zero. */
4526 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4527 tree type, int named)
4531 if (DEFAULT_ABI == ABI_V4)
4534 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4535 && cum->nargs_prototype >= 0)
4538 if (USE_FP_FOR_ARG_P (cum, mode, type))
4540 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4541 ret = FP_ARG_MAX_REG - cum->fregno;
4542 else if (cum->nargs_prototype >= 0)
4546 if (cum->words < GP_ARG_NUM_REG
4547 && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
4548 ret = GP_ARG_NUM_REG - cum->words;
4550 if (ret != 0 && TARGET_DEBUG_ARG)
4551 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4556 /* A C expression that indicates when an argument must be passed by
4557 reference. If nonzero for an argument, a copy of that argument is
4558 made in memory and a pointer to the argument is passed instead of
4559 the argument itself. The pointer is passed in whatever way is
4560 appropriate for passing a pointer to that type.
4562 Under V.4, structures and unions are passed by reference.
4564 As an extension to all ABIs, variable sized types are passed by
4568 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4569 enum machine_mode mode ATTRIBUTE_UNUSED,
4570 tree type, int named ATTRIBUTE_UNUSED)
4572 if (DEFAULT_ABI == ABI_V4
4573 && ((type && AGGREGATE_TYPE_P (type))
4576 if (TARGET_DEBUG_ARG)
4577 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4581 return type && int_size_in_bytes (type) < 0;
4585 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4588 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4593 for (i = 0; i < nregs; i++)
4595 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4596 if (reload_completed)
4598 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4601 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4602 i * GET_MODE_SIZE(reg_mode));
4605 tem = replace_equiv_address (tem, XEXP (tem, 0));
4607 if (tem == NULL_RTX)
4610 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4615 /* Perform any needed actions needed for a function that is receiving a
4616 variable number of arguments.
4620 MODE and TYPE are the mode and type of the current parameter.
4622 PRETEND_SIZE is a variable that should be set to the amount of stack
4623 that must be pushed by the prolog to pretend that our caller pushed
4626 Normally, this macro will push all remaining incoming registers on the
4627 stack and set PRETEND_SIZE to the length of the registers pushed. */
4630 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4631 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4633 CUMULATIVE_ARGS next_cum;
4634 int reg_size = TARGET_32BIT ? 4 : 8;
4635 rtx save_area = NULL_RTX, mem;
4636 int first_reg_offset, set;
4638 /* Skip the last named argument. */
4640 function_arg_advance (&next_cum, mode, type, 1);
4642 if (DEFAULT_ABI == ABI_V4)
4644 /* Indicate to allocate space on the stack for varargs save area. */
4645 cfun->machine->sysv_varargs_p = 1;
4647 save_area = plus_constant (virtual_stack_vars_rtx,
4648 - RS6000_VARARGS_SIZE);
4650 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4654 first_reg_offset = next_cum.words;
4655 save_area = virtual_incoming_args_rtx;
4656 cfun->machine->sysv_varargs_p = 0;
4658 if (MUST_PASS_IN_STACK (mode, type))
4659 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4662 set = get_varargs_alias_set ();
4663 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4665 mem = gen_rtx_MEM (BLKmode,
4666 plus_constant (save_area,
4667 first_reg_offset * reg_size)),
4668 set_mem_alias_set (mem, set);
4669 set_mem_align (mem, BITS_PER_WORD);
4671 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4672 GP_ARG_NUM_REG - first_reg_offset);
4675 /* Save FP registers if needed. */
4676 if (DEFAULT_ABI == ABI_V4
4677 && TARGET_HARD_FLOAT && TARGET_FPRS
4679 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4681 int fregno = next_cum.fregno;
4682 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4683 rtx lab = gen_label_rtx ();
4684 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4686 emit_jump_insn (gen_rtx_SET (VOIDmode,
4688 gen_rtx_IF_THEN_ELSE (VOIDmode,
4689 gen_rtx_NE (VOIDmode, cr1,
4691 gen_rtx_LABEL_REF (VOIDmode, lab),
4694 while (fregno <= FP_ARG_V4_MAX_REG)
4696 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4697 set_mem_alias_set (mem, set);
4698 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4707 /* Create the va_list data type. */
4710 rs6000_build_builtin_va_list (void)
4712 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4714 /* For AIX, prefer 'char *' because that's what the system
4715 header files like. */
4716 if (DEFAULT_ABI != ABI_V4)
4717 return build_pointer_type (char_type_node);
4719 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4720 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4722 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4723 unsigned_char_type_node);
4724 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4725 unsigned_char_type_node);
4726 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4728 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4729 short_unsigned_type_node);
4730 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4732 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4735 DECL_FIELD_CONTEXT (f_gpr) = record;
4736 DECL_FIELD_CONTEXT (f_fpr) = record;
4737 DECL_FIELD_CONTEXT (f_res) = record;
4738 DECL_FIELD_CONTEXT (f_ovf) = record;
4739 DECL_FIELD_CONTEXT (f_sav) = record;
4741 TREE_CHAIN (record) = type_decl;
4742 TYPE_NAME (record) = type_decl;
4743 TYPE_FIELDS (record) = f_gpr;
4744 TREE_CHAIN (f_gpr) = f_fpr;
4745 TREE_CHAIN (f_fpr) = f_res;
4746 TREE_CHAIN (f_res) = f_ovf;
4747 TREE_CHAIN (f_ovf) = f_sav;
4749 layout_type (record);
4751 /* The correct type is an array type of one element. */
4752 return build_array_type (record, build_index_type (size_zero_node));
4755 /* Implement va_start. */
4758 rs6000_va_start (tree valist, rtx nextarg)
4760 HOST_WIDE_INT words, n_gpr, n_fpr;
4761 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4762 tree gpr, fpr, ovf, sav, t;
4764 /* Only SVR4 needs something special. */
4765 if (DEFAULT_ABI != ABI_V4)
4767 std_expand_builtin_va_start (valist, nextarg);
4771 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4772 f_fpr = TREE_CHAIN (f_gpr);
4773 f_res = TREE_CHAIN (f_fpr);
4774 f_ovf = TREE_CHAIN (f_res);
4775 f_sav = TREE_CHAIN (f_ovf);
4777 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4778 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4779 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4780 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4781 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4783 /* Count number of gp and fp argument registers used. */
4784 words = current_function_args_info.words;
4785 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4786 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4788 if (TARGET_DEBUG_ARG)
4789 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4790 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4791 words, n_gpr, n_fpr);
4793 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4794 TREE_SIDE_EFFECTS (t) = 1;
4795 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4797 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4798 TREE_SIDE_EFFECTS (t) = 1;
4799 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4801 /* Find the overflow area. */
4802 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4804 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4805 build_int_2 (words * UNITS_PER_WORD, 0));
4806 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4807 TREE_SIDE_EFFECTS (t) = 1;
4808 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4810 /* Find the register save area. */
4811 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4812 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4813 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4814 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4815 TREE_SIDE_EFFECTS (t) = 1;
4816 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4819 /* Implement va_arg. */
4822 rs6000_va_arg (tree valist, tree type)
4824 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4825 tree gpr, fpr, ovf, sav, reg, t, u;
4826 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4827 rtx lab_false, lab_over, addr_rtx, r;
4829 if (DEFAULT_ABI != ABI_V4)
4831 /* Variable sized types are passed by reference. */
4832 if (int_size_in_bytes (type) < 0)
4834 u = build_pointer_type (type);
4836 /* Args grow upward. */
4837 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4838 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4839 TREE_SIDE_EFFECTS (t) = 1;
4841 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4842 TREE_SIDE_EFFECTS (t) = 1;
4844 t = build1 (INDIRECT_REF, u, t);
4845 TREE_SIDE_EFFECTS (t) = 1;
4847 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4849 if (targetm.calls.split_complex_arg
4850 && TREE_CODE (type) == COMPLEX_TYPE)
4852 tree elem_type = TREE_TYPE (type);
4853 enum machine_mode elem_mode = TYPE_MODE (elem_type);
4854 int elem_size = GET_MODE_SIZE (elem_mode);
4856 if (elem_size < UNITS_PER_WORD)
4858 rtx real_part, imag_part, dest_real, rr;
4860 real_part = rs6000_va_arg (valist, elem_type);
4861 imag_part = rs6000_va_arg (valist, elem_type);
4863 /* We're not returning the value here, but the address.
4864 real_part and imag_part are not contiguous, and we know
4865 there is space available to pack real_part next to
4866 imag_part. float _Complex is not promoted to
4867 double _Complex by the default promotion rules that
4868 promote float to double. */
4869 if (2 * elem_size > UNITS_PER_WORD)
4872 real_part = gen_rtx_MEM (elem_mode, real_part);
4873 imag_part = gen_rtx_MEM (elem_mode, imag_part);
4875 dest_real = adjust_address (imag_part, elem_mode, -elem_size);
4876 rr = gen_reg_rtx (elem_mode);
4877 emit_move_insn (rr, real_part);
4878 emit_move_insn (dest_real, rr);
4880 return XEXP (dest_real, 0);
4884 return std_expand_builtin_va_arg (valist, type);
4887 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4888 f_fpr = TREE_CHAIN (f_gpr);
4889 f_res = TREE_CHAIN (f_fpr);
4890 f_ovf = TREE_CHAIN (f_res);
4891 f_sav = TREE_CHAIN (f_ovf);
4893 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4894 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4895 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4896 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4897 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4899 size = int_size_in_bytes (type);
4900 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4902 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4904 /* Aggregates and long doubles are passed by reference. */
4910 size = UNITS_PER_WORD;
4913 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4915 /* FP args go in FP registers, if present. */
4924 /* Otherwise into GP registers. */
4932 /* Pull the value out of the saved registers.... */
4934 lab_false = gen_label_rtx ();
4935 lab_over = gen_label_rtx ();
4936 addr_rtx = gen_reg_rtx (Pmode);
4938 /* AltiVec vectors never go in registers. */
4939 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4941 TREE_THIS_VOLATILE (reg) = 1;
4942 emit_cmp_and_jump_insns
4943 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4944 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4947 /* Long long is aligned in the registers. */
4950 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4951 build_int_2 (n_reg - 1, 0));
4952 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4953 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4954 TREE_SIDE_EFFECTS (u) = 1;
4955 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4959 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4963 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4964 build_int_2 (n_reg, 0));
4965 TREE_SIDE_EFFECTS (u) = 1;
4967 u = build1 (CONVERT_EXPR, integer_type_node, u);
4968 TREE_SIDE_EFFECTS (u) = 1;
4970 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4971 TREE_SIDE_EFFECTS (u) = 1;
4973 t = build (PLUS_EXPR, ptr_type_node, t, u);
4974 TREE_SIDE_EFFECTS (t) = 1;
4976 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4978 emit_move_insn (addr_rtx, r);
4980 emit_jump_insn (gen_jump (lab_over));
4984 emit_label (lab_false);
4986 /* ... otherwise out of the overflow area. */
4988 /* Make sure we don't find reg 7 for the next int arg.
4990 All AltiVec vectors go in the overflow area. So in the AltiVec
4991 case we need to get the vectors from the overflow area, but
4992 remember where the GPRs and FPRs are. */
4993 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4994 || !TARGET_ALTIVEC))
4996 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4997 TREE_SIDE_EFFECTS (t) = 1;
4998 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5001 /* Care for on-stack alignment if needed. */
5008 /* AltiVec vectors are 16 byte aligned. */
5009 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
5014 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
5015 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
5019 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5021 emit_move_insn (addr_rtx, r);
5023 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5024 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5025 TREE_SIDE_EFFECTS (t) = 1;
5026 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5028 emit_label (lab_over);
5032 r = gen_rtx_MEM (Pmode, addr_rtx);
5033 set_mem_alias_set (r, get_varargs_alias_set ());
5034 emit_move_insn (addr_rtx, r);
5042 #define def_builtin(MASK, NAME, TYPE, CODE) \
5044 if ((MASK) & target_flags) \
5045 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5049 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5051 static const struct builtin_description bdesc_3arg[] =
5053 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5054 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5055 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5056 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5057 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5058 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5059 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5060 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5061 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5062 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5063 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5064 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5065 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5066 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5067 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5068 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5069 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5070 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5071 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5072 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5073 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5074 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5075 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5078 /* DST operations: void foo (void *, const int, const char). */
5080 static const struct builtin_description bdesc_dst[] =
5082 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5083 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5084 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5085 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5088 /* Simple binary operations: VECc = foo (VECa, VECb). */
5090 static struct builtin_description bdesc_2arg[] =
5092 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5093 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5094 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5095 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5096 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5097 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5098 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5099 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5100 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5101 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5102 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5103 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5104 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5105 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5106 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5107 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5108 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5109 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5110 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5111 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5112 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5113 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5114 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5115 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5116 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5117 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5118 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5119 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5120 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5121 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5122 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5123 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5124 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5125 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5126 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5127 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5128 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5129 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5130 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5131 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5132 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5133 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5134 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5135 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5136 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5137 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5138 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5139 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5140 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5141 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5142 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5143 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5144 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5145 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5146 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5147 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5148 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5149 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5150 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5151 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5152 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5153 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5154 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5155 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5156 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5157 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5158 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5159 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5160 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5161 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5162 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5163 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5164 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5165 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5166 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5167 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5168 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5169 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5170 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5171 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5172 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5173 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5174 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5175 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5176 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5177 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5178 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5179 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5180 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5181 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5182 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5183 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5184 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5185 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5186 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5187 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5188 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5189 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5190 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5191 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5192 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5193 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5194 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5195 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5196 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5197 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5198 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5199 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5200 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5201 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5202 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5203 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5204 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5206 /* Place holder, leave as first spe builtin. */
5207 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5208 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5209 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5210 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5211 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5212 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5213 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5214 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5215 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5216 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5217 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5218 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5219 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5220 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5221 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5222 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5223 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5224 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5225 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5226 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5227 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5228 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5229 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5230 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5231 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5232 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5233 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5234 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5235 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5236 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5237 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5238 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5239 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5240 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5241 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5242 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5243 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5244 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5245 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5246 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5247 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5248 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5249 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5250 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5251 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5252 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5253 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5254 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5255 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5256 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5257 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5258 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5259 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5260 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5261 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5262 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5263 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5264 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5265 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5266 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5267 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5268 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5269 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5270 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5271 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5272 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5273 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5274 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5275 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5276 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5277 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5278 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5279 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5280 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5281 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5282 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5283 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5284 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5285 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5286 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5287 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5288 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5289 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5290 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5291 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5292 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5293 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5294 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5295 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5296 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5297 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5298 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5299 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5300 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5301 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5302 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5303 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5304 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5305 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5306 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5307 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5308 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5309 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5310 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5311 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5312 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5313 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5314 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5315 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5317 /* SPE binary operations expecting a 5-bit unsigned literal. */
5318 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5320 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5321 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5322 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5323 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5324 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5325 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5326 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5327 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5328 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5329 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5330 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5331 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5332 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5333 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5334 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5335 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5336 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5337 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5338 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5339 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5340 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5341 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5342 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5343 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5344 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5345 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5347 /* Place-holder. Leave as last binary SPE builtin. */
5348 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5351 /* AltiVec predicates. */
5353 struct builtin_description_predicates
5355 const unsigned int mask;
5356 const enum insn_code icode;
5358 const char *const name;
5359 const enum rs6000_builtins code;
5362 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5364 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5365 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5366 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5367 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5368 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5369 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5370 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5371 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5372 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5373 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5374 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5375 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5376 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5379 /* SPE predicates. */
5380 static struct builtin_description bdesc_spe_predicates[] =
5382 /* Place-holder. Leave as first. */
5383 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5384 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5385 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5386 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5387 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5388 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5389 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5390 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5391 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5392 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5393 /* Place-holder. Leave as last. */
5394 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5397 /* SPE evsel predicates. */
5398 static struct builtin_description bdesc_spe_evsel[] =
5400 /* Place-holder. Leave as first. */
5401 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5402 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5403 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5404 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5405 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5406 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5407 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5408 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5409 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5410 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5411 /* Place-holder. Leave as last. */
5412 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5415 /* ABS* operations. */
5417 static const struct builtin_description bdesc_abs[] =
5419 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5420 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5421 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5422 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5423 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5424 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5425 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5428 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5431 static struct builtin_description bdesc_1arg[] =
5433 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5434 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5435 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5436 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5437 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5438 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5439 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5440 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5441 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5442 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5443 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5444 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5445 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5446 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5447 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5448 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5449 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5451 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5452 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5453 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5454 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5455 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5456 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5457 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5458 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5459 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5460 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5461 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5462 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5463 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5464 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5465 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5466 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5467 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5468 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5469 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5470 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5471 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5472 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5473 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5474 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5475 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5476 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5477 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5478 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5479 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5480 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5482 /* Place-holder. Leave as last unary SPE builtin. */
5483 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5487 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5490 tree arg0 = TREE_VALUE (arglist);
5491 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5492 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5493 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5495 if (icode == CODE_FOR_nothing)
5496 /* Builtin not supported on this processor. */
5499 /* If we got invalid arguments bail out before generating bad rtl. */
5500 if (arg0 == error_mark_node)
5503 if (icode == CODE_FOR_altivec_vspltisb
5504 || icode == CODE_FOR_altivec_vspltish
5505 || icode == CODE_FOR_altivec_vspltisw
5506 || icode == CODE_FOR_spe_evsplatfi
5507 || icode == CODE_FOR_spe_evsplati)
5509 /* Only allow 5-bit *signed* literals. */
5510 if (GET_CODE (op0) != CONST_INT
5511 || INTVAL (op0) > 0x1f
5512 || INTVAL (op0) < -0x1f)
5514 error ("argument 1 must be a 5-bit signed literal");
5520 || GET_MODE (target) != tmode
5521 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5522 target = gen_reg_rtx (tmode);
5524 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5525 op0 = copy_to_mode_reg (mode0, op0);
5527 pat = GEN_FCN (icode) (target, op0);
5536 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5538 rtx pat, scratch1, scratch2;
5539 tree arg0 = TREE_VALUE (arglist);
5540 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5541 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5542 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5544 /* If we have invalid arguments, bail out before generating bad rtl. */
5545 if (arg0 == error_mark_node)
5549 || GET_MODE (target) != tmode
5550 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5551 target = gen_reg_rtx (tmode);
5553 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5554 op0 = copy_to_mode_reg (mode0, op0);
5556 scratch1 = gen_reg_rtx (mode0);
5557 scratch2 = gen_reg_rtx (mode0);
5559 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5568 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5571 tree arg0 = TREE_VALUE (arglist);
5572 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5573 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5574 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5575 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5576 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5577 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5579 if (icode == CODE_FOR_nothing)
5580 /* Builtin not supported on this processor. */
5583 /* If we got invalid arguments bail out before generating bad rtl. */
5584 if (arg0 == error_mark_node || arg1 == error_mark_node)
5587 if (icode == CODE_FOR_altivec_vcfux
5588 || icode == CODE_FOR_altivec_vcfsx
5589 || icode == CODE_FOR_altivec_vctsxs
5590 || icode == CODE_FOR_altivec_vctuxs
5591 || icode == CODE_FOR_altivec_vspltb
5592 || icode == CODE_FOR_altivec_vsplth
5593 || icode == CODE_FOR_altivec_vspltw
5594 || icode == CODE_FOR_spe_evaddiw
5595 || icode == CODE_FOR_spe_evldd
5596 || icode == CODE_FOR_spe_evldh
5597 || icode == CODE_FOR_spe_evldw
5598 || icode == CODE_FOR_spe_evlhhesplat
5599 || icode == CODE_FOR_spe_evlhhossplat
5600 || icode == CODE_FOR_spe_evlhhousplat
5601 || icode == CODE_FOR_spe_evlwhe
5602 || icode == CODE_FOR_spe_evlwhos
5603 || icode == CODE_FOR_spe_evlwhou
5604 || icode == CODE_FOR_spe_evlwhsplat
5605 || icode == CODE_FOR_spe_evlwwsplat
5606 || icode == CODE_FOR_spe_evrlwi
5607 || icode == CODE_FOR_spe_evslwi
5608 || icode == CODE_FOR_spe_evsrwis
5609 || icode == CODE_FOR_spe_evsubifw
5610 || icode == CODE_FOR_spe_evsrwiu)
5612 /* Only allow 5-bit unsigned literals. */
5614 if (TREE_CODE (arg1) != INTEGER_CST
5615 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5617 error ("argument 2 must be a 5-bit unsigned literal");
5623 || GET_MODE (target) != tmode
5624 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5625 target = gen_reg_rtx (tmode);
5627 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5628 op0 = copy_to_mode_reg (mode0, op0);
5629 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5630 op1 = copy_to_mode_reg (mode1, op1);
5632 pat = GEN_FCN (icode) (target, op0, op1);
5641 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5642 tree arglist, rtx target)
5645 tree cr6_form = TREE_VALUE (arglist);
5646 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5647 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5648 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5649 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5650 enum machine_mode tmode = SImode;
5651 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5652 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5655 if (TREE_CODE (cr6_form) != INTEGER_CST)
5657 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5661 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5666 /* If we have invalid arguments, bail out before generating bad rtl. */
5667 if (arg0 == error_mark_node || arg1 == error_mark_node)
5671 || GET_MODE (target) != tmode
5672 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5673 target = gen_reg_rtx (tmode);
5675 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5676 op0 = copy_to_mode_reg (mode0, op0);
5677 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5678 op1 = copy_to_mode_reg (mode1, op1);
5680 scratch = gen_reg_rtx (mode0);
5682 pat = GEN_FCN (icode) (scratch, op0, op1,
5683 gen_rtx_SYMBOL_REF (Pmode, opcode));
5688 /* The vec_any* and vec_all* predicates use the same opcodes for two
5689 different operations, but the bits in CR6 will be different
5690 depending on what information we want. So we have to play tricks
5691 with CR6 to get the right bits out.
5693 If you think this is disgusting, look at the specs for the
5694 AltiVec predicates. */
5696 switch (cr6_form_int)
5699 emit_insn (gen_cr6_test_for_zero (target));
5702 emit_insn (gen_cr6_test_for_zero_reverse (target));
5705 emit_insn (gen_cr6_test_for_lt (target));
5708 emit_insn (gen_cr6_test_for_lt_reverse (target));
5711 error ("argument 1 of __builtin_altivec_predicate is out of range");
5719 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5722 tree arg0 = TREE_VALUE (arglist);
5723 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5724 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5725 enum machine_mode mode0 = Pmode;
5726 enum machine_mode mode1 = Pmode;
5727 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5728 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5730 if (icode == CODE_FOR_nothing)
5731 /* Builtin not supported on this processor. */
5734 /* If we got invalid arguments bail out before generating bad rtl. */
5735 if (arg0 == error_mark_node || arg1 == error_mark_node)
5739 || GET_MODE (target) != tmode
5740 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5741 target = gen_reg_rtx (tmode);
5743 op1 = copy_to_mode_reg (mode1, op1);
5745 if (op0 == const0_rtx)
5747 addr = gen_rtx_MEM (tmode, op1);
5751 op0 = copy_to_mode_reg (mode0, op0);
5752 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5755 pat = GEN_FCN (icode) (target, addr);
5765 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5767 tree arg0 = TREE_VALUE (arglist);
5768 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5769 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5770 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5771 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5772 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5774 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5775 enum machine_mode mode1 = Pmode;
5776 enum machine_mode mode2 = Pmode;
5778 /* Invalid arguments. Bail before doing anything stoopid! */
5779 if (arg0 == error_mark_node
5780 || arg1 == error_mark_node
5781 || arg2 == error_mark_node)
5784 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5785 op0 = copy_to_mode_reg (tmode, op0);
5787 op2 = copy_to_mode_reg (mode2, op2);
5789 if (op1 == const0_rtx)
5791 addr = gen_rtx_MEM (tmode, op2);
5795 op1 = copy_to_mode_reg (mode1, op1);
5796 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5799 pat = GEN_FCN (icode) (addr, op0);
5806 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5809 tree arg0 = TREE_VALUE (arglist);
5810 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5811 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5812 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5813 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5814 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5815 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5816 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5817 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5818 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5820 if (icode == CODE_FOR_nothing)
5821 /* Builtin not supported on this processor. */
5824 /* If we got invalid arguments bail out before generating bad rtl. */
5825 if (arg0 == error_mark_node
5826 || arg1 == error_mark_node
5827 || arg2 == error_mark_node)
5830 if (icode == CODE_FOR_altivec_vsldoi_4sf
5831 || icode == CODE_FOR_altivec_vsldoi_4si
5832 || icode == CODE_FOR_altivec_vsldoi_8hi
5833 || icode == CODE_FOR_altivec_vsldoi_16qi)
5835 /* Only allow 4-bit unsigned literals. */
5837 if (TREE_CODE (arg2) != INTEGER_CST
5838 || TREE_INT_CST_LOW (arg2) & ~0xf)
5840 error ("argument 3 must be a 4-bit unsigned literal");
5846 || GET_MODE (target) != tmode
5847 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5848 target = gen_reg_rtx (tmode);
5850 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5851 op0 = copy_to_mode_reg (mode0, op0);
5852 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5853 op1 = copy_to_mode_reg (mode1, op1);
5854 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5855 op2 = copy_to_mode_reg (mode2, op2);
5857 pat = GEN_FCN (icode) (target, op0, op1, op2);
5865 /* Expand the lvx builtins. */
5867 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5869 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5870 tree arglist = TREE_OPERAND (exp, 1);
5871 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5873 enum machine_mode tmode, mode0;
5875 enum insn_code icode;
5879 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5880 icode = CODE_FOR_altivec_lvx_16qi;
5882 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5883 icode = CODE_FOR_altivec_lvx_8hi;
5885 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5886 icode = CODE_FOR_altivec_lvx_4si;
5888 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5889 icode = CODE_FOR_altivec_lvx_4sf;
5898 arg0 = TREE_VALUE (arglist);
5899 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5900 tmode = insn_data[icode].operand[0].mode;
5901 mode0 = insn_data[icode].operand[1].mode;
5904 || GET_MODE (target) != tmode
5905 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5906 target = gen_reg_rtx (tmode);
5908 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5909 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5911 pat = GEN_FCN (icode) (target, op0);
5918 /* Expand the stvx builtins. */
5920 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5923 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5924 tree arglist = TREE_OPERAND (exp, 1);
5925 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5927 enum machine_mode mode0, mode1;
5929 enum insn_code icode;
5933 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5934 icode = CODE_FOR_altivec_stvx_16qi;
5936 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5937 icode = CODE_FOR_altivec_stvx_8hi;
5939 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5940 icode = CODE_FOR_altivec_stvx_4si;
5942 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5943 icode = CODE_FOR_altivec_stvx_4sf;
5950 arg0 = TREE_VALUE (arglist);
5951 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5952 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5953 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5954 mode0 = insn_data[icode].operand[0].mode;
5955 mode1 = insn_data[icode].operand[1].mode;
5957 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5958 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5959 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5960 op1 = copy_to_mode_reg (mode1, op1);
5962 pat = GEN_FCN (icode) (op0, op1);
5970 /* Expand the dst builtins. */
5972 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5975 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5976 tree arglist = TREE_OPERAND (exp, 1);
5977 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5978 tree arg0, arg1, arg2;
5979 enum machine_mode mode0, mode1, mode2;
5980 rtx pat, op0, op1, op2;
5981 struct builtin_description *d;
5986 /* Handle DST variants. */
5987 d = (struct builtin_description *) bdesc_dst;
5988 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5989 if (d->code == fcode)
5991 arg0 = TREE_VALUE (arglist);
5992 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5993 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5994 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5995 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5996 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5997 mode0 = insn_data[d->icode].operand[0].mode;
5998 mode1 = insn_data[d->icode].operand[1].mode;
5999 mode2 = insn_data[d->icode].operand[2].mode;
6001 /* Invalid arguments, bail out before generating bad rtl. */
6002 if (arg0 == error_mark_node
6003 || arg1 == error_mark_node
6004 || arg2 == error_mark_node)
6008 if (TREE_CODE (arg2) != INTEGER_CST
6009 || TREE_INT_CST_LOW (arg2) & ~0x3)
6011 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6015 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6016 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6017 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6018 op1 = copy_to_mode_reg (mode1, op1);
6020 pat = GEN_FCN (d->icode) (op0, op1, op2);
6031 /* Expand the builtin in EXP and store the result in TARGET. Store
6032 true in *EXPANDEDP if we found a builtin to expand. */
6034 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6036 struct builtin_description *d;
6037 struct builtin_description_predicates *dp;
6039 enum insn_code icode;
6040 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6041 tree arglist = TREE_OPERAND (exp, 1);
6044 enum machine_mode tmode, mode0;
6045 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6047 target = altivec_expand_ld_builtin (exp, target, expandedp);
6051 target = altivec_expand_st_builtin (exp, target, expandedp);
6055 target = altivec_expand_dst_builtin (exp, target, expandedp);
6063 case ALTIVEC_BUILTIN_STVX:
6064 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6065 case ALTIVEC_BUILTIN_STVEBX:
6066 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6067 case ALTIVEC_BUILTIN_STVEHX:
6068 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6069 case ALTIVEC_BUILTIN_STVEWX:
6070 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6071 case ALTIVEC_BUILTIN_STVXL:
6072 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6074 case ALTIVEC_BUILTIN_MFVSCR:
6075 icode = CODE_FOR_altivec_mfvscr;
6076 tmode = insn_data[icode].operand[0].mode;
6079 || GET_MODE (target) != tmode
6080 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6081 target = gen_reg_rtx (tmode);
6083 pat = GEN_FCN (icode) (target);
6089 case ALTIVEC_BUILTIN_MTVSCR:
6090 icode = CODE_FOR_altivec_mtvscr;
6091 arg0 = TREE_VALUE (arglist);
6092 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6093 mode0 = insn_data[icode].operand[0].mode;
6095 /* If we got invalid arguments bail out before generating bad rtl. */
6096 if (arg0 == error_mark_node)
6099 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6100 op0 = copy_to_mode_reg (mode0, op0);
6102 pat = GEN_FCN (icode) (op0);
6107 case ALTIVEC_BUILTIN_DSSALL:
6108 emit_insn (gen_altivec_dssall ());
6111 case ALTIVEC_BUILTIN_DSS:
6112 icode = CODE_FOR_altivec_dss;
6113 arg0 = TREE_VALUE (arglist);
6115 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6116 mode0 = insn_data[icode].operand[0].mode;
6118 /* If we got invalid arguments bail out before generating bad rtl. */
6119 if (arg0 == error_mark_node)
6122 if (TREE_CODE (arg0) != INTEGER_CST
6123 || TREE_INT_CST_LOW (arg0) & ~0x3)
6125 error ("argument to dss must be a 2-bit unsigned literal");
6129 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6130 op0 = copy_to_mode_reg (mode0, op0);
6132 emit_insn (gen_altivec_dss (op0));
6135 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6136 arg0 = TREE_VALUE (arglist);
6137 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6138 arg0 = TREE_OPERAND (arg0, 0);
6139 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6140 TREE_STRING_POINTER (arg0));
6145 /* Expand abs* operations. */
6146 d = (struct builtin_description *) bdesc_abs;
6147 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6148 if (d->code == fcode)
6149 return altivec_expand_abs_builtin (d->icode, arglist, target);
6151 /* Expand the AltiVec predicates. */
6152 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6153 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6154 if (dp->code == fcode)
6155 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6157 /* LV* are funky. We initialized them differently. */
6160 case ALTIVEC_BUILTIN_LVSL:
6161 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6163 case ALTIVEC_BUILTIN_LVSR:
6164 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6166 case ALTIVEC_BUILTIN_LVEBX:
6167 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6169 case ALTIVEC_BUILTIN_LVEHX:
6170 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6172 case ALTIVEC_BUILTIN_LVEWX:
6173 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6175 case ALTIVEC_BUILTIN_LVXL:
6176 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6178 case ALTIVEC_BUILTIN_LVX:
6179 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6190 /* Binops that need to be initialized manually, but can be expanded
6191 automagically by rs6000_expand_binop_builtin. */
6192 static struct builtin_description bdesc_2arg_spe[] =
6194 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6195 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6196 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6197 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6198 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6199 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6200 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6201 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6202 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6203 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6204 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6205 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6206 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6207 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6208 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6209 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6210 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6211 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6212 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6213 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6214 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6215 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6218 /* Expand the builtin in EXP and store the result in TARGET. Store
6219 true in *EXPANDEDP if we found a builtin to expand.
6221 This expands the SPE builtins that are not simple unary and binary
6224 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6226 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6227 tree arglist = TREE_OPERAND (exp, 1);
6229 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6230 enum insn_code icode;
6231 enum machine_mode tmode, mode0;
6233 struct builtin_description *d;
6238 /* Syntax check for a 5-bit unsigned immediate. */
6241 case SPE_BUILTIN_EVSTDD:
6242 case SPE_BUILTIN_EVSTDH:
6243 case SPE_BUILTIN_EVSTDW:
6244 case SPE_BUILTIN_EVSTWHE:
6245 case SPE_BUILTIN_EVSTWHO:
6246 case SPE_BUILTIN_EVSTWWE:
6247 case SPE_BUILTIN_EVSTWWO:
6248 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6249 if (TREE_CODE (arg1) != INTEGER_CST
6250 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6252 error ("argument 2 must be a 5-bit unsigned literal");
6260 /* The evsplat*i instructions are not quite generic. */
6263 case SPE_BUILTIN_EVSPLATFI:
6264 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6266 case SPE_BUILTIN_EVSPLATI:
6267 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6273 d = (struct builtin_description *) bdesc_2arg_spe;
6274 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6275 if (d->code == fcode)
6276 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6278 d = (struct builtin_description *) bdesc_spe_predicates;
6279 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6280 if (d->code == fcode)
6281 return spe_expand_predicate_builtin (d->icode, arglist, target);
6283 d = (struct builtin_description *) bdesc_spe_evsel;
6284 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6285 if (d->code == fcode)
6286 return spe_expand_evsel_builtin (d->icode, arglist, target);
6290 case SPE_BUILTIN_EVSTDDX:
6291 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6292 case SPE_BUILTIN_EVSTDHX:
6293 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6294 case SPE_BUILTIN_EVSTDWX:
6295 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6296 case SPE_BUILTIN_EVSTWHEX:
6297 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6298 case SPE_BUILTIN_EVSTWHOX:
6299 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6300 case SPE_BUILTIN_EVSTWWEX:
6301 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6302 case SPE_BUILTIN_EVSTWWOX:
6303 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6304 case SPE_BUILTIN_EVSTDD:
6305 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6306 case SPE_BUILTIN_EVSTDH:
6307 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6308 case SPE_BUILTIN_EVSTDW:
6309 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6310 case SPE_BUILTIN_EVSTWHE:
6311 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6312 case SPE_BUILTIN_EVSTWHO:
6313 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6314 case SPE_BUILTIN_EVSTWWE:
6315 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6316 case SPE_BUILTIN_EVSTWWO:
6317 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6318 case SPE_BUILTIN_MFSPEFSCR:
6319 icode = CODE_FOR_spe_mfspefscr;
6320 tmode = insn_data[icode].operand[0].mode;
6323 || GET_MODE (target) != tmode
6324 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6325 target = gen_reg_rtx (tmode);
6327 pat = GEN_FCN (icode) (target);
6332 case SPE_BUILTIN_MTSPEFSCR:
6333 icode = CODE_FOR_spe_mtspefscr;
6334 arg0 = TREE_VALUE (arglist);
6335 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6336 mode0 = insn_data[icode].operand[0].mode;
6338 if (arg0 == error_mark_node)
6341 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6342 op0 = copy_to_mode_reg (mode0, op0);
6344 pat = GEN_FCN (icode) (op0);
6357 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6359 rtx pat, scratch, tmp;
6360 tree form = TREE_VALUE (arglist);
6361 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6362 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6363 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6364 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6365 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6366 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6370 if (TREE_CODE (form) != INTEGER_CST)
6372 error ("argument 1 of __builtin_spe_predicate must be a constant");
6376 form_int = TREE_INT_CST_LOW (form);
6381 if (arg0 == error_mark_node || arg1 == error_mark_node)
6385 || GET_MODE (target) != SImode
6386 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6387 target = gen_reg_rtx (SImode);
6389 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6390 op0 = copy_to_mode_reg (mode0, op0);
6391 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6392 op1 = copy_to_mode_reg (mode1, op1);
6394 scratch = gen_reg_rtx (CCmode);
6396 pat = GEN_FCN (icode) (scratch, op0, op1);
6401 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6402 _lower_. We use one compare, but look in different bits of the
6403 CR for each variant.
6405 There are 2 elements in each SPE simd type (upper/lower). The CR
6406 bits are set as follows:
6408 BIT0 | BIT 1 | BIT 2 | BIT 3
6409 U | L | (U | L) | (U & L)
6411 So, for an "all" relationship, BIT 3 would be set.
6412 For an "any" relationship, BIT 2 would be set. Etc.
6414 Following traditional nomenclature, these bits map to:
6416 BIT0 | BIT 1 | BIT 2 | BIT 3
6419 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6424 /* All variant. OV bit. */
6426 /* We need to get to the OV bit, which is the ORDERED bit. We
6427 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6428 that's ugly and will trigger a validate_condition_mode abort.
6429 So let's just use another pattern. */
6430 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6432 /* Any variant. EQ bit. */
6436 /* Upper variant. LT bit. */
6440 /* Lower variant. GT bit. */
6445 error ("argument 1 of __builtin_spe_predicate is out of range");
6449 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6450 emit_move_insn (target, tmp);
6455 /* The evsel builtins look like this:
6457 e = __builtin_spe_evsel_OP (a, b, c, d);
6461 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6462 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6466 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6469 tree arg0 = TREE_VALUE (arglist);
6470 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6471 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6472 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6473 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6474 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6475 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6476 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6477 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6478 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6483 if (arg0 == error_mark_node || arg1 == error_mark_node
6484 || arg2 == error_mark_node || arg3 == error_mark_node)
6488 || GET_MODE (target) != mode0
6489 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6490 target = gen_reg_rtx (mode0);
6492 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6493 op0 = copy_to_mode_reg (mode0, op0);
6494 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6495 op1 = copy_to_mode_reg (mode0, op1);
6496 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6497 op2 = copy_to_mode_reg (mode0, op2);
6498 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6499 op3 = copy_to_mode_reg (mode0, op3);
6501 /* Generate the compare. */
6502 scratch = gen_reg_rtx (CCmode);
6503 pat = GEN_FCN (icode) (scratch, op0, op1);
6508 if (mode0 == V2SImode)
6509 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6511 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6516 /* Expand an expression EXP that calls a built-in function,
6517 with result going to TARGET if that's convenient
6518 (and in mode MODE if that's convenient).
6519 SUBTARGET may be used as the target for computing one of EXP's operands.
6520 IGNORE is nonzero if the value is to be ignored. */
6523 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6524 enum machine_mode mode ATTRIBUTE_UNUSED,
6525 int ignore ATTRIBUTE_UNUSED)
6527 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6528 tree arglist = TREE_OPERAND (exp, 1);
6529 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6530 struct builtin_description *d;
6537 ret = altivec_expand_builtin (exp, target, &success);
6544 ret = spe_expand_builtin (exp, target, &success);
6550 if (TARGET_ALTIVEC || TARGET_SPE)
6552 /* Handle simple unary operations. */
6553 d = (struct builtin_description *) bdesc_1arg;
6554 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6555 if (d->code == fcode)
6556 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6558 /* Handle simple binary operations. */
6559 d = (struct builtin_description *) bdesc_2arg;
6560 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6561 if (d->code == fcode)
6562 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6564 /* Handle simple ternary operations. */
6565 d = (struct builtin_description *) bdesc_3arg;
6566 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6567 if (d->code == fcode)
6568 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6576 rs6000_init_builtins (void)
6578 V2SI_type_node = build_vector_type (intSI_type_node, 2);
6579 V2SF_type_node = build_vector_type (float_type_node, 2);
6580 V4HI_type_node = build_vector_type (intHI_type_node, 4);
6581 V4SI_type_node = build_vector_type (intSI_type_node, 4);
6582 V4SF_type_node = build_vector_type (float_type_node, 4);
6583 V8HI_type_node = build_vector_type (intHI_type_node, 8);
6584 V16QI_type_node = build_vector_type (intQI_type_node, 16);
6586 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
6587 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
6588 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
6590 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6591 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6592 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6594 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6595 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
6596 'vector unsigned short'. */
6598 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6599 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6600 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6601 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6602 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6603 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6604 pixel_type_node = copy_node (unsigned_intHI_type_node);
6605 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6607 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6608 get_identifier ("__bool char"),
6609 bool_char_type_node));
6610 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6611 get_identifier ("__bool short"),
6612 bool_short_type_node));
6613 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6614 get_identifier ("__bool int"),
6615 bool_int_type_node));
6616 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6617 get_identifier ("__pixel"),
6620 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
6621 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
6622 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
6623 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
6625 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6626 get_identifier ("__vector unsigned char"),
6627 unsigned_V16QI_type_node));
6628 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6629 get_identifier ("__vector signed char"),
6631 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6632 get_identifier ("__vector __bool char"),
6633 bool_V16QI_type_node));
6635 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6636 get_identifier ("__vector unsigned short"),
6637 unsigned_V8HI_type_node));
6638 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6639 get_identifier ("__vector signed short"),
6641 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6642 get_identifier ("__vector __bool short"),
6643 bool_V8HI_type_node));
6645 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6646 get_identifier ("__vector unsigned int"),
6647 unsigned_V4SI_type_node));
6648 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6649 get_identifier ("__vector signed int"),
6651 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6652 get_identifier ("__vector __bool int"),
6653 bool_V4SI_type_node));
6655 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6656 get_identifier ("__vector float"),
6658 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6659 get_identifier ("__vector __pixel"),
6660 pixel_V8HI_type_node));
6663 spe_init_builtins ();
6665 altivec_init_builtins ();
6666 if (TARGET_ALTIVEC || TARGET_SPE)
6667 rs6000_common_init_builtins ();
6670 /* Search through a set of builtins and enable the mask bits.
6671 DESC is an array of builtins.
6672 SIZE is the total number of builtins.
6673 START is the builtin enum at which to start.
6674 END is the builtin enum at which to end. */
6676 enable_mask_for_builtins (struct builtin_description *desc, int size,
6677 enum rs6000_builtins start,
6678 enum rs6000_builtins end)
6682 for (i = 0; i < size; ++i)
6683 if (desc[i].code == start)
6689 for (; i < size; ++i)
6691 /* Flip all the bits on. */
6692 desc[i].mask = target_flags;
6693 if (desc[i].code == end)
6699 spe_init_builtins (void)
6701 tree endlink = void_list_node;
6702 tree puint_type_node = build_pointer_type (unsigned_type_node);
6703 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6704 struct builtin_description *d;
6707 tree v2si_ftype_4_v2si
6708 = build_function_type
6709 (opaque_V2SI_type_node,
6710 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6711 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6712 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6713 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6716 tree v2sf_ftype_4_v2sf
6717 = build_function_type
6718 (opaque_V2SF_type_node,
6719 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6720 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6721 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6722 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6725 tree int_ftype_int_v2si_v2si
6726 = build_function_type
6728 tree_cons (NULL_TREE, integer_type_node,
6729 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6730 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6733 tree int_ftype_int_v2sf_v2sf
6734 = build_function_type
6736 tree_cons (NULL_TREE, integer_type_node,
6737 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6738 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6741 tree void_ftype_v2si_puint_int
6742 = build_function_type (void_type_node,
6743 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6744 tree_cons (NULL_TREE, puint_type_node,
6745 tree_cons (NULL_TREE,
6749 tree void_ftype_v2si_puint_char
6750 = build_function_type (void_type_node,
6751 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6752 tree_cons (NULL_TREE, puint_type_node,
6753 tree_cons (NULL_TREE,
6757 tree void_ftype_v2si_pv2si_int
6758 = build_function_type (void_type_node,
6759 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6760 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6761 tree_cons (NULL_TREE,
6765 tree void_ftype_v2si_pv2si_char
6766 = build_function_type (void_type_node,
6767 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6768 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6769 tree_cons (NULL_TREE,
6774 = build_function_type (void_type_node,
6775 tree_cons (NULL_TREE, integer_type_node, endlink));
6778 = build_function_type (integer_type_node, endlink);
6780 tree v2si_ftype_pv2si_int
6781 = build_function_type (opaque_V2SI_type_node,
6782 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6783 tree_cons (NULL_TREE, integer_type_node,
6786 tree v2si_ftype_puint_int
6787 = build_function_type (opaque_V2SI_type_node,
6788 tree_cons (NULL_TREE, puint_type_node,
6789 tree_cons (NULL_TREE, integer_type_node,
6792 tree v2si_ftype_pushort_int
6793 = build_function_type (opaque_V2SI_type_node,
6794 tree_cons (NULL_TREE, pushort_type_node,
6795 tree_cons (NULL_TREE, integer_type_node,
6798 tree v2si_ftype_signed_char
6799 = build_function_type (opaque_V2SI_type_node,
6800 tree_cons (NULL_TREE, signed_char_type_node,
6803 /* The initialization of the simple binary and unary builtins is
6804 done in rs6000_common_init_builtins, but we have to enable the
6805 mask bits here manually because we have run out of `target_flags'
6806 bits. We really need to redesign this mask business. */
6808 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6809 ARRAY_SIZE (bdesc_2arg),
6812 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6813 ARRAY_SIZE (bdesc_1arg),
6815 SPE_BUILTIN_EVSUBFUSIAAW);
6816 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6817 ARRAY_SIZE (bdesc_spe_predicates),
6818 SPE_BUILTIN_EVCMPEQ,
6819 SPE_BUILTIN_EVFSTSTLT);
6820 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6821 ARRAY_SIZE (bdesc_spe_evsel),
6822 SPE_BUILTIN_EVSEL_CMPGTS,
6823 SPE_BUILTIN_EVSEL_FSTSTEQ);
6825 (*lang_hooks.decls.pushdecl)
6826 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6827 opaque_V2SI_type_node));
6829 /* Initialize irregular SPE builtins. */
6831 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6832 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6833 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6834 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6835 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6836 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6837 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6838 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6839 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6840 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6841 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6842 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6843 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6844 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6845 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6846 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6847 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6848 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6851 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6852 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6853 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6854 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6855 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6856 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6857 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6858 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6859 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6860 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6861 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6862 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6863 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6864 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6865 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6866 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6867 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6868 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6869 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6870 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6871 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6872 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6875 d = (struct builtin_description *) bdesc_spe_predicates;
6876 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6880 switch (insn_data[d->icode].operand[1].mode)
6883 type = int_ftype_int_v2si_v2si;
6886 type = int_ftype_int_v2sf_v2sf;
6892 def_builtin (d->mask, d->name, type, d->code);
6895 /* Evsel predicates. */
6896 d = (struct builtin_description *) bdesc_spe_evsel;
6897 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6901 switch (insn_data[d->icode].operand[1].mode)
6904 type = v2si_ftype_4_v2si;
6907 type = v2sf_ftype_4_v2sf;
6913 def_builtin (d->mask, d->name, type, d->code);
6918 altivec_init_builtins (void)
6920 struct builtin_description *d;
6921 struct builtin_description_predicates *dp;
6923 tree pfloat_type_node = build_pointer_type (float_type_node);
6924 tree pint_type_node = build_pointer_type (integer_type_node);
6925 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6926 tree pchar_type_node = build_pointer_type (char_type_node);
6928 tree pvoid_type_node = build_pointer_type (void_type_node);
6930 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6931 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6932 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6933 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6935 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6937 tree int_ftype_int_v4si_v4si
6938 = build_function_type_list (integer_type_node,
6939 integer_type_node, V4SI_type_node,
6940 V4SI_type_node, NULL_TREE);
6941 tree v4sf_ftype_pcfloat
6942 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6943 tree void_ftype_pfloat_v4sf
6944 = build_function_type_list (void_type_node,
6945 pfloat_type_node, V4SF_type_node, NULL_TREE);
6946 tree v4si_ftype_pcint
6947 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6948 tree void_ftype_pint_v4si
6949 = build_function_type_list (void_type_node,
6950 pint_type_node, V4SI_type_node, NULL_TREE);
6951 tree v8hi_ftype_pcshort
6952 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6953 tree void_ftype_pshort_v8hi
6954 = build_function_type_list (void_type_node,
6955 pshort_type_node, V8HI_type_node, NULL_TREE);
6956 tree v16qi_ftype_pcchar
6957 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6958 tree void_ftype_pchar_v16qi
6959 = build_function_type_list (void_type_node,
6960 pchar_type_node, V16QI_type_node, NULL_TREE);
6961 tree void_ftype_v4si
6962 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6963 tree v8hi_ftype_void
6964 = build_function_type (V8HI_type_node, void_list_node);
6965 tree void_ftype_void
6966 = build_function_type (void_type_node, void_list_node);
6968 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6970 tree v16qi_ftype_long_pcvoid
6971 = build_function_type_list (V16QI_type_node,
6972 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6973 tree v8hi_ftype_long_pcvoid
6974 = build_function_type_list (V8HI_type_node,
6975 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6976 tree v4si_ftype_long_pcvoid
6977 = build_function_type_list (V4SI_type_node,
6978 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6980 tree void_ftype_v4si_long_pvoid
6981 = build_function_type_list (void_type_node,
6982 V4SI_type_node, long_integer_type_node,
6983 pvoid_type_node, NULL_TREE);
6984 tree void_ftype_v16qi_long_pvoid
6985 = build_function_type_list (void_type_node,
6986 V16QI_type_node, long_integer_type_node,
6987 pvoid_type_node, NULL_TREE);
6988 tree void_ftype_v8hi_long_pvoid
6989 = build_function_type_list (void_type_node,
6990 V8HI_type_node, long_integer_type_node,
6991 pvoid_type_node, NULL_TREE);
6992 tree int_ftype_int_v8hi_v8hi
6993 = build_function_type_list (integer_type_node,
6994 integer_type_node, V8HI_type_node,
6995 V8HI_type_node, NULL_TREE);
6996 tree int_ftype_int_v16qi_v16qi
6997 = build_function_type_list (integer_type_node,
6998 integer_type_node, V16QI_type_node,
6999 V16QI_type_node, NULL_TREE);
7000 tree int_ftype_int_v4sf_v4sf
7001 = build_function_type_list (integer_type_node,
7002 integer_type_node, V4SF_type_node,
7003 V4SF_type_node, NULL_TREE);
7004 tree v4si_ftype_v4si
7005 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7006 tree v8hi_ftype_v8hi
7007 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7008 tree v16qi_ftype_v16qi
7009 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7010 tree v4sf_ftype_v4sf
7011 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7012 tree void_ftype_pcvoid_int_int
7013 = build_function_type_list (void_type_node,
7014 pcvoid_type_node, integer_type_node,
7015 integer_type_node, NULL_TREE);
7016 tree int_ftype_pcchar
7017 = build_function_type_list (integer_type_node,
7018 pcchar_type_node, NULL_TREE);
7020 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7021 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7022 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7023 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7024 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7025 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7026 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7027 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7028 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7029 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7030 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7031 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7032 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7033 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7034 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7035 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7036 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7037 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7038 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7039 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7040 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7041 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7042 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7043 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7044 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7045 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7046 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7047 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7048 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7049 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7050 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7051 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7053 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7054 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7055 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7057 /* Add the DST variants. */
7058 d = (struct builtin_description *) bdesc_dst;
7059 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7060 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7062 /* Initialize the predicates. */
7063 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7064 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7066 enum machine_mode mode1;
7069 mode1 = insn_data[dp->icode].operand[1].mode;
7074 type = int_ftype_int_v4si_v4si;
7077 type = int_ftype_int_v8hi_v8hi;
7080 type = int_ftype_int_v16qi_v16qi;
7083 type = int_ftype_int_v4sf_v4sf;
7089 def_builtin (dp->mask, dp->name, type, dp->code);
7092 /* Initialize the abs* operators. */
7093 d = (struct builtin_description *) bdesc_abs;
7094 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7096 enum machine_mode mode0;
7099 mode0 = insn_data[d->icode].operand[0].mode;
7104 type = v4si_ftype_v4si;
7107 type = v8hi_ftype_v8hi;
7110 type = v16qi_ftype_v16qi;
7113 type = v4sf_ftype_v4sf;
7119 def_builtin (d->mask, d->name, type, d->code);
7124 rs6000_common_init_builtins (void)
7126 struct builtin_description *d;
7129 tree v4sf_ftype_v4sf_v4sf_v16qi
7130 = build_function_type_list (V4SF_type_node,
7131 V4SF_type_node, V4SF_type_node,
7132 V16QI_type_node, NULL_TREE);
7133 tree v4si_ftype_v4si_v4si_v16qi
7134 = build_function_type_list (V4SI_type_node,
7135 V4SI_type_node, V4SI_type_node,
7136 V16QI_type_node, NULL_TREE);
7137 tree v8hi_ftype_v8hi_v8hi_v16qi
7138 = build_function_type_list (V8HI_type_node,
7139 V8HI_type_node, V8HI_type_node,
7140 V16QI_type_node, NULL_TREE);
7141 tree v16qi_ftype_v16qi_v16qi_v16qi
7142 = build_function_type_list (V16QI_type_node,
7143 V16QI_type_node, V16QI_type_node,
7144 V16QI_type_node, NULL_TREE);
7146 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7148 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7149 tree v16qi_ftype_int
7150 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7151 tree v8hi_ftype_v16qi
7152 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7153 tree v4sf_ftype_v4sf
7154 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7156 tree v2si_ftype_v2si_v2si
7157 = build_function_type_list (opaque_V2SI_type_node,
7158 opaque_V2SI_type_node,
7159 opaque_V2SI_type_node, NULL_TREE);
7161 tree v2sf_ftype_v2sf_v2sf
7162 = build_function_type_list (opaque_V2SF_type_node,
7163 opaque_V2SF_type_node,
7164 opaque_V2SF_type_node, NULL_TREE);
7166 tree v2si_ftype_int_int
7167 = build_function_type_list (opaque_V2SI_type_node,
7168 integer_type_node, integer_type_node,
7171 tree v2si_ftype_v2si
7172 = build_function_type_list (opaque_V2SI_type_node,
7173 opaque_V2SI_type_node, NULL_TREE);
7175 tree v2sf_ftype_v2sf
7176 = build_function_type_list (opaque_V2SF_type_node,
7177 opaque_V2SF_type_node, NULL_TREE);
7179 tree v2sf_ftype_v2si
7180 = build_function_type_list (opaque_V2SF_type_node,
7181 opaque_V2SI_type_node, NULL_TREE);
7183 tree v2si_ftype_v2sf
7184 = build_function_type_list (opaque_V2SI_type_node,
7185 opaque_V2SF_type_node, NULL_TREE);
7187 tree v2si_ftype_v2si_char
7188 = build_function_type_list (opaque_V2SI_type_node,
7189 opaque_V2SI_type_node,
7190 char_type_node, NULL_TREE);
7192 tree v2si_ftype_int_char
7193 = build_function_type_list (opaque_V2SI_type_node,
7194 integer_type_node, char_type_node, NULL_TREE);
7196 tree v2si_ftype_char
7197 = build_function_type_list (opaque_V2SI_type_node,
7198 char_type_node, NULL_TREE);
7200 tree int_ftype_int_int
7201 = build_function_type_list (integer_type_node,
7202 integer_type_node, integer_type_node,
7205 tree v4si_ftype_v4si_v4si
7206 = build_function_type_list (V4SI_type_node,
7207 V4SI_type_node, V4SI_type_node, NULL_TREE);
7208 tree v4sf_ftype_v4si_int
7209 = build_function_type_list (V4SF_type_node,
7210 V4SI_type_node, integer_type_node, NULL_TREE);
7211 tree v4si_ftype_v4sf_int
7212 = build_function_type_list (V4SI_type_node,
7213 V4SF_type_node, integer_type_node, NULL_TREE);
7214 tree v4si_ftype_v4si_int
7215 = build_function_type_list (V4SI_type_node,
7216 V4SI_type_node, integer_type_node, NULL_TREE);
7217 tree v8hi_ftype_v8hi_int
7218 = build_function_type_list (V8HI_type_node,
7219 V8HI_type_node, integer_type_node, NULL_TREE);
7220 tree v16qi_ftype_v16qi_int
7221 = build_function_type_list (V16QI_type_node,
7222 V16QI_type_node, integer_type_node, NULL_TREE);
7223 tree v16qi_ftype_v16qi_v16qi_int
7224 = build_function_type_list (V16QI_type_node,
7225 V16QI_type_node, V16QI_type_node,
7226 integer_type_node, NULL_TREE);
7227 tree v8hi_ftype_v8hi_v8hi_int
7228 = build_function_type_list (V8HI_type_node,
7229 V8HI_type_node, V8HI_type_node,
7230 integer_type_node, NULL_TREE);
7231 tree v4si_ftype_v4si_v4si_int
7232 = build_function_type_list (V4SI_type_node,
7233 V4SI_type_node, V4SI_type_node,
7234 integer_type_node, NULL_TREE);
7235 tree v4sf_ftype_v4sf_v4sf_int
7236 = build_function_type_list (V4SF_type_node,
7237 V4SF_type_node, V4SF_type_node,
7238 integer_type_node, NULL_TREE);
7239 tree v4sf_ftype_v4sf_v4sf
7240 = build_function_type_list (V4SF_type_node,
7241 V4SF_type_node, V4SF_type_node, NULL_TREE);
7242 tree v4sf_ftype_v4sf_v4sf_v4si
7243 = build_function_type_list (V4SF_type_node,
7244 V4SF_type_node, V4SF_type_node,
7245 V4SI_type_node, NULL_TREE);
7246 tree v4sf_ftype_v4sf_v4sf_v4sf
7247 = build_function_type_list (V4SF_type_node,
7248 V4SF_type_node, V4SF_type_node,
7249 V4SF_type_node, NULL_TREE);
7250 tree v4si_ftype_v4si_v4si_v4si
7251 = build_function_type_list (V4SI_type_node,
7252 V4SI_type_node, V4SI_type_node,
7253 V4SI_type_node, NULL_TREE);
7254 tree v8hi_ftype_v8hi_v8hi
7255 = build_function_type_list (V8HI_type_node,
7256 V8HI_type_node, V8HI_type_node, NULL_TREE);
7257 tree v8hi_ftype_v8hi_v8hi_v8hi
7258 = build_function_type_list (V8HI_type_node,
7259 V8HI_type_node, V8HI_type_node,
7260 V8HI_type_node, NULL_TREE);
7261 tree v4si_ftype_v8hi_v8hi_v4si
7262 = build_function_type_list (V4SI_type_node,
7263 V8HI_type_node, V8HI_type_node,
7264 V4SI_type_node, NULL_TREE);
7265 tree v4si_ftype_v16qi_v16qi_v4si
7266 = build_function_type_list (V4SI_type_node,
7267 V16QI_type_node, V16QI_type_node,
7268 V4SI_type_node, NULL_TREE);
7269 tree v16qi_ftype_v16qi_v16qi
7270 = build_function_type_list (V16QI_type_node,
7271 V16QI_type_node, V16QI_type_node, NULL_TREE);
7272 tree v4si_ftype_v4sf_v4sf
7273 = build_function_type_list (V4SI_type_node,
7274 V4SF_type_node, V4SF_type_node, NULL_TREE);
7275 tree v8hi_ftype_v16qi_v16qi
7276 = build_function_type_list (V8HI_type_node,
7277 V16QI_type_node, V16QI_type_node, NULL_TREE);
7278 tree v4si_ftype_v8hi_v8hi
7279 = build_function_type_list (V4SI_type_node,
7280 V8HI_type_node, V8HI_type_node, NULL_TREE);
7281 tree v8hi_ftype_v4si_v4si
7282 = build_function_type_list (V8HI_type_node,
7283 V4SI_type_node, V4SI_type_node, NULL_TREE);
7284 tree v16qi_ftype_v8hi_v8hi
7285 = build_function_type_list (V16QI_type_node,
7286 V8HI_type_node, V8HI_type_node, NULL_TREE);
7287 tree v4si_ftype_v16qi_v4si
7288 = build_function_type_list (V4SI_type_node,
7289 V16QI_type_node, V4SI_type_node, NULL_TREE);
7290 tree v4si_ftype_v16qi_v16qi
7291 = build_function_type_list (V4SI_type_node,
7292 V16QI_type_node, V16QI_type_node, NULL_TREE);
7293 tree v4si_ftype_v8hi_v4si
7294 = build_function_type_list (V4SI_type_node,
7295 V8HI_type_node, V4SI_type_node, NULL_TREE);
7296 tree v4si_ftype_v8hi
7297 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7298 tree int_ftype_v4si_v4si
7299 = build_function_type_list (integer_type_node,
7300 V4SI_type_node, V4SI_type_node, NULL_TREE);
7301 tree int_ftype_v4sf_v4sf
7302 = build_function_type_list (integer_type_node,
7303 V4SF_type_node, V4SF_type_node, NULL_TREE);
7304 tree int_ftype_v16qi_v16qi
7305 = build_function_type_list (integer_type_node,
7306 V16QI_type_node, V16QI_type_node, NULL_TREE);
7307 tree int_ftype_v8hi_v8hi
7308 = build_function_type_list (integer_type_node,
7309 V8HI_type_node, V8HI_type_node, NULL_TREE);
7311 /* Add the simple ternary operators. */
7312 d = (struct builtin_description *) bdesc_3arg;
7313 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7316 enum machine_mode mode0, mode1, mode2, mode3;
7319 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7322 mode0 = insn_data[d->icode].operand[0].mode;
7323 mode1 = insn_data[d->icode].operand[1].mode;
7324 mode2 = insn_data[d->icode].operand[2].mode;
7325 mode3 = insn_data[d->icode].operand[3].mode;
7327 /* When all four are of the same mode. */
7328 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7333 type = v4si_ftype_v4si_v4si_v4si;
7336 type = v4sf_ftype_v4sf_v4sf_v4sf;
7339 type = v8hi_ftype_v8hi_v8hi_v8hi;
7342 type = v16qi_ftype_v16qi_v16qi_v16qi;
7348 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7353 type = v4si_ftype_v4si_v4si_v16qi;
7356 type = v4sf_ftype_v4sf_v4sf_v16qi;
7359 type = v8hi_ftype_v8hi_v8hi_v16qi;
7362 type = v16qi_ftype_v16qi_v16qi_v16qi;
7368 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7369 && mode3 == V4SImode)
7370 type = v4si_ftype_v16qi_v16qi_v4si;
7371 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7372 && mode3 == V4SImode)
7373 type = v4si_ftype_v8hi_v8hi_v4si;
7374 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7375 && mode3 == V4SImode)
7376 type = v4sf_ftype_v4sf_v4sf_v4si;
7378 /* vchar, vchar, vchar, 4 bit literal. */
7379 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7381 type = v16qi_ftype_v16qi_v16qi_int;
7383 /* vshort, vshort, vshort, 4 bit literal. */
7384 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7386 type = v8hi_ftype_v8hi_v8hi_int;
7388 /* vint, vint, vint, 4 bit literal. */
7389 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7391 type = v4si_ftype_v4si_v4si_int;
7393 /* vfloat, vfloat, vfloat, 4 bit literal. */
7394 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7396 type = v4sf_ftype_v4sf_v4sf_int;
7401 def_builtin (d->mask, d->name, type, d->code);
7404 /* Add the simple binary operators. */
7405 d = (struct builtin_description *) bdesc_2arg;
7406 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7408 enum machine_mode mode0, mode1, mode2;
7411 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7414 mode0 = insn_data[d->icode].operand[0].mode;
7415 mode1 = insn_data[d->icode].operand[1].mode;
7416 mode2 = insn_data[d->icode].operand[2].mode;
7418 /* When all three operands are of the same mode. */
7419 if (mode0 == mode1 && mode1 == mode2)
7424 type = v4sf_ftype_v4sf_v4sf;
7427 type = v4si_ftype_v4si_v4si;
7430 type = v16qi_ftype_v16qi_v16qi;
7433 type = v8hi_ftype_v8hi_v8hi;
7436 type = v2si_ftype_v2si_v2si;
7439 type = v2sf_ftype_v2sf_v2sf;
7442 type = int_ftype_int_int;
7449 /* A few other combos we really don't want to do manually. */
7451 /* vint, vfloat, vfloat. */
7452 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7453 type = v4si_ftype_v4sf_v4sf;
7455 /* vshort, vchar, vchar. */
7456 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7457 type = v8hi_ftype_v16qi_v16qi;
7459 /* vint, vshort, vshort. */
7460 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7461 type = v4si_ftype_v8hi_v8hi;
7463 /* vshort, vint, vint. */
7464 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7465 type = v8hi_ftype_v4si_v4si;
7467 /* vchar, vshort, vshort. */
7468 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7469 type = v16qi_ftype_v8hi_v8hi;
7471 /* vint, vchar, vint. */
7472 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7473 type = v4si_ftype_v16qi_v4si;
7475 /* vint, vchar, vchar. */
7476 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7477 type = v4si_ftype_v16qi_v16qi;
7479 /* vint, vshort, vint. */
7480 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7481 type = v4si_ftype_v8hi_v4si;
7483 /* vint, vint, 5 bit literal. */
7484 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7485 type = v4si_ftype_v4si_int;
7487 /* vshort, vshort, 5 bit literal. */
7488 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7489 type = v8hi_ftype_v8hi_int;
7491 /* vchar, vchar, 5 bit literal. */
7492 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7493 type = v16qi_ftype_v16qi_int;
7495 /* vfloat, vint, 5 bit literal. */
7496 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7497 type = v4sf_ftype_v4si_int;
7499 /* vint, vfloat, 5 bit literal. */
7500 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7501 type = v4si_ftype_v4sf_int;
7503 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7504 type = v2si_ftype_int_int;
7506 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7507 type = v2si_ftype_v2si_char;
7509 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7510 type = v2si_ftype_int_char;
7513 else if (mode0 == SImode)
7518 type = int_ftype_v4si_v4si;
7521 type = int_ftype_v4sf_v4sf;
7524 type = int_ftype_v16qi_v16qi;
7527 type = int_ftype_v8hi_v8hi;
7537 def_builtin (d->mask, d->name, type, d->code);
7540 /* Add the simple unary operators. */
7541 d = (struct builtin_description *) bdesc_1arg;
7542 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7544 enum machine_mode mode0, mode1;
7547 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7550 mode0 = insn_data[d->icode].operand[0].mode;
7551 mode1 = insn_data[d->icode].operand[1].mode;
7553 if (mode0 == V4SImode && mode1 == QImode)
7554 type = v4si_ftype_int;
7555 else if (mode0 == V8HImode && mode1 == QImode)
7556 type = v8hi_ftype_int;
7557 else if (mode0 == V16QImode && mode1 == QImode)
7558 type = v16qi_ftype_int;
7559 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7560 type = v4sf_ftype_v4sf;
7561 else if (mode0 == V8HImode && mode1 == V16QImode)
7562 type = v8hi_ftype_v16qi;
7563 else if (mode0 == V4SImode && mode1 == V8HImode)
7564 type = v4si_ftype_v8hi;
7565 else if (mode0 == V2SImode && mode1 == V2SImode)
7566 type = v2si_ftype_v2si;
7567 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7568 type = v2sf_ftype_v2sf;
7569 else if (mode0 == V2SFmode && mode1 == V2SImode)
7570 type = v2sf_ftype_v2si;
7571 else if (mode0 == V2SImode && mode1 == V2SFmode)
7572 type = v2si_ftype_v2sf;
7573 else if (mode0 == V2SImode && mode1 == QImode)
7574 type = v2si_ftype_char;
7578 def_builtin (d->mask, d->name, type, d->code);
7583 rs6000_init_libfuncs (void)
7585 if (!TARGET_HARD_FLOAT)
7588 if (DEFAULT_ABI != ABI_V4)
7590 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7592 /* AIX library routines for float->int conversion. */
7593 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7594 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7595 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7596 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7599 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7600 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7601 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7602 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7603 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7607 /* 32-bit SVR4 quad floating point routines. */
7609 set_optab_libfunc (add_optab, TFmode, "_q_add");
7610 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7611 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7612 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7613 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7614 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7615 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7617 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7618 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7619 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7620 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7621 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7622 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7624 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7625 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7626 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7627 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7628 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7629 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7630 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7634 /* Expand a block move operation, and return 1 if successful. Return 0
7635 if we should let the compiler generate normal code.
7637 operands[0] is the destination
7638 operands[1] is the source
7639 operands[2] is the length
7640 operands[3] is the alignment */
7642 #define MAX_MOVE_REG 4
7645 expand_block_move (rtx operands[])
7647 rtx orig_dest = operands[0];
7648 rtx orig_src = operands[1];
7649 rtx bytes_rtx = operands[2];
7650 rtx align_rtx = operands[3];
7651 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7656 rtx stores[MAX_MOVE_REG];
7659 /* If this is not a fixed size move, just call memcpy */
7663 /* If this is not a fixed size alignment, abort */
7664 if (GET_CODE (align_rtx) != CONST_INT)
7666 align = INTVAL (align_rtx);
7668 /* Anything to move? */
7669 bytes = INTVAL (bytes_rtx);
7673 /* store_one_arg depends on expand_block_move to handle at least the size of
7674 reg_parm_stack_space. */
7675 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7678 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7681 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7682 rtx (*mov) (rtx, rtx);
7684 enum machine_mode mode = BLKmode;
7688 && bytes > 24 /* move up to 32 bytes at a time */
7696 && ! fixed_regs[12])
7698 move_bytes = (bytes > 32) ? 32 : bytes;
7699 gen_func.movstrsi = gen_movstrsi_8reg;
7701 else if (TARGET_STRING
7702 && bytes > 16 /* move up to 24 bytes at a time */
7708 && ! fixed_regs[10])
7710 move_bytes = (bytes > 24) ? 24 : bytes;
7711 gen_func.movstrsi = gen_movstrsi_6reg;
7713 else if (TARGET_STRING
7714 && bytes > 8 /* move up to 16 bytes at a time */
7720 move_bytes = (bytes > 16) ? 16 : bytes;
7721 gen_func.movstrsi = gen_movstrsi_4reg;
7723 else if (bytes >= 8 && TARGET_POWERPC64
7724 /* 64-bit loads and stores require word-aligned
7726 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7730 gen_func.mov = gen_movdi;
7732 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7733 { /* move up to 8 bytes at a time */
7734 move_bytes = (bytes > 8) ? 8 : bytes;
7735 gen_func.movstrsi = gen_movstrsi_2reg;
7737 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7738 { /* move 4 bytes */
7741 gen_func.mov = gen_movsi;
7743 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7744 { /* move 2 bytes */
7747 gen_func.mov = gen_movhi;
7749 else if (TARGET_STRING && bytes > 1)
7750 { /* move up to 4 bytes at a time */
7751 move_bytes = (bytes > 4) ? 4 : bytes;
7752 gen_func.movstrsi = gen_movstrsi_1reg;
7754 else /* move 1 byte at a time */
7758 gen_func.mov = gen_movqi;
7761 src = adjust_address (orig_src, mode, offset);
7762 dest = adjust_address (orig_dest, mode, offset);
7764 if (mode != BLKmode)
7766 rtx tmp_reg = gen_reg_rtx (mode);
7768 emit_insn ((*gen_func.mov) (tmp_reg, src));
7769 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7772 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7775 for (i = 0; i < num_reg; i++)
7776 emit_insn (stores[i]);
7780 if (mode == BLKmode)
7782 /* Move the address into scratch registers. The movstrsi
7783 patterns require zero offset. */
7784 if (!REG_P (XEXP (src, 0)))
7786 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7787 src = replace_equiv_address (src, src_reg);
7789 set_mem_size (src, GEN_INT (move_bytes));
7791 if (!REG_P (XEXP (dest, 0)))
7793 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7794 dest = replace_equiv_address (dest, dest_reg);
7796 set_mem_size (dest, GEN_INT (move_bytes));
7798 emit_insn ((*gen_func.movstrsi) (dest, src,
7799 GEN_INT (move_bytes & 31),
7808 /* Return 1 if OP is a load multiple operation. It is known to be a
7809 PARALLEL and the first section will be tested. */
7812 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7814 int count = XVECLEN (op, 0);
7815 unsigned int dest_regno;
7819 /* Perform a quick check so we don't blow up below. */
7821 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7822 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7823 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7826 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7827 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7829 for (i = 1; i < count; i++)
7831 rtx elt = XVECEXP (op, 0, i);
7833 if (GET_CODE (elt) != SET
7834 || GET_CODE (SET_DEST (elt)) != REG
7835 || GET_MODE (SET_DEST (elt)) != SImode
7836 || REGNO (SET_DEST (elt)) != dest_regno + i
7837 || GET_CODE (SET_SRC (elt)) != MEM
7838 || GET_MODE (SET_SRC (elt)) != SImode
7839 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7840 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7841 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7842 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7849 /* Similar, but tests for store multiple. Here, the second vector element
7850 is a CLOBBER. It will be tested later. */
7853 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7855 int count = XVECLEN (op, 0) - 1;
7856 unsigned int src_regno;
7860 /* Perform a quick check so we don't blow up below. */
7862 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7863 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7864 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7867 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7868 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7870 for (i = 1; i < count; i++)
7872 rtx elt = XVECEXP (op, 0, i + 1);
7874 if (GET_CODE (elt) != SET
7875 || GET_CODE (SET_SRC (elt)) != REG
7876 || GET_MODE (SET_SRC (elt)) != SImode
7877 || REGNO (SET_SRC (elt)) != src_regno + i
7878 || GET_CODE (SET_DEST (elt)) != MEM
7879 || GET_MODE (SET_DEST (elt)) != SImode
7880 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7881 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7882 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7883 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7890 /* Return a string to perform a load_multiple operation.
7891 operands[0] is the vector.
7892 operands[1] is the source address.
7893 operands[2] is the first destination register. */
7896 rs6000_output_load_multiple (rtx operands[3])
7898 /* We have to handle the case where the pseudo used to contain the address
7899 is assigned to one of the output registers. */
7901 int words = XVECLEN (operands[0], 0);
7904 if (XVECLEN (operands[0], 0) == 1)
7905 return "{l|lwz} %2,0(%1)";
7907 for (i = 0; i < words; i++)
7908 if (refers_to_regno_p (REGNO (operands[2]) + i,
7909 REGNO (operands[2]) + i + 1, operands[1], 0))
7913 xop[0] = GEN_INT (4 * (words-1));
7914 xop[1] = operands[1];
7915 xop[2] = operands[2];
7916 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7921 xop[0] = GEN_INT (4 * (words-1));
7922 xop[1] = operands[1];
7923 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7924 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7929 for (j = 0; j < words; j++)
7932 xop[0] = GEN_INT (j * 4);
7933 xop[1] = operands[1];
7934 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7935 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7937 xop[0] = GEN_INT (i * 4);
7938 xop[1] = operands[1];
7939 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7944 return "{lsi|lswi} %2,%1,%N0";
7947 /* Return 1 for a parallel vrsave operation. */
7950 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7952 int count = XVECLEN (op, 0);
7953 unsigned int dest_regno, src_regno;
7957 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7958 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7959 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7962 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7963 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7965 if (dest_regno != VRSAVE_REGNO
7966 && src_regno != VRSAVE_REGNO)
7969 for (i = 1; i < count; i++)
7971 rtx elt = XVECEXP (op, 0, i);
7973 if (GET_CODE (elt) != CLOBBER
7974 && GET_CODE (elt) != SET)
7981 /* Return 1 for an PARALLEL suitable for mfcr. */
7984 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7986 int count = XVECLEN (op, 0);
7989 /* Perform a quick check so we don't blow up below. */
7991 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7992 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7993 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7996 for (i = 0; i < count; i++)
7998 rtx exp = XVECEXP (op, 0, i);
8003 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8005 if (GET_CODE (src_reg) != REG
8006 || GET_MODE (src_reg) != CCmode
8007 || ! CR_REGNO_P (REGNO (src_reg)))
8010 if (GET_CODE (exp) != SET
8011 || GET_CODE (SET_DEST (exp)) != REG
8012 || GET_MODE (SET_DEST (exp)) != SImode
8013 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8015 unspec = SET_SRC (exp);
8016 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8018 if (GET_CODE (unspec) != UNSPEC
8019 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8020 || XVECLEN (unspec, 0) != 2
8021 || XVECEXP (unspec, 0, 0) != src_reg
8022 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8023 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8029 /* Return 1 for an PARALLEL suitable for mtcrf. */
8032 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8034 int count = XVECLEN (op, 0);
8038 /* Perform a quick check so we don't blow up below. */
8040 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8041 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8042 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8044 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8046 if (GET_CODE (src_reg) != REG
8047 || GET_MODE (src_reg) != SImode
8048 || ! INT_REGNO_P (REGNO (src_reg)))
8051 for (i = 0; i < count; i++)
8053 rtx exp = XVECEXP (op, 0, i);
8057 if (GET_CODE (exp) != SET
8058 || GET_CODE (SET_DEST (exp)) != REG
8059 || GET_MODE (SET_DEST (exp)) != CCmode
8060 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8062 unspec = SET_SRC (exp);
8063 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8065 if (GET_CODE (unspec) != UNSPEC
8066 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8067 || XVECLEN (unspec, 0) != 2
8068 || XVECEXP (unspec, 0, 0) != src_reg
8069 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8070 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8076 /* Return 1 for an PARALLEL suitable for lmw. */
8079 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8081 int count = XVECLEN (op, 0);
8082 unsigned int dest_regno;
8084 unsigned int base_regno;
8085 HOST_WIDE_INT offset;
8088 /* Perform a quick check so we don't blow up below. */
8090 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8091 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8092 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8095 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8096 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8099 || count != 32 - (int) dest_regno)
8102 if (legitimate_indirect_address_p (src_addr, 0))
8105 base_regno = REGNO (src_addr);
8106 if (base_regno == 0)
8109 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8111 offset = INTVAL (XEXP (src_addr, 1));
8112 base_regno = REGNO (XEXP (src_addr, 0));
8117 for (i = 0; i < count; i++)
8119 rtx elt = XVECEXP (op, 0, i);
8122 HOST_WIDE_INT newoffset;
8124 if (GET_CODE (elt) != SET
8125 || GET_CODE (SET_DEST (elt)) != REG
8126 || GET_MODE (SET_DEST (elt)) != SImode
8127 || REGNO (SET_DEST (elt)) != dest_regno + i
8128 || GET_CODE (SET_SRC (elt)) != MEM
8129 || GET_MODE (SET_SRC (elt)) != SImode)
8131 newaddr = XEXP (SET_SRC (elt), 0);
8132 if (legitimate_indirect_address_p (newaddr, 0))
8137 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8139 addr_reg = XEXP (newaddr, 0);
8140 newoffset = INTVAL (XEXP (newaddr, 1));
8144 if (REGNO (addr_reg) != base_regno
8145 || newoffset != offset + 4 * i)
8152 /* Return 1 for an PARALLEL suitable for stmw. */
8155 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8157 int count = XVECLEN (op, 0);
8158 unsigned int src_regno;
8160 unsigned int base_regno;
8161 HOST_WIDE_INT offset;
8164 /* Perform a quick check so we don't blow up below. */
8166 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8167 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8168 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8171 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8172 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8175 || count != 32 - (int) src_regno)
8178 if (legitimate_indirect_address_p (dest_addr, 0))
8181 base_regno = REGNO (dest_addr);
8182 if (base_regno == 0)
8185 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8187 offset = INTVAL (XEXP (dest_addr, 1));
8188 base_regno = REGNO (XEXP (dest_addr, 0));
8193 for (i = 0; i < count; i++)
8195 rtx elt = XVECEXP (op, 0, i);
8198 HOST_WIDE_INT newoffset;
8200 if (GET_CODE (elt) != SET
8201 || GET_CODE (SET_SRC (elt)) != REG
8202 || GET_MODE (SET_SRC (elt)) != SImode
8203 || REGNO (SET_SRC (elt)) != src_regno + i
8204 || GET_CODE (SET_DEST (elt)) != MEM
8205 || GET_MODE (SET_DEST (elt)) != SImode)
8207 newaddr = XEXP (SET_DEST (elt), 0);
8208 if (legitimate_indirect_address_p (newaddr, 0))
8213 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8215 addr_reg = XEXP (newaddr, 0);
8216 newoffset = INTVAL (XEXP (newaddr, 1));
8220 if (REGNO (addr_reg) != base_regno
8221 || newoffset != offset + 4 * i)
8228 /* A validation routine: say whether CODE, a condition code, and MODE
8229 match. The other alternatives either don't make sense or should
8230 never be generated. */
8233 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8235 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8236 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8237 || GET_MODE_CLASS (mode) != MODE_CC)
8240 /* These don't make sense. */
8241 if ((code == GT || code == LT || code == GE || code == LE)
8242 && mode == CCUNSmode)
8245 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8246 && mode != CCUNSmode)
8249 if (mode != CCFPmode
8250 && (code == ORDERED || code == UNORDERED
8251 || code == UNEQ || code == LTGT
8252 || code == UNGT || code == UNLT
8253 || code == UNGE || code == UNLE))
8256 /* These should never be generated except for
8257 flag_finite_math_only. */
8258 if (mode == CCFPmode
8259 && ! flag_finite_math_only
8260 && (code == LE || code == GE
8261 || code == UNEQ || code == LTGT
8262 || code == UNGT || code == UNLT))
8265 /* These are invalid; the information is not there. */
8266 if (mode == CCEQmode
8267 && code != EQ && code != NE)
8271 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8272 We only check the opcode against the mode of the CC value here. */
8275 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8277 enum rtx_code code = GET_CODE (op);
8278 enum machine_mode cc_mode;
8280 if (!COMPARISON_P (op))
8283 cc_mode = GET_MODE (XEXP (op, 0));
8284 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8287 validate_condition_mode (code, cc_mode);
8292 /* Return 1 if OP is a comparison operation that is valid for a branch
8293 insn and which is true if the corresponding bit in the CC register
8297 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8301 if (! branch_comparison_operator (op, mode))
8304 code = GET_CODE (op);
8305 return (code == EQ || code == LT || code == GT
8306 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
8307 || code == LTU || code == GTU
8308 || code == UNORDERED);
8311 /* Return 1 if OP is a comparison operation that is valid for an scc
8312 insn: it must be a positive comparison. */
8315 scc_comparison_operator (rtx op, enum machine_mode mode)
8317 return branch_positive_comparison_operator (op, mode);
8321 trap_comparison_operator (rtx op, enum machine_mode mode)
8323 if (mode != VOIDmode && mode != GET_MODE (op))
8325 return COMPARISON_P (op);
8329 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8331 enum rtx_code code = GET_CODE (op);
8332 return (code == AND || code == IOR || code == XOR);
8336 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8338 enum rtx_code code = GET_CODE (op);
8339 return (code == IOR || code == XOR);
8343 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8345 enum rtx_code code = GET_CODE (op);
8346 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8349 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8350 mask required to convert the result of a rotate insn into a shift
8351 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8354 includes_lshift_p (rtx shiftop, rtx andop)
8356 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8358 shift_mask <<= INTVAL (shiftop);
8360 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8363 /* Similar, but for right shift. */
8366 includes_rshift_p (rtx shiftop, rtx andop)
8368 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8370 shift_mask >>= INTVAL (shiftop);
8372 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8375 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8376 to perform a left shift. It must have exactly SHIFTOP least
8377 significant 0's, then one or more 1's, then zero or more 0's. */
8380 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8382 if (GET_CODE (andop) == CONST_INT)
8384 HOST_WIDE_INT c, lsb, shift_mask;
8387 if (c == 0 || c == ~0)
8391 shift_mask <<= INTVAL (shiftop);
8393 /* Find the least significant one bit. */
8396 /* It must coincide with the LSB of the shift mask. */
8397 if (-lsb != shift_mask)
8400 /* Invert to look for the next transition (if any). */
8403 /* Remove the low group of ones (originally low group of zeros). */
8406 /* Again find the lsb, and check we have all 1's above. */
8410 else if (GET_CODE (andop) == CONST_DOUBLE
8411 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8413 HOST_WIDE_INT low, high, lsb;
8414 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8416 low = CONST_DOUBLE_LOW (andop);
8417 if (HOST_BITS_PER_WIDE_INT < 64)
8418 high = CONST_DOUBLE_HIGH (andop);
8420 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8421 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8424 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8426 shift_mask_high = ~0;
8427 if (INTVAL (shiftop) > 32)
8428 shift_mask_high <<= INTVAL (shiftop) - 32;
8432 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8439 return high == -lsb;
8442 shift_mask_low = ~0;
8443 shift_mask_low <<= INTVAL (shiftop);
8447 if (-lsb != shift_mask_low)
8450 if (HOST_BITS_PER_WIDE_INT < 64)
8455 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8458 return high == -lsb;
8462 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8468 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8469 to perform a left shift. It must have SHIFTOP or more least
8470 significant 0's, with the remainder of the word 1's. */
8473 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8475 if (GET_CODE (andop) == CONST_INT)
8477 HOST_WIDE_INT c, lsb, shift_mask;
8480 shift_mask <<= INTVAL (shiftop);
8483 /* Find the least significant one bit. */
8486 /* It must be covered by the shift mask.
8487 This test also rejects c == 0. */
8488 if ((lsb & shift_mask) == 0)
8491 /* Check we have all 1's above the transition, and reject all 1's. */
8492 return c == -lsb && lsb != 1;
8494 else if (GET_CODE (andop) == CONST_DOUBLE
8495 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8497 HOST_WIDE_INT low, lsb, shift_mask_low;
8499 low = CONST_DOUBLE_LOW (andop);
8501 if (HOST_BITS_PER_WIDE_INT < 64)
8503 HOST_WIDE_INT high, shift_mask_high;
8505 high = CONST_DOUBLE_HIGH (andop);
8509 shift_mask_high = ~0;
8510 if (INTVAL (shiftop) > 32)
8511 shift_mask_high <<= INTVAL (shiftop) - 32;
8515 if ((lsb & shift_mask_high) == 0)
8518 return high == -lsb;
8524 shift_mask_low = ~0;
8525 shift_mask_low <<= INTVAL (shiftop);
8529 if ((lsb & shift_mask_low) == 0)
8532 return low == -lsb && lsb != 1;
8538 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8539 for lfq and stfq insns.
8541 Note reg1 and reg2 *must* be hard registers. To be sure we will
8542 abort if we are passed pseudo registers. */
8545 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8547 /* We might have been passed a SUBREG. */
8548 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8551 return (REGNO (reg1) == REGNO (reg2) - 1);
8554 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8555 addr1 and addr2 must be in consecutive memory locations
8556 (addr2 == addr1 + 8). */
8559 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8564 /* Extract an offset (if used) from the first addr. */
8565 if (GET_CODE (addr1) == PLUS)
8567 /* If not a REG, return zero. */
8568 if (GET_CODE (XEXP (addr1, 0)) != REG)
8572 reg1 = REGNO (XEXP (addr1, 0));
8573 /* The offset must be constant! */
8574 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8576 offset1 = INTVAL (XEXP (addr1, 1));
8579 else if (GET_CODE (addr1) != REG)
8583 reg1 = REGNO (addr1);
8584 /* This was a simple (mem (reg)) expression. Offset is 0. */
8588 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8589 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8590 register as addr1. */
8591 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8593 if (GET_CODE (addr2) != PLUS)
8596 if (GET_CODE (XEXP (addr2, 0)) != REG
8597 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8600 if (reg1 != REGNO (XEXP (addr2, 0)))
8603 /* The offset for the second addr must be 8 more than the first addr. */
8604 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8607 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8612 /* Return the register class of a scratch register needed to copy IN into
8613 or out of a register in CLASS in MODE. If it can be done directly,
8614 NO_REGS is returned. */
8617 secondary_reload_class (enum reg_class class,
8618 enum machine_mode mode ATTRIBUTE_UNUSED,
8623 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8625 && MACHOPIC_INDIRECT
8629 /* We cannot copy a symbolic operand directly into anything
8630 other than BASE_REGS for TARGET_ELF. So indicate that a
8631 register from BASE_REGS is needed as an intermediate
8634 On Darwin, pic addresses require a load from memory, which
8635 needs a base register. */
8636 if (class != BASE_REGS
8637 && (GET_CODE (in) == SYMBOL_REF
8638 || GET_CODE (in) == HIGH
8639 || GET_CODE (in) == LABEL_REF
8640 || GET_CODE (in) == CONST))
8644 if (GET_CODE (in) == REG)
8647 if (regno >= FIRST_PSEUDO_REGISTER)
8649 regno = true_regnum (in);
8650 if (regno >= FIRST_PSEUDO_REGISTER)
8654 else if (GET_CODE (in) == SUBREG)
8656 regno = true_regnum (in);
8657 if (regno >= FIRST_PSEUDO_REGISTER)
8663 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8665 if (class == GENERAL_REGS || class == BASE_REGS
8666 || (regno >= 0 && INT_REGNO_P (regno)))
8669 /* Constants, memory, and FP registers can go into FP registers. */
8670 if ((regno == -1 || FP_REGNO_P (regno))
8671 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8674 /* Memory, and AltiVec registers can go into AltiVec registers. */
8675 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8676 && class == ALTIVEC_REGS)
8679 /* We can copy among the CR registers. */
8680 if ((class == CR_REGS || class == CR0_REGS)
8681 && regno >= 0 && CR_REGNO_P (regno))
8684 /* Otherwise, we need GENERAL_REGS. */
8685 return GENERAL_REGS;
8688 /* Given a comparison operation, return the bit number in CCR to test. We
8689 know this is a valid comparison.
8691 SCC_P is 1 if this is for an scc. That means that %D will have been
8692 used instead of %C, so the bits will be in different places.
8694 Return -1 if OP isn't a valid comparison for some reason. */
8697 ccr_bit (rtx op, int scc_p)
8699 enum rtx_code code = GET_CODE (op);
8700 enum machine_mode cc_mode;
8705 if (!COMPARISON_P (op))
8710 if (GET_CODE (reg) != REG
8711 || ! CR_REGNO_P (REGNO (reg)))
8714 cc_mode = GET_MODE (reg);
8715 cc_regnum = REGNO (reg);
8716 base_bit = 4 * (cc_regnum - CR0_REGNO);
8718 validate_condition_mode (code, cc_mode);
8720 /* When generating a sCOND operation, only positive conditions are
8722 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8723 && code != GTU && code != LTU)
8729 if (TARGET_E500 && !TARGET_FPRS
8730 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8731 return base_bit + 1;
8732 return scc_p ? base_bit + 3 : base_bit + 2;
8734 if (TARGET_E500 && !TARGET_FPRS
8735 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8736 return base_bit + 1;
8737 return base_bit + 2;
8738 case GT: case GTU: case UNLE:
8739 return base_bit + 1;
8740 case LT: case LTU: case UNGE:
8742 case ORDERED: case UNORDERED:
8743 return base_bit + 3;
8746 /* If scc, we will have done a cror to put the bit in the
8747 unordered position. So test that bit. For integer, this is ! LT
8748 unless this is an scc insn. */
8749 return scc_p ? base_bit + 3 : base_bit;
8752 return scc_p ? base_bit + 3 : base_bit + 1;
8759 /* Return the GOT register. */
8762 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8764 /* The second flow pass currently (June 1999) can't update
8765 regs_ever_live without disturbing other parts of the compiler, so
8766 update it here to make the prolog/epilogue code happy. */
8767 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8768 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8770 current_function_uses_pic_offset_table = 1;
8772 return pic_offset_table_rtx;
8775 /* Function to init struct machine_function.
8776 This will be called, via a pointer variable,
8777 from push_function_context. */
8779 static struct machine_function *
8780 rs6000_init_machine_status (void)
8782 return ggc_alloc_cleared (sizeof (machine_function));
8785 /* These macros test for integers and extract the low-order bits. */
8787 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8788 && GET_MODE (X) == VOIDmode)
8790 #define INT_LOWPART(X) \
8791 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8797 unsigned long val = INT_LOWPART (op);
8799 /* If the high bit is zero, the value is the first 1 bit we find
8801 if ((val & 0x80000000) == 0)
8803 if ((val & 0xffffffff) == 0)
8807 while (((val <<= 1) & 0x80000000) == 0)
8812 /* If the high bit is set and the low bit is not, or the mask is all
8813 1's, the value is zero. */
8814 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8817 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8820 while (((val >>= 1) & 1) != 0)
8830 unsigned long val = INT_LOWPART (op);
8832 /* If the low bit is zero, the value is the first 1 bit we find from
8836 if ((val & 0xffffffff) == 0)
8840 while (((val >>= 1) & 1) == 0)
8846 /* If the low bit is set and the high bit is not, or the mask is all
8847 1's, the value is 31. */
8848 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8851 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8854 while (((val <<= 1) & 0x80000000) != 0)
8860 /* Locate some local-dynamic symbol still in use by this function
8861 so that we can print its name in some tls_ld pattern. */
8864 rs6000_get_some_local_dynamic_name (void)
8868 if (cfun->machine->some_ld_name)
8869 return cfun->machine->some_ld_name;
8871 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8873 && for_each_rtx (&PATTERN (insn),
8874 rs6000_get_some_local_dynamic_name_1, 0))
8875 return cfun->machine->some_ld_name;
8880 /* Helper function for rs6000_get_some_local_dynamic_name. */
8883 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8887 if (GET_CODE (x) == SYMBOL_REF)
8889 const char *str = XSTR (x, 0);
8890 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8892 cfun->machine->some_ld_name = str;
8900 /* Print an operand. Recognize special options, documented below. */
8903 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8904 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8906 #define SMALL_DATA_RELOC "sda21"
8907 #define SMALL_DATA_REG 0
8911 print_operand (FILE *file, rtx x, int code)
8915 unsigned HOST_WIDE_INT uval;
8920 /* Write out an instruction after the call which may be replaced
8921 with glue code by the loader. This depends on the AIX version. */
8922 asm_fprintf (file, RS6000_CALL_GLUE);
8925 /* %a is output_address. */
8928 /* If X is a constant integer whose low-order 5 bits are zero,
8929 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8930 in the AIX assembler where "sri" with a zero shift count
8931 writes a trash instruction. */
8932 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8939 /* If constant, low-order 16 bits of constant, unsigned.
8940 Otherwise, write normally. */
8942 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8944 print_operand (file, x, 0);
8948 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8949 for 64-bit mask direction. */
8950 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8953 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8957 /* X is a CR register. Print the number of the EQ bit of the CR */
8958 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8959 output_operand_lossage ("invalid %%E value");
8961 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8965 /* X is a CR register. Print the shift count needed to move it
8966 to the high-order four bits. */
8967 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8968 output_operand_lossage ("invalid %%f value");
8970 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8974 /* Similar, but print the count for the rotate in the opposite
8976 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8977 output_operand_lossage ("invalid %%F value");
8979 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8983 /* X is a constant integer. If it is negative, print "m",
8984 otherwise print "z". This is to make an aze or ame insn. */
8985 if (GET_CODE (x) != CONST_INT)
8986 output_operand_lossage ("invalid %%G value");
8987 else if (INTVAL (x) >= 0)
8994 /* If constant, output low-order five bits. Otherwise, write
8997 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8999 print_operand (file, x, 0);
9003 /* If constant, output low-order six bits. Otherwise, write
9006 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9008 print_operand (file, x, 0);
9012 /* Print `i' if this is a constant, else nothing. */
9018 /* Write the bit number in CCR for jump. */
9021 output_operand_lossage ("invalid %%j code");
9023 fprintf (file, "%d", i);
9027 /* Similar, but add one for shift count in rlinm for scc and pass
9028 scc flag to `ccr_bit'. */
9031 output_operand_lossage ("invalid %%J code");
9033 /* If we want bit 31, write a shift count of zero, not 32. */
9034 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9038 /* X must be a constant. Write the 1's complement of the
9041 output_operand_lossage ("invalid %%k value");
9043 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9047 /* X must be a symbolic constant on ELF. Write an
9048 expression suitable for an 'addi' that adds in the low 16
9050 if (GET_CODE (x) != CONST)
9052 print_operand_address (file, x);
9057 if (GET_CODE (XEXP (x, 0)) != PLUS
9058 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9059 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9060 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9061 output_operand_lossage ("invalid %%K value");
9062 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9064 /* For GNU as, there must be a non-alphanumeric character
9065 between 'l' and the number. The '-' is added by
9066 print_operand() already. */
9067 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9069 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9073 /* %l is output_asm_label. */
9076 /* Write second word of DImode or DFmode reference. Works on register
9077 or non-indexed memory only. */
9078 if (GET_CODE (x) == REG)
9079 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9080 else if (GET_CODE (x) == MEM)
9082 /* Handle possible auto-increment. Since it is pre-increment and
9083 we have already done it, we can just use an offset of word. */
9084 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9085 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9086 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9089 output_address (XEXP (adjust_address_nv (x, SImode,
9093 if (small_data_operand (x, GET_MODE (x)))
9094 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9095 reg_names[SMALL_DATA_REG]);
9100 /* MB value for a mask operand. */
9101 if (! mask_operand (x, SImode))
9102 output_operand_lossage ("invalid %%m value");
9104 fprintf (file, "%d", extract_MB (x));
9108 /* ME value for a mask operand. */
9109 if (! mask_operand (x, SImode))
9110 output_operand_lossage ("invalid %%M value");
9112 fprintf (file, "%d", extract_ME (x));
9115 /* %n outputs the negative of its operand. */
9118 /* Write the number of elements in the vector times 4. */
9119 if (GET_CODE (x) != PARALLEL)
9120 output_operand_lossage ("invalid %%N value");
9122 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9126 /* Similar, but subtract 1 first. */
9127 if (GET_CODE (x) != PARALLEL)
9128 output_operand_lossage ("invalid %%O value");
9130 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9134 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9136 || INT_LOWPART (x) < 0
9137 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9138 output_operand_lossage ("invalid %%p value");
9140 fprintf (file, "%d", i);
9144 /* The operand must be an indirect memory reference. The result
9145 is the register name. */
9146 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9147 || REGNO (XEXP (x, 0)) >= 32)
9148 output_operand_lossage ("invalid %%P value");
9150 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9154 /* This outputs the logical code corresponding to a boolean
9155 expression. The expression may have one or both operands
9156 negated (if one, only the first one). For condition register
9157 logical operations, it will also treat the negated
9158 CR codes as NOTs, but not handle NOTs of them. */
9160 const char *const *t = 0;
9162 enum rtx_code code = GET_CODE (x);
9163 static const char * const tbl[3][3] = {
9164 { "and", "andc", "nor" },
9165 { "or", "orc", "nand" },
9166 { "xor", "eqv", "xor" } };
9170 else if (code == IOR)
9172 else if (code == XOR)
9175 output_operand_lossage ("invalid %%q value");
9177 if (GET_CODE (XEXP (x, 0)) != NOT)
9181 if (GET_CODE (XEXP (x, 1)) == NOT)
9199 /* X is a CR register. Print the mask for `mtcrf'. */
9200 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9201 output_operand_lossage ("invalid %%R value");
9203 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9207 /* Low 5 bits of 32 - value */
9209 output_operand_lossage ("invalid %%s value");
9211 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9215 /* PowerPC64 mask position. All 0's is excluded.
9216 CONST_INT 32-bit mask is considered sign-extended so any
9217 transition must occur within the CONST_INT, not on the boundary. */
9218 if (! mask64_operand (x, DImode))
9219 output_operand_lossage ("invalid %%S value");
9221 uval = INT_LOWPART (x);
9223 if (uval & 1) /* Clear Left */
9225 #if HOST_BITS_PER_WIDE_INT > 64
9226 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9230 else /* Clear Right */
9233 #if HOST_BITS_PER_WIDE_INT > 64
9234 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9242 fprintf (file, "%d", i);
9246 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9247 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9250 /* Bit 3 is OV bit. */
9251 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9253 /* If we want bit 31, write a shift count of zero, not 32. */
9254 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9258 /* Print the symbolic name of a branch target register. */
9259 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9260 && REGNO (x) != COUNT_REGISTER_REGNUM))
9261 output_operand_lossage ("invalid %%T value");
9262 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9263 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9265 fputs ("ctr", file);
9269 /* High-order 16 bits of constant for use in unsigned operand. */
9271 output_operand_lossage ("invalid %%u value");
9273 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9274 (INT_LOWPART (x) >> 16) & 0xffff);
9278 /* High-order 16 bits of constant for use in signed operand. */
9280 output_operand_lossage ("invalid %%v value");
9282 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9283 (INT_LOWPART (x) >> 16) & 0xffff);
9287 /* Print `u' if this has an auto-increment or auto-decrement. */
9288 if (GET_CODE (x) == MEM
9289 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9290 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9295 /* Print the trap code for this operand. */
9296 switch (GET_CODE (x))
9299 fputs ("eq", file); /* 4 */
9302 fputs ("ne", file); /* 24 */
9305 fputs ("lt", file); /* 16 */
9308 fputs ("le", file); /* 20 */
9311 fputs ("gt", file); /* 8 */
9314 fputs ("ge", file); /* 12 */
9317 fputs ("llt", file); /* 2 */
9320 fputs ("lle", file); /* 6 */
9323 fputs ("lgt", file); /* 1 */
9326 fputs ("lge", file); /* 5 */
9334 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9337 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9338 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9340 print_operand (file, x, 0);
9344 /* MB value for a PowerPC64 rldic operand. */
9345 val = (GET_CODE (x) == CONST_INT
9346 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9351 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9352 if ((val <<= 1) < 0)
9355 #if HOST_BITS_PER_WIDE_INT == 32
9356 if (GET_CODE (x) == CONST_INT && i >= 0)
9357 i += 32; /* zero-extend high-part was all 0's */
9358 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9360 val = CONST_DOUBLE_LOW (x);
9367 for ( ; i < 64; i++)
9368 if ((val <<= 1) < 0)
9373 fprintf (file, "%d", i + 1);
9377 if (GET_CODE (x) == MEM
9378 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9383 /* Like 'L', for third word of TImode */
9384 if (GET_CODE (x) == REG)
9385 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9386 else if (GET_CODE (x) == MEM)
9388 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9389 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9390 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9392 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9393 if (small_data_operand (x, GET_MODE (x)))
9394 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9395 reg_names[SMALL_DATA_REG]);
9400 /* X is a SYMBOL_REF. Write out the name preceded by a
9401 period and without any trailing data in brackets. Used for function
9402 names. If we are configured for System V (or the embedded ABI) on
9403 the PowerPC, do not emit the period, since those systems do not use
9404 TOCs and the like. */
9405 if (GET_CODE (x) != SYMBOL_REF)
9408 if (XSTR (x, 0)[0] != '.')
9410 switch (DEFAULT_ABI)
9425 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9427 assemble_name (file, XSTR (x, 0));
9431 /* Like 'L', for last word of TImode. */
9432 if (GET_CODE (x) == REG)
9433 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9434 else if (GET_CODE (x) == MEM)
9436 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9437 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9438 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9440 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9441 if (small_data_operand (x, GET_MODE (x)))
9442 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9443 reg_names[SMALL_DATA_REG]);
9447 /* Print AltiVec or SPE memory operand. */
9452 if (GET_CODE (x) != MEM)
9460 if (GET_CODE (tmp) == REG)
9462 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9465 /* Handle [reg+UIMM]. */
9466 else if (GET_CODE (tmp) == PLUS &&
9467 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9471 if (GET_CODE (XEXP (tmp, 0)) != REG)
9474 x = INTVAL (XEXP (tmp, 1));
9475 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9479 /* Fall through. Must be [reg+reg]. */
9481 if (GET_CODE (tmp) == REG)
9482 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9483 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9485 if (REGNO (XEXP (tmp, 0)) == 0)
9486 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9487 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9489 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9490 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9498 if (GET_CODE (x) == REG)
9499 fprintf (file, "%s", reg_names[REGNO (x)]);
9500 else if (GET_CODE (x) == MEM)
9502 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9503 know the width from the mode. */
9504 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9505 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9506 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9507 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9508 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9509 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9511 output_address (XEXP (x, 0));
9514 output_addr_const (file, x);
9518 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9522 output_operand_lossage ("invalid %%xn code");
9526 /* Print the address of an operand. */
9529 print_operand_address (FILE *file, rtx x)
9531 if (GET_CODE (x) == REG)
9532 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9533 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9534 || GET_CODE (x) == LABEL_REF)
9536 output_addr_const (file, x);
9537 if (small_data_operand (x, GET_MODE (x)))
9538 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9539 reg_names[SMALL_DATA_REG]);
9540 else if (TARGET_TOC)
9543 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9545 if (REGNO (XEXP (x, 0)) == 0)
9546 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9547 reg_names[ REGNO (XEXP (x, 0)) ]);
9549 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9550 reg_names[ REGNO (XEXP (x, 1)) ]);
9552 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9553 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9554 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9556 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9557 && CONSTANT_P (XEXP (x, 1)))
9559 output_addr_const (file, XEXP (x, 1));
9560 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9564 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9565 && CONSTANT_P (XEXP (x, 1)))
9567 fprintf (file, "lo16(");
9568 output_addr_const (file, XEXP (x, 1));
9569 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9572 else if (legitimate_constant_pool_address_p (x))
9574 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9576 rtx contains_minus = XEXP (x, 1);
9580 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9581 turn it into (sym) for output_addr_const. */
9582 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9583 contains_minus = XEXP (contains_minus, 0);
9585 minus = XEXP (contains_minus, 0);
9586 symref = XEXP (minus, 0);
9587 XEXP (contains_minus, 0) = symref;
9592 name = XSTR (symref, 0);
9593 newname = alloca (strlen (name) + sizeof ("@toc"));
9594 strcpy (newname, name);
9595 strcat (newname, "@toc");
9596 XSTR (symref, 0) = newname;
9598 output_addr_const (file, XEXP (x, 1));
9600 XSTR (symref, 0) = name;
9601 XEXP (contains_minus, 0) = minus;
9604 output_addr_const (file, XEXP (x, 1));
9606 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9612 /* Target hook for assembling integer objects. The PowerPC version has
9613 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9614 is defined. It also needs to handle DI-mode objects on 64-bit
9618 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9620 #ifdef RELOCATABLE_NEEDS_FIXUP
9621 /* Special handling for SI values. */
9622 if (size == 4 && aligned_p)
9624 extern int in_toc_section (void);
9625 static int recurse = 0;
9627 /* For -mrelocatable, we mark all addresses that need to be fixed up
9628 in the .fixup section. */
9629 if (TARGET_RELOCATABLE
9630 && !in_toc_section ()
9631 && !in_text_section ()
9633 && GET_CODE (x) != CONST_INT
9634 && GET_CODE (x) != CONST_DOUBLE
9640 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9642 ASM_OUTPUT_LABEL (asm_out_file, buf);
9643 fprintf (asm_out_file, "\t.long\t(");
9644 output_addr_const (asm_out_file, x);
9645 fprintf (asm_out_file, ")@fixup\n");
9646 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9647 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9648 fprintf (asm_out_file, "\t.long\t");
9649 assemble_name (asm_out_file, buf);
9650 fprintf (asm_out_file, "\n\t.previous\n");
9654 /* Remove initial .'s to turn a -mcall-aixdesc function
9655 address into the address of the descriptor, not the function
9657 else if (GET_CODE (x) == SYMBOL_REF
9658 && XSTR (x, 0)[0] == '.'
9659 && DEFAULT_ABI == ABI_AIX)
9661 const char *name = XSTR (x, 0);
9662 while (*name == '.')
9665 fprintf (asm_out_file, "\t.long\t%s\n", name);
9669 #endif /* RELOCATABLE_NEEDS_FIXUP */
9670 return default_assemble_integer (x, size, aligned_p);
9673 #ifdef HAVE_GAS_HIDDEN
9674 /* Emit an assembler directive to set symbol visibility for DECL to
9678 rs6000_assemble_visibility (tree decl, int vis)
9680 /* Functions need to have their entry point symbol visibility set as
9681 well as their descriptor symbol visibility. */
9682 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9684 static const char * const visibility_types[] = {
9685 NULL, "internal", "hidden", "protected"
9688 const char *name, *type;
9690 name = ((* targetm.strip_name_encoding)
9691 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9692 type = visibility_types[vis];
9694 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9695 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9698 default_assemble_visibility (decl, vis);
9703 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9705 /* Reversal of FP compares takes care -- an ordered compare
9706 becomes an unordered compare and vice versa. */
9707 if (mode == CCFPmode
9708 && (!flag_finite_math_only
9709 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9710 || code == UNEQ || code == LTGT))
9711 return reverse_condition_maybe_unordered (code);
9713 return reverse_condition (code);
9716 /* Generate a compare for CODE. Return a brand-new rtx that
9717 represents the result of the compare. */
9720 rs6000_generate_compare (enum rtx_code code)
9722 enum machine_mode comp_mode;
9725 if (rs6000_compare_fp_p)
9726 comp_mode = CCFPmode;
9727 else if (code == GTU || code == LTU
9728 || code == GEU || code == LEU)
9729 comp_mode = CCUNSmode;
9733 /* First, the compare. */
9734 compare_result = gen_reg_rtx (comp_mode);
9736 /* SPE FP compare instructions on the GPRs. Yuck! */
9737 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9738 && rs6000_compare_fp_p)
9740 rtx cmp, or1, or2, or_result, compare_result2;
9748 cmp = flag_finite_math_only
9749 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9751 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9752 rs6000_compare_op1);
9760 cmp = flag_finite_math_only
9761 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9763 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9764 rs6000_compare_op1);
9772 cmp = flag_finite_math_only
9773 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9775 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9776 rs6000_compare_op1);
9782 /* Synthesize LE and GE from LT/GT || EQ. */
9783 if (code == LE || code == GE || code == LEU || code == GEU)
9785 /* Synthesize GE/LE frome GT/LT || EQ. */
9791 case LE: code = LT; break;
9792 case GE: code = GT; break;
9793 case LEU: code = LT; break;
9794 case GEU: code = GT; break;
9798 or1 = gen_reg_rtx (SImode);
9799 or2 = gen_reg_rtx (SImode);
9800 or_result = gen_reg_rtx (CCEQmode);
9801 compare_result2 = gen_reg_rtx (CCFPmode);
9804 cmp = flag_finite_math_only
9805 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9807 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9808 rs6000_compare_op1);
9811 /* The MC8540 FP compare instructions set the CR bits
9812 differently than other PPC compare instructions. For
9813 that matter, there is no generic test instruction, but a
9814 testgt, testlt, and testeq. For a true condition, bit 2
9815 is set (x1xx) in the CR. Following the traditional CR
9821 ... bit 2 would be a GT CR alias, so later on we
9822 look in the GT bits for the branch instructions.
9823 However, we must be careful to emit correct RTL in
9824 the meantime, so optimizations don't get confused. */
9826 or1 = gen_rtx_NE (SImode, compare_result, const0_rtx);
9827 or2 = gen_rtx_NE (SImode, compare_result2, const0_rtx);
9829 /* OR them together. */
9830 cmp = gen_rtx_SET (VOIDmode, or_result,
9831 gen_rtx_COMPARE (CCEQmode,
9832 gen_rtx_IOR (SImode, or1, or2),
9834 compare_result = or_result;
9839 /* We only care about 1 bit (x1xx), so map everything to NE to
9840 maintain rtl sanity. We'll get to the right bit (x1xx) at
9841 code output time. */
9842 if (code == NE || code == LTGT)
9843 /* Do the inverse here because we have no cmpne
9844 instruction. We use the cmpeq instruction and expect
9845 to get a 0 instead. */
9854 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9855 gen_rtx_COMPARE (comp_mode,
9857 rs6000_compare_op1)));
9859 /* Some kinds of FP comparisons need an OR operation;
9860 under flag_finite_math_only we don't bother. */
9861 if (rs6000_compare_fp_p
9862 && ! flag_finite_math_only
9863 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9864 && (code == LE || code == GE
9865 || code == UNEQ || code == LTGT
9866 || code == UNGT || code == UNLT))
9868 enum rtx_code or1, or2;
9869 rtx or1_rtx, or2_rtx, compare2_rtx;
9870 rtx or_result = gen_reg_rtx (CCEQmode);
9874 case LE: or1 = LT; or2 = EQ; break;
9875 case GE: or1 = GT; or2 = EQ; break;
9876 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9877 case LTGT: or1 = LT; or2 = GT; break;
9878 case UNGT: or1 = UNORDERED; or2 = GT; break;
9879 case UNLT: or1 = UNORDERED; or2 = LT; break;
9882 validate_condition_mode (or1, comp_mode);
9883 validate_condition_mode (or2, comp_mode);
9884 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
9885 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
9886 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9887 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9889 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9891 compare_result = or_result;
9895 validate_condition_mode (code, GET_MODE (compare_result));
9897 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
9901 /* Emit the RTL for an sCOND pattern. */
9904 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9907 enum machine_mode op_mode;
9908 enum rtx_code cond_code;
9910 condition_rtx = rs6000_generate_compare (code);
9911 cond_code = GET_CODE (condition_rtx);
9914 || cond_code == GE || cond_code == LE
9915 || cond_code == GEU || cond_code == LEU
9916 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9918 rtx not_result = gen_reg_rtx (CCEQmode);
9919 rtx not_op, rev_cond_rtx;
9920 enum machine_mode cc_mode;
9922 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9924 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
9925 SImode, XEXP (condition_rtx, 0), const0_rtx);
9926 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9927 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9928 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9931 op_mode = GET_MODE (rs6000_compare_op0);
9932 if (op_mode == VOIDmode)
9933 op_mode = GET_MODE (rs6000_compare_op1);
9935 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9937 PUT_MODE (condition_rtx, DImode);
9938 convert_move (result, condition_rtx, 0);
9942 PUT_MODE (condition_rtx, SImode);
9943 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9947 /* Emit a branch of kind CODE to location LOC. */
9950 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9952 rtx condition_rtx, loc_ref;
9954 condition_rtx = rs6000_generate_compare (code);
9955 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9956 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9957 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9961 /* Return the string to output a conditional branch to LABEL, which is
9962 the operand number of the label, or -1 if the branch is really a
9965 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9966 condition code register and its mode specifies what kind of
9969 REVERSED is nonzero if we should reverse the sense of the comparison.
9971 INSN is the insn. */
9974 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9976 static char string[64];
9977 enum rtx_code code = GET_CODE (op);
9978 rtx cc_reg = XEXP (op, 0);
9979 enum machine_mode mode = GET_MODE (cc_reg);
9980 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9981 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9982 int really_reversed = reversed ^ need_longbranch;
9988 validate_condition_mode (code, mode);
9990 /* Work out which way this really branches. We could use
9991 reverse_condition_maybe_unordered here always but this
9992 makes the resulting assembler clearer. */
9993 if (really_reversed)
9995 /* Reversal of FP compares takes care -- an ordered compare
9996 becomes an unordered compare and vice versa. */
9997 if (mode == CCFPmode)
9998 code = reverse_condition_maybe_unordered (code);
10000 code = reverse_condition (code);
10003 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10005 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10008 /* Opposite of GT. */
10010 else if (code == NE)
10018 /* Not all of these are actually distinct opcodes, but
10019 we distinguish them for clarity of the resulting assembler. */
10020 case NE: case LTGT:
10021 ccode = "ne"; break;
10022 case EQ: case UNEQ:
10023 ccode = "eq"; break;
10025 ccode = "ge"; break;
10026 case GT: case GTU: case UNGT:
10027 ccode = "gt"; break;
10029 ccode = "le"; break;
10030 case LT: case LTU: case UNLT:
10031 ccode = "lt"; break;
10032 case UNORDERED: ccode = "un"; break;
10033 case ORDERED: ccode = "nu"; break;
10034 case UNGE: ccode = "nl"; break;
10035 case UNLE: ccode = "ng"; break;
10040 /* Maybe we have a guess as to how likely the branch is.
10041 The old mnemonics don't have a way to specify this information. */
10043 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10044 if (note != NULL_RTX)
10046 /* PROB is the difference from 50%. */
10047 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10048 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
10050 /* Only hint for highly probable/improbable branches on newer
10051 cpus as static prediction overrides processor dynamic
10052 prediction. For older cpus we may as well always hint, but
10053 assume not taken for branches that are very close to 50% as a
10054 mispredicted taken branch is more expensive than a
10055 mispredicted not-taken branch. */
10057 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10059 if (abs (prob) > REG_BR_PROB_BASE / 20
10060 && ((prob > 0) ^ need_longbranch))
10068 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10070 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10072 /* We need to escape any '%' characters in the reg_names string.
10073 Assume they'd only be the first character.... */
10074 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10076 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10080 /* If the branch distance was too far, we may have to use an
10081 unconditional branch to go the distance. */
10082 if (need_longbranch)
10083 s += sprintf (s, ",$+8\n\tb %s", label);
10085 s += sprintf (s, ",%s", label);
10091 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10092 operands of the last comparison is nonzero/true, FALSE_COND if it
10093 is zero/false. Return 0 if the hardware has no such operation. */
10096 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10098 enum rtx_code code = GET_CODE (op);
10099 rtx op0 = rs6000_compare_op0;
10100 rtx op1 = rs6000_compare_op1;
10101 REAL_VALUE_TYPE c1;
10102 enum machine_mode compare_mode = GET_MODE (op0);
10103 enum machine_mode result_mode = GET_MODE (dest);
10106 /* These modes should always match. */
10107 if (GET_MODE (op1) != compare_mode
10108 /* In the isel case however, we can use a compare immediate, so
10109 op1 may be a small constant. */
10110 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10112 if (GET_MODE (true_cond) != result_mode)
10114 if (GET_MODE (false_cond) != result_mode)
10117 /* First, work out if the hardware can do this at all, or
10118 if it's too slow.... */
10119 if (! rs6000_compare_fp_p)
10122 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10126 /* Eliminate half of the comparisons by switching operands, this
10127 makes the remaining code simpler. */
10128 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10129 || code == LTGT || code == LT || code == UNLE)
10131 code = reverse_condition_maybe_unordered (code);
10133 true_cond = false_cond;
10137 /* UNEQ and LTGT take four instructions for a comparison with zero,
10138 it'll probably be faster to use a branch here too. */
10139 if (code == UNEQ && HONOR_NANS (compare_mode))
10142 if (GET_CODE (op1) == CONST_DOUBLE)
10143 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10145 /* We're going to try to implement comparisons by performing
10146 a subtract, then comparing against zero. Unfortunately,
10147 Inf - Inf is NaN which is not zero, and so if we don't
10148 know that the operand is finite and the comparison
10149 would treat EQ different to UNORDERED, we can't do it. */
10150 if (HONOR_INFINITIES (compare_mode)
10151 && code != GT && code != UNGE
10152 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10153 /* Constructs of the form (a OP b ? a : b) are safe. */
10154 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10155 || (! rtx_equal_p (op0, true_cond)
10156 && ! rtx_equal_p (op1, true_cond))))
10158 /* At this point we know we can use fsel. */
10160 /* Reduce the comparison to a comparison against zero. */
10161 temp = gen_reg_rtx (compare_mode);
10162 emit_insn (gen_rtx_SET (VOIDmode, temp,
10163 gen_rtx_MINUS (compare_mode, op0, op1)));
10165 op1 = CONST0_RTX (compare_mode);
10167 /* If we don't care about NaNs we can reduce some of the comparisons
10168 down to faster ones. */
10169 if (! HONOR_NANS (compare_mode))
10175 true_cond = false_cond;
10188 /* Now, reduce everything down to a GE. */
10195 temp = gen_reg_rtx (compare_mode);
10196 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10201 temp = gen_reg_rtx (compare_mode);
10202 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10207 temp = gen_reg_rtx (compare_mode);
10208 emit_insn (gen_rtx_SET (VOIDmode, temp,
10209 gen_rtx_NEG (compare_mode,
10210 gen_rtx_ABS (compare_mode, op0))));
10215 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10216 temp = gen_reg_rtx (result_mode);
10217 emit_insn (gen_rtx_SET (VOIDmode, temp,
10218 gen_rtx_IF_THEN_ELSE (result_mode,
10219 gen_rtx_GE (VOIDmode,
10221 true_cond, false_cond)));
10222 false_cond = true_cond;
10225 temp = gen_reg_rtx (compare_mode);
10226 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10231 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10232 temp = gen_reg_rtx (result_mode);
10233 emit_insn (gen_rtx_SET (VOIDmode, temp,
10234 gen_rtx_IF_THEN_ELSE (result_mode,
10235 gen_rtx_GE (VOIDmode,
10237 true_cond, false_cond)));
10238 true_cond = false_cond;
10241 temp = gen_reg_rtx (compare_mode);
10242 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10250 emit_insn (gen_rtx_SET (VOIDmode, dest,
10251 gen_rtx_IF_THEN_ELSE (result_mode,
10252 gen_rtx_GE (VOIDmode,
10254 true_cond, false_cond)));
10258 /* Same as above, but for ints (isel). */
10261 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10263 rtx condition_rtx, cr;
10265 /* All isel implementations thus far are 32-bits. */
10266 if (GET_MODE (rs6000_compare_op0) != SImode)
10269 /* We still have to do the compare, because isel doesn't do a
10270 compare, it just looks at the CRx bits set by a previous compare
10272 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10273 cr = XEXP (condition_rtx, 0);
10275 if (GET_MODE (cr) == CCmode)
10276 emit_insn (gen_isel_signed (dest, condition_rtx,
10277 true_cond, false_cond, cr));
10279 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10280 true_cond, false_cond, cr));
10286 output_isel (rtx *operands)
10288 enum rtx_code code;
10290 code = GET_CODE (operands[1]);
10291 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10293 PUT_CODE (operands[1], reverse_condition (code));
10294 return "isel %0,%3,%2,%j1";
10297 return "isel %0,%2,%3,%j1";
10301 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10303 enum machine_mode mode = GET_MODE (op0);
10307 if (code == SMAX || code == SMIN)
10312 if (code == SMAX || code == UMAX)
10313 target = emit_conditional_move (dest, c, op0, op1, mode,
10314 op0, op1, mode, 0);
10316 target = emit_conditional_move (dest, c, op0, op1, mode,
10317 op1, op0, mode, 0);
10318 if (target == NULL_RTX)
10320 if (target != dest)
10321 emit_move_insn (dest, target);
10324 /* Emit instructions to move SRC to DST. Called by splitters for
10325 multi-register moves. It will emit at most one instruction for
10326 each register that is accessed; that is, it won't emit li/lis pairs
10327 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10331 rs6000_split_multireg_move (rtx dst, rtx src)
10333 /* The register number of the first register being moved. */
10335 /* The mode that is to be moved. */
10336 enum machine_mode mode;
10337 /* The mode that the move is being done in, and its size. */
10338 enum machine_mode reg_mode;
10340 /* The number of registers that will be moved. */
10343 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10344 mode = GET_MODE (dst);
10345 nregs = HARD_REGNO_NREGS (reg, mode);
10346 if (FP_REGNO_P (reg))
10348 else if (ALTIVEC_REGNO_P (reg))
10349 reg_mode = V16QImode;
10351 reg_mode = word_mode;
10352 reg_mode_size = GET_MODE_SIZE (reg_mode);
10354 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10357 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10359 /* Move register range backwards, if we might have destructive
10362 for (i = nregs - 1; i >= 0; i--)
10363 emit_insn (gen_rtx_SET (VOIDmode,
10364 simplify_gen_subreg (reg_mode, dst, mode,
10365 i * reg_mode_size),
10366 simplify_gen_subreg (reg_mode, src, mode,
10367 i * reg_mode_size)));
10373 bool used_update = false;
10375 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10379 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10380 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10383 breg = XEXP (XEXP (src, 0), 0);
10384 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10385 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10386 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10387 emit_insn (TARGET_32BIT
10388 ? gen_addsi3 (breg, breg, delta_rtx)
10389 : gen_adddi3 (breg, breg, delta_rtx));
10390 src = gen_rtx_MEM (mode, breg);
10393 /* We have now address involving an base register only.
10394 If we use one of the registers to address memory,
10395 we have change that register last. */
10397 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10398 ? XEXP (XEXP (src, 0), 0)
10404 if (REGNO (breg) >= REGNO (dst)
10405 && REGNO (breg) < REGNO (dst) + nregs)
10406 j = REGNO (breg) - REGNO (dst);
10409 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10413 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10414 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10417 breg = XEXP (XEXP (dst, 0), 0);
10418 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10419 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10420 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10422 /* We have to update the breg before doing the store.
10423 Use store with update, if available. */
10427 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10428 emit_insn (TARGET_32BIT
10429 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10430 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10431 used_update = true;
10434 emit_insn (TARGET_32BIT
10435 ? gen_addsi3 (breg, breg, delta_rtx)
10436 : gen_adddi3 (breg, breg, delta_rtx));
10437 dst = gen_rtx_MEM (mode, breg);
10441 for (i = 0; i < nregs; i++)
10443 /* Calculate index to next subword. */
10448 /* If compiler already emited move of first word by
10449 store with update, no need to do anything. */
10450 if (j == 0 && used_update)
10453 emit_insn (gen_rtx_SET (VOIDmode,
10454 simplify_gen_subreg (reg_mode, dst, mode,
10455 j * reg_mode_size),
10456 simplify_gen_subreg (reg_mode, src, mode,
10457 j * reg_mode_size)));
10463 /* This page contains routines that are used to determine what the
10464 function prologue and epilogue code will do and write them out. */
10466 /* Return the first fixed-point register that is required to be
10467 saved. 32 if none. */
10470 first_reg_to_save (void)
10474 /* Find lowest numbered live register. */
10475 for (first_reg = 13; first_reg <= 31; first_reg++)
10476 if (regs_ever_live[first_reg]
10477 && (! call_used_regs[first_reg]
10478 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10479 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10480 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10485 && current_function_uses_pic_offset_table
10486 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10487 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10493 /* Similar, for FP regs. */
10496 first_fp_reg_to_save (void)
10500 /* Find lowest numbered live register. */
10501 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10502 if (regs_ever_live[first_reg])
10508 /* Similar, for AltiVec regs. */
10511 first_altivec_reg_to_save (void)
10515 /* Stack frame remains as is unless we are in AltiVec ABI. */
10516 if (! TARGET_ALTIVEC_ABI)
10517 return LAST_ALTIVEC_REGNO + 1;
10519 /* Find lowest numbered live register. */
10520 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10521 if (regs_ever_live[i])
10527 /* Return a 32-bit mask of the AltiVec registers we need to set in
10528 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10529 the 32-bit word is 0. */
10531 static unsigned int
10532 compute_vrsave_mask (void)
10534 unsigned int i, mask = 0;
10536 /* First, find out if we use _any_ altivec registers. */
10537 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10538 if (regs_ever_live[i])
10539 mask |= ALTIVEC_REG_BIT (i);
10544 /* Next, remove the argument registers from the set. These must
10545 be in the VRSAVE mask set by the caller, so we don't need to add
10546 them in again. More importantly, the mask we compute here is
10547 used to generate CLOBBERs in the set_vrsave insn, and we do not
10548 wish the argument registers to die. */
10549 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10550 mask &= ~ALTIVEC_REG_BIT (i);
10552 /* Similarly, remove the return value from the set. */
10555 diddle_return_value (is_altivec_return_reg, &yes);
10557 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10564 is_altivec_return_reg (rtx reg, void *xyes)
10566 bool *yes = (bool *) xyes;
10567 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10572 /* Calculate the stack information for the current function. This is
10573 complicated by having two separate calling sequences, the AIX calling
10574 sequence and the V.4 calling sequence.
10576 AIX (and Darwin/Mac OS X) stack frames look like:
10578 SP----> +---------------------------------------+
10579 | back chain to caller | 0 0
10580 +---------------------------------------+
10581 | saved CR | 4 8 (8-11)
10582 +---------------------------------------+
10584 +---------------------------------------+
10585 | reserved for compilers | 12 24
10586 +---------------------------------------+
10587 | reserved for binders | 16 32
10588 +---------------------------------------+
10589 | saved TOC pointer | 20 40
10590 +---------------------------------------+
10591 | Parameter save area (P) | 24 48
10592 +---------------------------------------+
10593 | Alloca space (A) | 24+P etc.
10594 +---------------------------------------+
10595 | Local variable space (L) | 24+P+A
10596 +---------------------------------------+
10597 | Float/int conversion temporary (X) | 24+P+A+L
10598 +---------------------------------------+
10599 | Save area for AltiVec registers (W) | 24+P+A+L+X
10600 +---------------------------------------+
10601 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10602 +---------------------------------------+
10603 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10604 +---------------------------------------+
10605 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10606 +---------------------------------------+
10607 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10608 +---------------------------------------+
10609 old SP->| back chain to caller's caller |
10610 +---------------------------------------+
10612 The required alignment for AIX configurations is two words (i.e., 8
10616 V.4 stack frames look like:
10618 SP----> +---------------------------------------+
10619 | back chain to caller | 0
10620 +---------------------------------------+
10621 | caller's saved LR | 4
10622 +---------------------------------------+
10623 | Parameter save area (P) | 8
10624 +---------------------------------------+
10625 | Alloca space (A) | 8+P
10626 +---------------------------------------+
10627 | Varargs save area (V) | 8+P+A
10628 +---------------------------------------+
10629 | Local variable space (L) | 8+P+A+V
10630 +---------------------------------------+
10631 | Float/int conversion temporary (X) | 8+P+A+V+L
10632 +---------------------------------------+
10633 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10634 +---------------------------------------+
10635 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10636 +---------------------------------------+
10637 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10638 +---------------------------------------+
10639 | SPE: area for 64-bit GP registers |
10640 +---------------------------------------+
10641 | SPE alignment padding |
10642 +---------------------------------------+
10643 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10644 +---------------------------------------+
10645 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10646 +---------------------------------------+
10647 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10648 +---------------------------------------+
10649 old SP->| back chain to caller's caller |
10650 +---------------------------------------+
10652 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10653 given. (But note below and in sysv4.h that we require only 8 and
10654 may round up the size of our stack frame anyways. The historical
10655 reason is early versions of powerpc-linux which didn't properly
10656 align the stack at program startup. A happy side-effect is that
10657 -mno-eabi libraries can be used with -meabi programs.)
10659 The EABI configuration defaults to the V.4 layout. However,
10660 the stack alignment requirements may differ. If -mno-eabi is not
10661 given, the required stack alignment is 8 bytes; if -mno-eabi is
10662 given, the required alignment is 16 bytes. (But see V.4 comment
10665 #ifndef ABI_STACK_BOUNDARY
10666 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10669 static rs6000_stack_t *
10670 rs6000_stack_info (void)
10672 static rs6000_stack_t info, zero_info;
10673 rs6000_stack_t *info_ptr = &info;
10674 int reg_size = TARGET_32BIT ? 4 : 8;
10676 HOST_WIDE_INT total_raw_size;
10678 /* Zero all fields portably. */
10683 /* Cache value so we don't rescan instruction chain over and over. */
10684 if (cfun->machine->insn_chain_scanned_p == 0)
10686 cfun->machine->insn_chain_scanned_p = 1;
10687 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10691 /* Select which calling sequence. */
10692 info_ptr->abi = DEFAULT_ABI;
10694 /* Calculate which registers need to be saved & save area size. */
10695 info_ptr->first_gp_reg_save = first_reg_to_save ();
10696 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10697 even if it currently looks like we won't. */
10698 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10699 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10700 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10701 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10702 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10704 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10706 /* For the SPE, we have an additional upper 32-bits on each GPR.
10707 Ideally we should save the entire 64-bits only when the upper
10708 half is used in SIMD instructions. Since we only record
10709 registers live (not the size they are used in), this proves
10710 difficult because we'd have to traverse the instruction chain at
10711 the right time, taking reload into account. This is a real pain,
10712 so we opt to save the GPRs in 64-bits always if but one register
10713 gets used in 64-bits. Otherwise, all the registers in the frame
10714 get saved in 32-bits.
10716 So... since when we save all GPRs (except the SP) in 64-bits, the
10717 traditional GP save area will be empty. */
10718 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10719 info_ptr->gp_size = 0;
10721 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10722 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10724 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10725 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10726 - info_ptr->first_altivec_reg_save);
10728 /* Does this function call anything? */
10729 info_ptr->calls_p = (! current_function_is_leaf
10730 || cfun->machine->ra_needs_full_frame);
10732 /* Determine if we need to save the link register. */
10733 if (rs6000_ra_ever_killed ()
10734 || (DEFAULT_ABI == ABI_AIX
10735 && current_function_profile
10736 && !TARGET_PROFILE_KERNEL)
10737 #ifdef TARGET_RELOCATABLE
10738 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10740 || (info_ptr->first_fp_reg_save != 64
10741 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10742 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10743 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10744 || (DEFAULT_ABI == ABI_DARWIN
10746 && current_function_uses_pic_offset_table)
10747 || info_ptr->calls_p)
10749 info_ptr->lr_save_p = 1;
10750 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10753 /* Determine if we need to save the condition code registers. */
10754 if (regs_ever_live[CR2_REGNO]
10755 || regs_ever_live[CR3_REGNO]
10756 || regs_ever_live[CR4_REGNO])
10758 info_ptr->cr_save_p = 1;
10759 if (DEFAULT_ABI == ABI_V4)
10760 info_ptr->cr_size = reg_size;
10763 /* If the current function calls __builtin_eh_return, then we need
10764 to allocate stack space for registers that will hold data for
10765 the exception handler. */
10766 if (current_function_calls_eh_return)
10769 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10772 /* SPE saves EH registers in 64-bits. */
10773 ehrd_size = i * (TARGET_SPE_ABI
10774 && info_ptr->spe_64bit_regs_used != 0
10775 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10780 /* Determine various sizes. */
10781 info_ptr->reg_size = reg_size;
10782 info_ptr->fixed_size = RS6000_SAVE_AREA;
10783 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10784 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10785 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10788 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10789 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10791 info_ptr->spe_gp_size = 0;
10793 if (TARGET_ALTIVEC_ABI)
10794 info_ptr->vrsave_mask = compute_vrsave_mask ();
10796 info_ptr->vrsave_mask = 0;
10798 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10799 info_ptr->vrsave_size = 4;
10801 info_ptr->vrsave_size = 0;
10803 /* Calculate the offsets. */
10804 switch (DEFAULT_ABI)
10812 info_ptr->fp_save_offset = - info_ptr->fp_size;
10813 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10815 if (TARGET_ALTIVEC_ABI)
10817 info_ptr->vrsave_save_offset
10818 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10820 /* Align stack so vector save area is on a quadword boundary. */
10821 if (info_ptr->altivec_size != 0)
10822 info_ptr->altivec_padding_size
10823 = 16 - (-info_ptr->vrsave_save_offset % 16);
10825 info_ptr->altivec_padding_size = 0;
10827 info_ptr->altivec_save_offset
10828 = info_ptr->vrsave_save_offset
10829 - info_ptr->altivec_padding_size
10830 - info_ptr->altivec_size;
10832 /* Adjust for AltiVec case. */
10833 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10836 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10837 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10838 info_ptr->lr_save_offset = 2*reg_size;
10842 info_ptr->fp_save_offset = - info_ptr->fp_size;
10843 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10844 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10846 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10848 /* Align stack so SPE GPR save area is aligned on a
10849 double-word boundary. */
10850 if (info_ptr->spe_gp_size != 0)
10851 info_ptr->spe_padding_size
10852 = 8 - (-info_ptr->cr_save_offset % 8);
10854 info_ptr->spe_padding_size = 0;
10856 info_ptr->spe_gp_save_offset
10857 = info_ptr->cr_save_offset
10858 - info_ptr->spe_padding_size
10859 - info_ptr->spe_gp_size;
10861 /* Adjust for SPE case. */
10862 info_ptr->toc_save_offset
10863 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10865 else if (TARGET_ALTIVEC_ABI)
10867 info_ptr->vrsave_save_offset
10868 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10870 /* Align stack so vector save area is on a quadword boundary. */
10871 if (info_ptr->altivec_size != 0)
10872 info_ptr->altivec_padding_size
10873 = 16 - (-info_ptr->vrsave_save_offset % 16);
10875 info_ptr->altivec_padding_size = 0;
10877 info_ptr->altivec_save_offset
10878 = info_ptr->vrsave_save_offset
10879 - info_ptr->altivec_padding_size
10880 - info_ptr->altivec_size;
10882 /* Adjust for AltiVec case. */
10883 info_ptr->toc_save_offset
10884 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10887 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10888 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10889 info_ptr->lr_save_offset = reg_size;
10893 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10894 + info_ptr->gp_size
10895 + info_ptr->altivec_size
10896 + info_ptr->altivec_padding_size
10897 + info_ptr->spe_gp_size
10898 + info_ptr->spe_padding_size
10900 + info_ptr->cr_size
10901 + info_ptr->lr_size
10902 + info_ptr->vrsave_size
10903 + info_ptr->toc_size,
10904 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10907 total_raw_size = (info_ptr->vars_size
10908 + info_ptr->parm_size
10909 + info_ptr->save_size
10910 + info_ptr->varargs_size
10911 + info_ptr->fixed_size);
10913 info_ptr->total_size =
10914 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10916 /* Determine if we need to allocate any stack frame:
10918 For AIX we need to push the stack if a frame pointer is needed
10919 (because the stack might be dynamically adjusted), if we are
10920 debugging, if we make calls, or if the sum of fp_save, gp_save,
10921 and local variables are more than the space needed to save all
10922 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10923 + 18*8 = 288 (GPR13 reserved).
10925 For V.4 we don't have the stack cushion that AIX uses, but assume
10926 that the debugger can handle stackless frames. */
10928 if (info_ptr->calls_p)
10929 info_ptr->push_p = 1;
10931 else if (DEFAULT_ABI == ABI_V4)
10932 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10934 else if (frame_pointer_needed)
10935 info_ptr->push_p = 1;
10937 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10938 info_ptr->push_p = 1;
10942 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10944 /* Zero offsets if we're not saving those registers. */
10945 if (info_ptr->fp_size == 0)
10946 info_ptr->fp_save_offset = 0;
10948 if (info_ptr->gp_size == 0)
10949 info_ptr->gp_save_offset = 0;
10951 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10952 info_ptr->altivec_save_offset = 0;
10954 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10955 info_ptr->vrsave_save_offset = 0;
10957 if (! TARGET_SPE_ABI
10958 || info_ptr->spe_64bit_regs_used == 0
10959 || info_ptr->spe_gp_size == 0)
10960 info_ptr->spe_gp_save_offset = 0;
10962 if (! info_ptr->lr_save_p)
10963 info_ptr->lr_save_offset = 0;
10965 if (! info_ptr->cr_save_p)
10966 info_ptr->cr_save_offset = 0;
10968 if (! info_ptr->toc_save_p)
10969 info_ptr->toc_save_offset = 0;
10974 /* Return true if the current function uses any GPRs in 64-bit SIMD
10978 spe_func_has_64bit_regs_p (void)
10982 /* Functions that save and restore all the call-saved registers will
10983 need to save/restore the registers in 64-bits. */
10984 if (current_function_calls_eh_return
10985 || current_function_calls_setjmp
10986 || current_function_has_nonlocal_goto)
10989 insns = get_insns ();
10991 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10997 i = PATTERN (insn);
10998 if (GET_CODE (i) == SET
10999 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11008 debug_stack_info (rs6000_stack_t *info)
11010 const char *abi_string;
11013 info = rs6000_stack_info ();
11015 fprintf (stderr, "\nStack information for function %s:\n",
11016 ((current_function_decl && DECL_NAME (current_function_decl))
11017 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11022 default: abi_string = "Unknown"; break;
11023 case ABI_NONE: abi_string = "NONE"; break;
11024 case ABI_AIX: abi_string = "AIX"; break;
11025 case ABI_DARWIN: abi_string = "Darwin"; break;
11026 case ABI_V4: abi_string = "V.4"; break;
11029 fprintf (stderr, "\tABI = %5s\n", abi_string);
11031 if (TARGET_ALTIVEC_ABI)
11032 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11034 if (TARGET_SPE_ABI)
11035 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11037 if (info->first_gp_reg_save != 32)
11038 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11040 if (info->first_fp_reg_save != 64)
11041 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11043 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11044 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11045 info->first_altivec_reg_save);
11047 if (info->lr_save_p)
11048 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11050 if (info->cr_save_p)
11051 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11053 if (info->toc_save_p)
11054 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11056 if (info->vrsave_mask)
11057 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11060 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11063 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11065 if (info->gp_save_offset)
11066 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11068 if (info->fp_save_offset)
11069 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11071 if (info->altivec_save_offset)
11072 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11073 info->altivec_save_offset);
11075 if (info->spe_gp_save_offset)
11076 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11077 info->spe_gp_save_offset);
11079 if (info->vrsave_save_offset)
11080 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11081 info->vrsave_save_offset);
11083 if (info->lr_save_offset)
11084 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11086 if (info->cr_save_offset)
11087 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11089 if (info->toc_save_offset)
11090 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11092 if (info->varargs_save_offset)
11093 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11095 if (info->total_size)
11096 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11099 if (info->varargs_size)
11100 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11102 if (info->vars_size)
11103 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11106 if (info->parm_size)
11107 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11109 if (info->fixed_size)
11110 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11113 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11115 if (info->spe_gp_size)
11116 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11119 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11121 if (info->altivec_size)
11122 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11124 if (info->vrsave_size)
11125 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11127 if (info->altivec_padding_size)
11128 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11129 info->altivec_padding_size);
11131 if (info->spe_padding_size)
11132 fprintf (stderr, "\tspe_padding_size = %5d\n",
11133 info->spe_padding_size);
11136 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11139 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11141 if (info->toc_size)
11142 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11144 if (info->save_size)
11145 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11147 if (info->reg_size != 4)
11148 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11150 fprintf (stderr, "\n");
11154 rs6000_return_addr (int count, rtx frame)
11156 /* Currently we don't optimize very well between prolog and body
11157 code and for PIC code the code can be actually quite bad, so
11158 don't try to be too clever here. */
11159 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11161 cfun->machine->ra_needs_full_frame = 1;
11168 plus_constant (copy_to_reg
11169 (gen_rtx_MEM (Pmode,
11170 memory_address (Pmode, frame))),
11171 RETURN_ADDRESS_OFFSET)));
11174 cfun->machine->ra_need_lr = 1;
11175 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11178 /* Say whether a function is a candidate for sibcall handling or not.
11179 We do not allow indirect calls to be optimized into sibling calls.
11180 Also, we can't do it if there are any vector parameters; there's
11181 nowhere to put the VRsave code so it works; note that functions with
11182 vector parameters are required to have a prototype, so the argument
11183 type info must be available here. (The tail recursion case can work
11184 with vector parameters, but there's no way to distinguish here.) */
11186 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11191 if (TARGET_ALTIVEC_VRSAVE)
11193 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11194 type; type = TREE_CHAIN (type))
11196 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11200 if (DEFAULT_ABI == ABI_DARWIN
11201 || (*targetm.binds_local_p) (decl))
11203 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11205 if (!lookup_attribute ("longcall", attr_list)
11206 || lookup_attribute ("shortcall", attr_list))
11214 rs6000_ra_ever_killed (void)
11220 if (current_function_is_thunk)
11223 /* regs_ever_live has LR marked as used if any sibcalls are present,
11224 but this should not force saving and restoring in the
11225 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11226 clobbers LR, so that is inappropriate. */
11228 /* Also, the prologue can generate a store into LR that
11229 doesn't really count, like this:
11232 bcl to set PIC register
11236 When we're called from the epilogue, we need to avoid counting
11237 this as a store. */
11239 push_topmost_sequence ();
11240 top = get_insns ();
11241 pop_topmost_sequence ();
11242 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11244 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11248 if (FIND_REG_INC_NOTE (insn, reg))
11250 else if (GET_CODE (insn) == CALL_INSN
11251 && !SIBLING_CALL_P (insn))
11253 else if (set_of (reg, insn) != NULL_RTX
11254 && !prologue_epilogue_contains (insn))
11261 /* Add a REG_MAYBE_DEAD note to the insn. */
11263 rs6000_maybe_dead (rtx insn)
11265 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11270 /* Emit instructions needed to load the TOC register.
11271 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11272 a constant pool; or for SVR4 -fpic. */
11275 rs6000_emit_load_toc_table (int fromprolog)
11278 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11280 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11282 rtx temp = (fromprolog
11283 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11284 : gen_reg_rtx (Pmode));
11285 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11287 rs6000_maybe_dead (insn);
11288 insn = emit_move_insn (dest, temp);
11290 rs6000_maybe_dead (insn);
11292 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11295 rtx tempLR = (fromprolog
11296 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11297 : gen_reg_rtx (Pmode));
11298 rtx temp0 = (fromprolog
11299 ? gen_rtx_REG (Pmode, 0)
11300 : gen_reg_rtx (Pmode));
11303 /* possibly create the toc section */
11304 if (! toc_initialized)
11307 function_section (current_function_decl);
11314 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11315 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11317 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11318 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11320 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11322 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11323 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11330 static int reload_toc_labelno = 0;
11332 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11334 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11335 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11337 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11338 emit_move_insn (dest, tempLR);
11339 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11341 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11343 rs6000_maybe_dead (insn);
11345 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11347 /* This is for AIX code running in non-PIC ELF32. */
11350 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11351 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11353 insn = emit_insn (gen_elf_high (dest, realsym));
11355 rs6000_maybe_dead (insn);
11356 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11358 rs6000_maybe_dead (insn);
11360 else if (DEFAULT_ABI == ABI_AIX)
11363 insn = emit_insn (gen_load_toc_aix_si (dest));
11365 insn = emit_insn (gen_load_toc_aix_di (dest));
11367 rs6000_maybe_dead (insn);
11373 /* Emit instructions to restore the link register after determining where
11374 its value has been stored. */
11377 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11379 rs6000_stack_t *info = rs6000_stack_info ();
11382 operands[0] = source;
11383 operands[1] = scratch;
11385 if (info->lr_save_p)
11387 rtx frame_rtx = stack_pointer_rtx;
11388 HOST_WIDE_INT sp_offset = 0;
11391 if (frame_pointer_needed
11392 || current_function_calls_alloca
11393 || info->total_size > 32767)
11395 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11396 frame_rtx = operands[1];
11398 else if (info->push_p)
11399 sp_offset = info->total_size;
11401 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11402 tmp = gen_rtx_MEM (Pmode, tmp);
11403 emit_move_insn (tmp, operands[0]);
11406 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11409 static GTY(()) int set = -1;
11412 get_TOC_alias_set (void)
11415 set = new_alias_set ();
11419 /* This returns nonzero if the current function uses the TOC. This is
11420 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11421 is generated by the ABI_V4 load_toc_* patterns. */
11428 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11431 rtx pat = PATTERN (insn);
11434 if (GET_CODE (pat) == PARALLEL)
11435 for (i = 0; i < XVECLEN (pat, 0); i++)
11437 rtx sub = XVECEXP (pat, 0, i);
11438 if (GET_CODE (sub) == USE)
11440 sub = XEXP (sub, 0);
11441 if (GET_CODE (sub) == UNSPEC
11442 && XINT (sub, 1) == UNSPEC_TOC)
11452 create_TOC_reference (rtx symbol)
11454 return gen_rtx_PLUS (Pmode,
11455 gen_rtx_REG (Pmode, TOC_REGISTER),
11456 gen_rtx_CONST (Pmode,
11457 gen_rtx_MINUS (Pmode, symbol,
11458 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11461 /* If _Unwind_* has been called from within the same module,
11462 toc register is not guaranteed to be saved to 40(1) on function
11463 entry. Save it there in that case. */
11466 rs6000_aix_emit_builtin_unwind_init (void)
11469 rtx stack_top = gen_reg_rtx (Pmode);
11470 rtx opcode_addr = gen_reg_rtx (Pmode);
11471 rtx opcode = gen_reg_rtx (SImode);
11472 rtx tocompare = gen_reg_rtx (SImode);
11473 rtx no_toc_save_needed = gen_label_rtx ();
11475 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11476 emit_move_insn (stack_top, mem);
11478 mem = gen_rtx_MEM (Pmode,
11479 gen_rtx_PLUS (Pmode, stack_top,
11480 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11481 emit_move_insn (opcode_addr, mem);
11482 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11483 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11484 : 0xE8410028, SImode));
11486 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11487 SImode, NULL_RTX, NULL_RTX,
11488 no_toc_save_needed);
11490 mem = gen_rtx_MEM (Pmode,
11491 gen_rtx_PLUS (Pmode, stack_top,
11492 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11493 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11494 emit_label (no_toc_save_needed);
11497 /* This ties together stack memory (MEM with an alias set of
11498 rs6000_sr_alias_set) and the change to the stack pointer. */
11501 rs6000_emit_stack_tie (void)
11503 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11505 set_mem_alias_set (mem, rs6000_sr_alias_set);
11506 emit_insn (gen_stack_tie (mem));
11509 /* Emit the correct code for allocating stack space, as insns.
11510 If COPY_R12, make sure a copy of the old frame is left in r12.
11511 The generated code may use hard register 0 as a temporary. */
11514 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11517 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11518 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11519 rtx todec = GEN_INT (-size);
11521 if (current_function_limit_stack)
11523 if (REG_P (stack_limit_rtx)
11524 && REGNO (stack_limit_rtx) > 1
11525 && REGNO (stack_limit_rtx) <= 31)
11527 emit_insn (TARGET_32BIT
11528 ? gen_addsi3 (tmp_reg,
11531 : gen_adddi3 (tmp_reg,
11535 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11538 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11540 && DEFAULT_ABI == ABI_V4)
11542 rtx toload = gen_rtx_CONST (VOIDmode,
11543 gen_rtx_PLUS (Pmode,
11547 emit_insn (gen_elf_high (tmp_reg, toload));
11548 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11549 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11553 warning ("stack limit expression is not supported");
11556 if (copy_r12 || ! TARGET_UPDATE)
11557 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11563 /* Need a note here so that try_split doesn't get confused. */
11564 if (get_last_insn() == NULL_RTX)
11565 emit_note (NOTE_INSN_DELETED);
11566 insn = emit_move_insn (tmp_reg, todec);
11567 try_split (PATTERN (insn), insn, 0);
11571 insn = emit_insn (TARGET_32BIT
11572 ? gen_movsi_update (stack_reg, stack_reg,
11574 : gen_movdi_update (stack_reg, stack_reg,
11575 todec, stack_reg));
11579 insn = emit_insn (TARGET_32BIT
11580 ? gen_addsi3 (stack_reg, stack_reg, todec)
11581 : gen_adddi3 (stack_reg, stack_reg, todec));
11582 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11583 gen_rtx_REG (Pmode, 12));
11586 RTX_FRAME_RELATED_P (insn) = 1;
11588 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11589 gen_rtx_SET (VOIDmode, stack_reg,
11590 gen_rtx_PLUS (Pmode, stack_reg,
11595 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11596 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11597 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11598 deduce these equivalences by itself so it wasn't necessary to hold
11599 its hand so much. */
11602 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11603 rtx reg2, rtx rreg)
11607 /* copy_rtx will not make unique copies of registers, so we need to
11608 ensure we don't have unwanted sharing here. */
11610 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11613 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11615 real = copy_rtx (PATTERN (insn));
11617 if (reg2 != NULL_RTX)
11618 real = replace_rtx (real, reg2, rreg);
11620 real = replace_rtx (real, reg,
11621 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11622 STACK_POINTER_REGNUM),
11625 /* We expect that 'real' is either a SET or a PARALLEL containing
11626 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11627 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11629 if (GET_CODE (real) == SET)
11633 temp = simplify_rtx (SET_SRC (set));
11635 SET_SRC (set) = temp;
11636 temp = simplify_rtx (SET_DEST (set));
11638 SET_DEST (set) = temp;
11639 if (GET_CODE (SET_DEST (set)) == MEM)
11641 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11643 XEXP (SET_DEST (set), 0) = temp;
11646 else if (GET_CODE (real) == PARALLEL)
11649 for (i = 0; i < XVECLEN (real, 0); i++)
11650 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11652 rtx set = XVECEXP (real, 0, i);
11654 temp = simplify_rtx (SET_SRC (set));
11656 SET_SRC (set) = temp;
11657 temp = simplify_rtx (SET_DEST (set));
11659 SET_DEST (set) = temp;
11660 if (GET_CODE (SET_DEST (set)) == MEM)
11662 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11664 XEXP (SET_DEST (set), 0) = temp;
11666 RTX_FRAME_RELATED_P (set) = 1;
11673 real = spe_synthesize_frame_save (real);
11675 RTX_FRAME_RELATED_P (insn) = 1;
11676 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11681 /* Given an SPE frame note, return a PARALLEL of SETs with the
11682 original note, plus a synthetic register save. */
11685 spe_synthesize_frame_save (rtx real)
11687 rtx synth, offset, reg, real2;
11689 if (GET_CODE (real) != SET
11690 || GET_MODE (SET_SRC (real)) != V2SImode)
11693 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11694 frame related note. The parallel contains a set of the register
11695 being saved, and another set to a synthetic register (n+1200).
11696 This is so we can differentiate between 64-bit and 32-bit saves.
11697 Words cannot describe this nastiness. */
11699 if (GET_CODE (SET_DEST (real)) != MEM
11700 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11701 || GET_CODE (SET_SRC (real)) != REG)
11705 (set (mem (plus (reg x) (const y)))
11708 (set (mem (plus (reg x) (const y+4)))
11712 real2 = copy_rtx (real);
11713 PUT_MODE (SET_DEST (real2), SImode);
11714 reg = SET_SRC (real2);
11715 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11716 synth = copy_rtx (real2);
11718 if (BYTES_BIG_ENDIAN)
11720 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11721 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11724 reg = SET_SRC (synth);
11726 synth = replace_rtx (synth, reg,
11727 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11729 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11730 synth = replace_rtx (synth, offset,
11731 GEN_INT (INTVAL (offset)
11732 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11734 RTX_FRAME_RELATED_P (synth) = 1;
11735 RTX_FRAME_RELATED_P (real2) = 1;
11736 if (BYTES_BIG_ENDIAN)
11737 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11739 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11744 /* Returns an insn that has a vrsave set operation with the
11745 appropriate CLOBBERs. */
11748 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11751 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11752 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11755 = gen_rtx_SET (VOIDmode,
11757 gen_rtx_UNSPEC_VOLATILE (SImode,
11758 gen_rtvec (2, reg, vrsave),
11763 /* We need to clobber the registers in the mask so the scheduler
11764 does not move sets to VRSAVE before sets of AltiVec registers.
11766 However, if the function receives nonlocal gotos, reload will set
11767 all call saved registers live. We will end up with:
11769 (set (reg 999) (mem))
11770 (parallel [ (set (reg vrsave) (unspec blah))
11771 (clobber (reg 999))])
11773 The clobber will cause the store into reg 999 to be dead, and
11774 flow will attempt to delete an epilogue insn. In this case, we
11775 need an unspec use/set of the register. */
11777 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11778 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11780 if (!epiloguep || call_used_regs [i])
11781 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11782 gen_rtx_REG (V4SImode, i));
11785 rtx reg = gen_rtx_REG (V4SImode, i);
11788 = gen_rtx_SET (VOIDmode,
11790 gen_rtx_UNSPEC (V4SImode,
11791 gen_rtvec (1, reg), 27));
11795 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11797 for (i = 0; i < nclobs; ++i)
11798 XVECEXP (insn, 0, i) = clobs[i];
11803 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11804 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11807 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11808 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11810 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11811 rtx replacea, replaceb;
11813 int_rtx = GEN_INT (offset);
11815 /* Some cases that need register indexed addressing. */
11816 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11818 && SPE_VECTOR_MODE (mode)
11819 && !SPE_CONST_OFFSET_OK (offset)))
11821 /* Whomever calls us must make sure r11 is available in the
11822 flow path of instructions in the prologue. */
11823 offset_rtx = gen_rtx_REG (Pmode, 11);
11824 emit_move_insn (offset_rtx, int_rtx);
11826 replacea = offset_rtx;
11827 replaceb = int_rtx;
11831 offset_rtx = int_rtx;
11832 replacea = NULL_RTX;
11833 replaceb = NULL_RTX;
11836 reg = gen_rtx_REG (mode, regno);
11837 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11838 mem = gen_rtx_MEM (mode, addr);
11839 set_mem_alias_set (mem, rs6000_sr_alias_set);
11841 insn = emit_move_insn (mem, reg);
11843 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11846 /* Emit an offset memory reference suitable for a frame store, while
11847 converting to a valid addressing mode. */
11850 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11852 rtx int_rtx, offset_rtx;
11854 int_rtx = GEN_INT (offset);
11856 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11858 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11859 emit_move_insn (offset_rtx, int_rtx);
11862 offset_rtx = int_rtx;
11864 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11867 /* Emit function prologue as insns. */
11870 rs6000_emit_prologue (void)
11872 rs6000_stack_t *info = rs6000_stack_info ();
11873 enum machine_mode reg_mode = Pmode;
11874 int reg_size = TARGET_32BIT ? 4 : 8;
11875 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11876 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11877 rtx frame_reg_rtx = sp_reg_rtx;
11878 rtx cr_save_rtx = NULL_RTX;
11880 int saving_FPRs_inline;
11881 int using_store_multiple;
11882 HOST_WIDE_INT sp_offset = 0;
11884 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11886 reg_mode = V2SImode;
11890 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11891 && (!TARGET_SPE_ABI
11892 || info->spe_64bit_regs_used == 0)
11893 && info->first_gp_reg_save < 31);
11894 saving_FPRs_inline = (info->first_fp_reg_save == 64
11895 || FP_SAVE_INLINE (info->first_fp_reg_save)
11896 || current_function_calls_eh_return
11897 || cfun->machine->ra_need_lr);
11899 /* For V.4, update stack before we do any saving and set back pointer. */
11901 && (DEFAULT_ABI == ABI_V4
11902 || current_function_calls_eh_return))
11904 if (info->total_size < 32767)
11905 sp_offset = info->total_size;
11907 frame_reg_rtx = frame_ptr_rtx;
11908 rs6000_emit_allocate_stack (info->total_size,
11909 (frame_reg_rtx != sp_reg_rtx
11910 && (info->cr_save_p
11912 || info->first_fp_reg_save < 64
11913 || info->first_gp_reg_save < 32
11915 if (frame_reg_rtx != sp_reg_rtx)
11916 rs6000_emit_stack_tie ();
11919 /* Save AltiVec registers if needed. */
11920 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11924 /* There should be a non inline version of this, for when we
11925 are saving lots of vector registers. */
11926 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11927 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11929 rtx areg, savereg, mem;
11932 offset = info->altivec_save_offset + sp_offset
11933 + 16 * (i - info->first_altivec_reg_save);
11935 savereg = gen_rtx_REG (V4SImode, i);
11937 areg = gen_rtx_REG (Pmode, 0);
11938 emit_move_insn (areg, GEN_INT (offset));
11940 /* AltiVec addressing mode is [reg+reg]. */
11941 mem = gen_rtx_MEM (V4SImode,
11942 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11944 set_mem_alias_set (mem, rs6000_sr_alias_set);
11946 insn = emit_move_insn (mem, savereg);
11948 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11949 areg, GEN_INT (offset));
11953 /* VRSAVE is a bit vector representing which AltiVec registers
11954 are used. The OS uses this to determine which vector
11955 registers to save on a context switch. We need to save
11956 VRSAVE on the stack frame, add whatever AltiVec registers we
11957 used in this function, and do the corresponding magic in the
11960 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
11961 && info->vrsave_mask != 0)
11963 rtx reg, mem, vrsave;
11966 /* Get VRSAVE onto a GPR. */
11967 reg = gen_rtx_REG (SImode, 12);
11968 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11970 emit_insn (gen_get_vrsave_internal (reg));
11972 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11975 offset = info->vrsave_save_offset + sp_offset;
11977 = gen_rtx_MEM (SImode,
11978 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11979 set_mem_alias_set (mem, rs6000_sr_alias_set);
11980 insn = emit_move_insn (mem, reg);
11982 /* Include the registers in the mask. */
11983 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11985 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11988 /* If we use the link register, get it into r0. */
11989 if (info->lr_save_p)
11990 emit_move_insn (gen_rtx_REG (Pmode, 0),
11991 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11993 /* If we need to save CR, put it into r12. */
11994 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11996 cr_save_rtx = gen_rtx_REG (SImode, 12);
11997 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12000 /* Do any required saving of fpr's. If only one or two to save, do
12001 it ourselves. Otherwise, call function. */
12002 if (saving_FPRs_inline)
12005 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12006 if ((regs_ever_live[info->first_fp_reg_save+i]
12007 && ! call_used_regs[info->first_fp_reg_save+i]))
12008 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12009 info->first_fp_reg_save + i,
12010 info->fp_save_offset + sp_offset + 8 * i,
12013 else if (info->first_fp_reg_save != 64)
12017 const char *alloc_rname;
12019 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12021 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12022 gen_rtx_REG (Pmode,
12023 LINK_REGISTER_REGNUM));
12024 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12025 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12026 alloc_rname = ggc_strdup (rname);
12027 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12028 gen_rtx_SYMBOL_REF (Pmode,
12030 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12032 rtx addr, reg, mem;
12033 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12034 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12035 GEN_INT (info->fp_save_offset
12036 + sp_offset + 8*i));
12037 mem = gen_rtx_MEM (DFmode, addr);
12038 set_mem_alias_set (mem, rs6000_sr_alias_set);
12040 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12042 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12043 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12044 NULL_RTX, NULL_RTX);
12047 /* Save GPRs. This is done as a PARALLEL if we are using
12048 the store-multiple instructions. */
12049 if (using_store_multiple)
12053 p = rtvec_alloc (32 - info->first_gp_reg_save);
12054 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12056 rtx addr, reg, mem;
12057 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12058 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12059 GEN_INT (info->gp_save_offset
12062 mem = gen_rtx_MEM (reg_mode, addr);
12063 set_mem_alias_set (mem, rs6000_sr_alias_set);
12065 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12067 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12068 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12069 NULL_RTX, NULL_RTX);
12074 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12075 if ((regs_ever_live[info->first_gp_reg_save+i]
12076 && ! call_used_regs[info->first_gp_reg_save+i])
12077 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12078 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12079 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12081 rtx addr, reg, mem;
12082 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12084 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12086 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12089 if (!SPE_CONST_OFFSET_OK (offset))
12091 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12092 emit_move_insn (b, GEN_INT (offset));
12095 b = GEN_INT (offset);
12097 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12098 mem = gen_rtx_MEM (V2SImode, addr);
12099 set_mem_alias_set (mem, rs6000_sr_alias_set);
12100 insn = emit_move_insn (mem, reg);
12102 if (GET_CODE (b) == CONST_INT)
12103 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12104 NULL_RTX, NULL_RTX);
12106 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12107 b, GEN_INT (offset));
12111 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12112 GEN_INT (info->gp_save_offset
12115 mem = gen_rtx_MEM (reg_mode, addr);
12116 set_mem_alias_set (mem, rs6000_sr_alias_set);
12118 insn = emit_move_insn (mem, reg);
12119 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12120 NULL_RTX, NULL_RTX);
12125 /* ??? There's no need to emit actual instructions here, but it's the
12126 easiest way to get the frame unwind information emitted. */
12127 if (current_function_calls_eh_return)
12129 unsigned int i, regno;
12131 /* In AIX ABI we need to pretend we save r2 here. */
12134 rtx addr, reg, mem;
12136 reg = gen_rtx_REG (reg_mode, 2);
12137 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12138 GEN_INT (sp_offset + 5 * reg_size));
12139 mem = gen_rtx_MEM (reg_mode, addr);
12140 set_mem_alias_set (mem, rs6000_sr_alias_set);
12142 insn = emit_move_insn (mem, reg);
12143 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12144 NULL_RTX, NULL_RTX);
12145 PATTERN (insn) = gen_blockage ();
12150 regno = EH_RETURN_DATA_REGNO (i);
12151 if (regno == INVALID_REGNUM)
12154 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12155 info->ehrd_offset + sp_offset
12156 + reg_size * (int) i,
12161 /* Save lr if we used it. */
12162 if (info->lr_save_p)
12164 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12165 GEN_INT (info->lr_save_offset + sp_offset));
12166 rtx reg = gen_rtx_REG (Pmode, 0);
12167 rtx mem = gen_rtx_MEM (Pmode, addr);
12168 /* This should not be of rs6000_sr_alias_set, because of
12169 __builtin_return_address. */
12171 insn = emit_move_insn (mem, reg);
12172 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12173 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12176 /* Save CR if we use any that must be preserved. */
12177 if (info->cr_save_p)
12179 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12180 GEN_INT (info->cr_save_offset + sp_offset));
12181 rtx mem = gen_rtx_MEM (SImode, addr);
12183 set_mem_alias_set (mem, rs6000_sr_alias_set);
12185 /* If r12 was used to hold the original sp, copy cr into r0 now
12187 if (REGNO (frame_reg_rtx) == 12)
12189 cr_save_rtx = gen_rtx_REG (SImode, 0);
12190 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12192 insn = emit_move_insn (mem, cr_save_rtx);
12194 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12195 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12196 But that's OK. All we have to do is specify that _one_ condition
12197 code register is saved in this stack slot. The thrower's epilogue
12198 will then restore all the call-saved registers.
12199 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12200 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12201 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12204 /* Update stack and set back pointer unless this is V.4,
12205 for which it was done previously. */
12207 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12208 rs6000_emit_allocate_stack (info->total_size, FALSE);
12210 /* Set frame pointer, if needed. */
12211 if (frame_pointer_needed)
12213 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12215 RTX_FRAME_RELATED_P (insn) = 1;
12218 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12219 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12220 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12221 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12223 /* If emit_load_toc_table will use the link register, we need to save
12224 it. We use R12 for this purpose because emit_load_toc_table
12225 can use register 0. This allows us to use a plain 'blr' to return
12226 from the procedure more often. */
12227 int save_LR_around_toc_setup = (TARGET_ELF
12228 && DEFAULT_ABI != ABI_AIX
12230 && ! info->lr_save_p
12231 && EXIT_BLOCK_PTR->pred != NULL);
12232 if (save_LR_around_toc_setup)
12234 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12235 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12236 rs6000_emit_load_toc_table (TRUE);
12237 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12240 rs6000_emit_load_toc_table (TRUE);
12244 if (DEFAULT_ABI == ABI_DARWIN
12245 && flag_pic && current_function_uses_pic_offset_table)
12247 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12248 const char *picbase = machopic_function_base_name ();
12249 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12251 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12253 rs6000_maybe_dead (
12254 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12255 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12260 /* Write function prologue. */
12263 rs6000_output_function_prologue (FILE *file,
12264 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12266 rs6000_stack_t *info = rs6000_stack_info ();
12268 if (TARGET_DEBUG_STACK)
12269 debug_stack_info (info);
12271 /* Write .extern for any function we will call to save and restore
12273 if (info->first_fp_reg_save < 64
12274 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12275 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12276 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12277 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12278 RESTORE_FP_SUFFIX);
12280 /* Write .extern for AIX common mode routines, if needed. */
12281 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12283 fputs ("\t.extern __mulh\n", file);
12284 fputs ("\t.extern __mull\n", file);
12285 fputs ("\t.extern __divss\n", file);
12286 fputs ("\t.extern __divus\n", file);
12287 fputs ("\t.extern __quoss\n", file);
12288 fputs ("\t.extern __quous\n", file);
12289 common_mode_defined = 1;
12292 if (! HAVE_prologue)
12296 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12297 the "toplevel" insn chain. */
12298 emit_note (NOTE_INSN_DELETED);
12299 rs6000_emit_prologue ();
12300 emit_note (NOTE_INSN_DELETED);
12302 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12306 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12308 INSN_ADDRESSES_NEW (insn, addr);
12313 if (TARGET_DEBUG_STACK)
12314 debug_rtx_list (get_insns (), 100);
12315 final (get_insns (), file, FALSE, FALSE);
12319 rs6000_pic_labelno++;
12322 /* Emit function epilogue as insns.
12324 At present, dwarf2out_frame_debug_expr doesn't understand
12325 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12326 anywhere in the epilogue. Most of the insns below would in any case
12327 need special notes to explain where r11 is in relation to the stack. */
12330 rs6000_emit_epilogue (int sibcall)
12332 rs6000_stack_t *info;
12333 int restoring_FPRs_inline;
12334 int using_load_multiple;
12335 int using_mfcr_multiple;
12336 int use_backchain_to_restore_sp;
12338 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12339 rtx frame_reg_rtx = sp_reg_rtx;
12340 enum machine_mode reg_mode = Pmode;
12341 int reg_size = TARGET_32BIT ? 4 : 8;
12344 info = rs6000_stack_info ();
12346 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12348 reg_mode = V2SImode;
12352 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12353 && (!TARGET_SPE_ABI
12354 || info->spe_64bit_regs_used == 0)
12355 && info->first_gp_reg_save < 31);
12356 restoring_FPRs_inline = (sibcall
12357 || current_function_calls_eh_return
12358 || info->first_fp_reg_save == 64
12359 || FP_SAVE_INLINE (info->first_fp_reg_save));
12360 use_backchain_to_restore_sp = (frame_pointer_needed
12361 || current_function_calls_alloca
12362 || info->total_size > 32767);
12363 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12364 || rs6000_cpu == PROCESSOR_PPC603
12365 || rs6000_cpu == PROCESSOR_PPC750
12368 /* If we have a frame pointer, a call to alloca, or a large stack
12369 frame, restore the old stack pointer using the backchain. Otherwise,
12370 we know what size to update it with. */
12371 if (use_backchain_to_restore_sp)
12373 /* Under V.4, don't reset the stack pointer until after we're done
12374 loading the saved registers. */
12375 if (DEFAULT_ABI == ABI_V4)
12376 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12378 emit_move_insn (frame_reg_rtx,
12379 gen_rtx_MEM (Pmode, sp_reg_rtx));
12382 else if (info->push_p)
12384 if (DEFAULT_ABI == ABI_V4
12385 || current_function_calls_eh_return)
12386 sp_offset = info->total_size;
12389 emit_insn (TARGET_32BIT
12390 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12391 GEN_INT (info->total_size))
12392 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12393 GEN_INT (info->total_size)));
12397 /* Restore AltiVec registers if needed. */
12398 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12402 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12403 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12405 rtx addr, areg, mem;
12407 areg = gen_rtx_REG (Pmode, 0);
12409 (areg, GEN_INT (info->altivec_save_offset
12411 + 16 * (i - info->first_altivec_reg_save)));
12413 /* AltiVec addressing mode is [reg+reg]. */
12414 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12415 mem = gen_rtx_MEM (V4SImode, addr);
12416 set_mem_alias_set (mem, rs6000_sr_alias_set);
12418 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12422 /* Restore VRSAVE if needed. */
12423 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
12424 && info->vrsave_mask != 0)
12426 rtx addr, mem, reg;
12428 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12429 GEN_INT (info->vrsave_save_offset + sp_offset));
12430 mem = gen_rtx_MEM (SImode, addr);
12431 set_mem_alias_set (mem, rs6000_sr_alias_set);
12432 reg = gen_rtx_REG (SImode, 12);
12433 emit_move_insn (reg, mem);
12435 emit_insn (generate_set_vrsave (reg, info, 1));
12438 /* Get the old lr if we saved it. */
12439 if (info->lr_save_p)
12441 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12442 info->lr_save_offset + sp_offset);
12444 set_mem_alias_set (mem, rs6000_sr_alias_set);
12446 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12449 /* Get the old cr if we saved it. */
12450 if (info->cr_save_p)
12452 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12453 GEN_INT (info->cr_save_offset + sp_offset));
12454 rtx mem = gen_rtx_MEM (SImode, addr);
12456 set_mem_alias_set (mem, rs6000_sr_alias_set);
12458 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12461 /* Set LR here to try to overlap restores below. */
12462 if (info->lr_save_p)
12463 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12464 gen_rtx_REG (Pmode, 0));
12466 /* Load exception handler data registers, if needed. */
12467 if (current_function_calls_eh_return)
12469 unsigned int i, regno;
12473 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12474 GEN_INT (sp_offset + 5 * reg_size));
12475 rtx mem = gen_rtx_MEM (reg_mode, addr);
12477 set_mem_alias_set (mem, rs6000_sr_alias_set);
12479 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12486 regno = EH_RETURN_DATA_REGNO (i);
12487 if (regno == INVALID_REGNUM)
12490 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12491 info->ehrd_offset + sp_offset
12492 + reg_size * (int) i);
12493 set_mem_alias_set (mem, rs6000_sr_alias_set);
12495 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12499 /* Restore GPRs. This is done as a PARALLEL if we are using
12500 the load-multiple instructions. */
12501 if (using_load_multiple)
12504 p = rtvec_alloc (32 - info->first_gp_reg_save);
12505 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12507 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12508 GEN_INT (info->gp_save_offset
12511 rtx mem = gen_rtx_MEM (reg_mode, addr);
12513 set_mem_alias_set (mem, rs6000_sr_alias_set);
12516 gen_rtx_SET (VOIDmode,
12517 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12520 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12523 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12524 if ((regs_ever_live[info->first_gp_reg_save+i]
12525 && ! call_used_regs[info->first_gp_reg_save+i])
12526 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12527 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12528 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12530 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12531 GEN_INT (info->gp_save_offset
12534 rtx mem = gen_rtx_MEM (reg_mode, addr);
12536 /* Restore 64-bit quantities for SPE. */
12537 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12539 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12542 if (!SPE_CONST_OFFSET_OK (offset))
12544 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12545 emit_move_insn (b, GEN_INT (offset));
12548 b = GEN_INT (offset);
12550 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12551 mem = gen_rtx_MEM (V2SImode, addr);
12554 set_mem_alias_set (mem, rs6000_sr_alias_set);
12556 emit_move_insn (gen_rtx_REG (reg_mode,
12557 info->first_gp_reg_save + i), mem);
12560 /* Restore fpr's if we need to do it without calling a function. */
12561 if (restoring_FPRs_inline)
12562 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12563 if ((regs_ever_live[info->first_fp_reg_save+i]
12564 && ! call_used_regs[info->first_fp_reg_save+i]))
12567 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12568 GEN_INT (info->fp_save_offset
12571 mem = gen_rtx_MEM (DFmode, addr);
12572 set_mem_alias_set (mem, rs6000_sr_alias_set);
12574 emit_move_insn (gen_rtx_REG (DFmode,
12575 info->first_fp_reg_save + i),
12579 /* If we saved cr, restore it here. Just those that were used. */
12580 if (info->cr_save_p)
12582 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12585 if (using_mfcr_multiple)
12587 for (i = 0; i < 8; i++)
12588 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12594 if (using_mfcr_multiple && count > 1)
12599 p = rtvec_alloc (count);
12602 for (i = 0; i < 8; i++)
12603 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12605 rtvec r = rtvec_alloc (2);
12606 RTVEC_ELT (r, 0) = r12_rtx;
12607 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12608 RTVEC_ELT (p, ndx) =
12609 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12610 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12613 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12618 for (i = 0; i < 8; i++)
12619 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12621 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12627 /* If this is V.4, unwind the stack pointer after all of the loads
12628 have been done. We need to emit a block here so that sched
12629 doesn't decide to move the sp change before the register restores
12630 (which may not have any obvious dependency on the stack). This
12631 doesn't hurt performance, because there is no scheduling that can
12632 be done after this point. */
12633 if (DEFAULT_ABI == ABI_V4
12634 || current_function_calls_eh_return)
12636 if (frame_reg_rtx != sp_reg_rtx)
12637 rs6000_emit_stack_tie ();
12639 if (use_backchain_to_restore_sp)
12641 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12643 else if (sp_offset != 0)
12645 emit_insn (TARGET_32BIT
12646 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12647 GEN_INT (sp_offset))
12648 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12649 GEN_INT (sp_offset)));
12653 if (current_function_calls_eh_return)
12655 rtx sa = EH_RETURN_STACKADJ_RTX;
12656 emit_insn (TARGET_32BIT
12657 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12658 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12664 if (! restoring_FPRs_inline)
12665 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12667 p = rtvec_alloc (2);
12669 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12670 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12671 gen_rtx_REG (Pmode,
12672 LINK_REGISTER_REGNUM));
12674 /* If we have to restore more than two FP registers, branch to the
12675 restore function. It will return to our caller. */
12676 if (! restoring_FPRs_inline)
12680 const char *alloc_rname;
12682 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12683 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12684 alloc_rname = ggc_strdup (rname);
12685 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12686 gen_rtx_SYMBOL_REF (Pmode,
12689 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12692 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12693 GEN_INT (info->fp_save_offset + 8*i));
12694 mem = gen_rtx_MEM (DFmode, addr);
12695 set_mem_alias_set (mem, rs6000_sr_alias_set);
12697 RTVEC_ELT (p, i+3) =
12698 gen_rtx_SET (VOIDmode,
12699 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12704 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12708 /* Write function epilogue. */
12711 rs6000_output_function_epilogue (FILE *file,
12712 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12714 rs6000_stack_t *info = rs6000_stack_info ();
12716 if (! HAVE_epilogue)
12718 rtx insn = get_last_insn ();
12719 /* If the last insn was a BARRIER, we don't have to write anything except
12720 the trace table. */
12721 if (GET_CODE (insn) == NOTE)
12722 insn = prev_nonnote_insn (insn);
12723 if (insn == 0 || GET_CODE (insn) != BARRIER)
12725 /* This is slightly ugly, but at least we don't have two
12726 copies of the epilogue-emitting code. */
12729 /* A NOTE_INSN_DELETED is supposed to be at the start
12730 and end of the "toplevel" insn chain. */
12731 emit_note (NOTE_INSN_DELETED);
12732 rs6000_emit_epilogue (FALSE);
12733 emit_note (NOTE_INSN_DELETED);
12735 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12739 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12741 INSN_ADDRESSES_NEW (insn, addr);
12746 if (TARGET_DEBUG_STACK)
12747 debug_rtx_list (get_insns (), 100);
12748 final (get_insns (), file, FALSE, FALSE);
12754 macho_branch_islands ();
12755 /* Mach-O doesn't support labels at the end of objects, so if
12756 it looks like we might want one, insert a NOP. */
12758 rtx insn = get_last_insn ();
12761 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12762 insn = PREV_INSN (insn);
12766 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12767 fputs ("\tnop\n", file);
12771 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12774 We don't output a traceback table if -finhibit-size-directive was
12775 used. The documentation for -finhibit-size-directive reads
12776 ``don't output a @code{.size} assembler directive, or anything
12777 else that would cause trouble if the function is split in the
12778 middle, and the two halves are placed at locations far apart in
12779 memory.'' The traceback table has this property, since it
12780 includes the offset from the start of the function to the
12781 traceback table itself.
12783 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12784 different traceback table. */
12785 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12786 && rs6000_traceback != traceback_none)
12788 const char *fname = NULL;
12789 const char *language_string = lang_hooks.name;
12790 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12792 int optional_tbtab;
12794 if (rs6000_traceback == traceback_full)
12795 optional_tbtab = 1;
12796 else if (rs6000_traceback == traceback_part)
12797 optional_tbtab = 0;
12799 optional_tbtab = !optimize_size && !TARGET_ELF;
12801 if (optional_tbtab)
12803 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12804 while (*fname == '.') /* V.4 encodes . in the name */
12807 /* Need label immediately before tbtab, so we can compute
12808 its offset from the function start. */
12809 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12810 ASM_OUTPUT_LABEL (file, fname);
12813 /* The .tbtab pseudo-op can only be used for the first eight
12814 expressions, since it can't handle the possibly variable
12815 length fields that follow. However, if you omit the optional
12816 fields, the assembler outputs zeros for all optional fields
12817 anyways, giving each variable length field is minimum length
12818 (as defined in sys/debug.h). Thus we can not use the .tbtab
12819 pseudo-op at all. */
12821 /* An all-zero word flags the start of the tbtab, for debuggers
12822 that have to find it by searching forward from the entry
12823 point or from the current pc. */
12824 fputs ("\t.long 0\n", file);
12826 /* Tbtab format type. Use format type 0. */
12827 fputs ("\t.byte 0,", file);
12829 /* Language type. Unfortunately, there does not seem to be any
12830 official way to discover the language being compiled, so we
12831 use language_string.
12832 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12833 Java is 13. Objective-C is 14. */
12834 if (! strcmp (language_string, "GNU C"))
12836 else if (! strcmp (language_string, "GNU F77"))
12838 else if (! strcmp (language_string, "GNU Pascal"))
12840 else if (! strcmp (language_string, "GNU Ada"))
12842 else if (! strcmp (language_string, "GNU C++"))
12844 else if (! strcmp (language_string, "GNU Java"))
12846 else if (! strcmp (language_string, "GNU Objective-C"))
12850 fprintf (file, "%d,", i);
12852 /* 8 single bit fields: global linkage (not set for C extern linkage,
12853 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12854 from start of procedure stored in tbtab, internal function, function
12855 has controlled storage, function has no toc, function uses fp,
12856 function logs/aborts fp operations. */
12857 /* Assume that fp operations are used if any fp reg must be saved. */
12858 fprintf (file, "%d,",
12859 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12861 /* 6 bitfields: function is interrupt handler, name present in
12862 proc table, function calls alloca, on condition directives
12863 (controls stack walks, 3 bits), saves condition reg, saves
12865 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12866 set up as a frame pointer, even when there is no alloca call. */
12867 fprintf (file, "%d,",
12868 ((optional_tbtab << 6)
12869 | ((optional_tbtab & frame_pointer_needed) << 5)
12870 | (info->cr_save_p << 1)
12871 | (info->lr_save_p)));
12873 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12875 fprintf (file, "%d,",
12876 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12878 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12879 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12881 if (optional_tbtab)
12883 /* Compute the parameter info from the function decl argument
12886 int next_parm_info_bit = 31;
12888 for (decl = DECL_ARGUMENTS (current_function_decl);
12889 decl; decl = TREE_CHAIN (decl))
12891 rtx parameter = DECL_INCOMING_RTL (decl);
12892 enum machine_mode mode = GET_MODE (parameter);
12894 if (GET_CODE (parameter) == REG)
12896 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12902 if (mode == SFmode)
12904 else if (mode == DFmode || mode == TFmode)
12909 /* If only one bit will fit, don't or in this entry. */
12910 if (next_parm_info_bit > 0)
12911 parm_info |= (bits << (next_parm_info_bit - 1));
12912 next_parm_info_bit -= 2;
12916 fixed_parms += ((GET_MODE_SIZE (mode)
12917 + (UNITS_PER_WORD - 1))
12919 next_parm_info_bit -= 1;
12925 /* Number of fixed point parameters. */
12926 /* This is actually the number of words of fixed point parameters; thus
12927 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12928 fprintf (file, "%d,", fixed_parms);
12930 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12932 /* This is actually the number of fp registers that hold parameters;
12933 and thus the maximum value is 13. */
12934 /* Set parameters on stack bit if parameters are not in their original
12935 registers, regardless of whether they are on the stack? Xlc
12936 seems to set the bit when not optimizing. */
12937 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12939 if (! optional_tbtab)
12942 /* Optional fields follow. Some are variable length. */
12944 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12945 11 double float. */
12946 /* There is an entry for each parameter in a register, in the order that
12947 they occur in the parameter list. Any intervening arguments on the
12948 stack are ignored. If the list overflows a long (max possible length
12949 34 bits) then completely leave off all elements that don't fit. */
12950 /* Only emit this long if there was at least one parameter. */
12951 if (fixed_parms || float_parms)
12952 fprintf (file, "\t.long %d\n", parm_info);
12954 /* Offset from start of code to tb table. */
12955 fputs ("\t.long ", file);
12956 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12958 RS6000_OUTPUT_BASENAME (file, fname);
12960 assemble_name (file, fname);
12962 fputs ("-.", file);
12964 RS6000_OUTPUT_BASENAME (file, fname);
12966 assemble_name (file, fname);
12970 /* Interrupt handler mask. */
12971 /* Omit this long, since we never set the interrupt handler bit
12974 /* Number of CTL (controlled storage) anchors. */
12975 /* Omit this long, since the has_ctl bit is never set above. */
12977 /* Displacement into stack of each CTL anchor. */
12978 /* Omit this list of longs, because there are no CTL anchors. */
12980 /* Length of function name. */
12983 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12985 /* Function name. */
12986 assemble_string (fname, strlen (fname));
12988 /* Register for alloca automatic storage; this is always reg 31.
12989 Only emit this if the alloca bit was set above. */
12990 if (frame_pointer_needed)
12991 fputs ("\t.byte 31\n", file);
12993 fputs ("\t.align 2\n", file);
12997 /* A C compound statement that outputs the assembler code for a thunk
12998 function, used to implement C++ virtual function calls with
12999 multiple inheritance. The thunk acts as a wrapper around a virtual
13000 function, adjusting the implicit object parameter before handing
13001 control off to the real function.
13003 First, emit code to add the integer DELTA to the location that
13004 contains the incoming first argument. Assume that this argument
13005 contains a pointer, and is the one used to pass the `this' pointer
13006 in C++. This is the incoming argument *before* the function
13007 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13008 values of all other incoming arguments.
13010 After the addition, emit code to jump to FUNCTION, which is a
13011 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13012 not touch the return address. Hence returning from FUNCTION will
13013 return to whoever called the current `thunk'.
13015 The effect must be as if FUNCTION had been called directly with the
13016 adjusted first argument. This macro is responsible for emitting
13017 all of the code for a thunk function; output_function_prologue()
13018 and output_function_epilogue() are not invoked.
13020 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13021 been extracted from it.) It might possibly be useful on some
13022 targets, but probably not.
13024 If you do not define this macro, the target-independent code in the
13025 C++ frontend will generate a less efficient heavyweight thunk that
13026 calls FUNCTION instead of jumping to it. The generic approach does
13027 not support varargs. */
13030 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13031 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13034 rtx this, insn, funexp;
13036 reload_completed = 1;
13037 epilogue_completed = 1;
13038 no_new_pseudos = 1;
13040 /* Mark the end of the (empty) prologue. */
13041 emit_note (NOTE_INSN_PROLOGUE_END);
13043 /* Find the "this" pointer. If the function returns a structure,
13044 the structure return pointer is in r3. */
13045 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13046 this = gen_rtx_REG (Pmode, 4);
13048 this = gen_rtx_REG (Pmode, 3);
13050 /* Apply the constant offset, if required. */
13053 rtx delta_rtx = GEN_INT (delta);
13054 emit_insn (TARGET_32BIT
13055 ? gen_addsi3 (this, this, delta_rtx)
13056 : gen_adddi3 (this, this, delta_rtx));
13059 /* Apply the offset from the vtable, if required. */
13062 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13063 rtx tmp = gen_rtx_REG (Pmode, 12);
13065 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13066 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13068 emit_insn (TARGET_32BIT
13069 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13070 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13071 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13075 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13077 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13079 emit_insn (TARGET_32BIT
13080 ? gen_addsi3 (this, this, tmp)
13081 : gen_adddi3 (this, this, tmp));
13084 /* Generate a tail call to the target function. */
13085 if (!TREE_USED (function))
13087 assemble_external (function);
13088 TREE_USED (function) = 1;
13090 funexp = XEXP (DECL_RTL (function), 0);
13091 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13094 if (MACHOPIC_INDIRECT)
13095 funexp = machopic_indirect_call_target (funexp);
13098 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13099 generate sibcall RTL explicitly to avoid constraint abort. */
13100 insn = emit_call_insn (
13101 gen_rtx_PARALLEL (VOIDmode,
13103 gen_rtx_CALL (VOIDmode,
13104 funexp, const0_rtx),
13105 gen_rtx_USE (VOIDmode, const0_rtx),
13106 gen_rtx_USE (VOIDmode,
13107 gen_rtx_REG (SImode,
13108 LINK_REGISTER_REGNUM)),
13109 gen_rtx_RETURN (VOIDmode))));
13110 SIBLING_CALL_P (insn) = 1;
13113 /* Run just enough of rest_of_compilation to get the insns emitted.
13114 There's not really enough bulk here to make other passes such as
13115 instruction scheduling worth while. Note that use_thunk calls
13116 assemble_start_function and assemble_end_function. */
13117 insn = get_insns ();
13118 insn_locators_initialize ();
13119 shorten_branches (insn);
13120 final_start_function (insn, file, 1);
13121 final (insn, file, 1, 0);
13122 final_end_function ();
13124 reload_completed = 0;
13125 epilogue_completed = 0;
13126 no_new_pseudos = 0;
13129 /* A quick summary of the various types of 'constant-pool tables'
13132 Target Flags Name One table per
13133 AIX (none) AIX TOC object file
13134 AIX -mfull-toc AIX TOC object file
13135 AIX -mminimal-toc AIX minimal TOC translation unit
13136 SVR4/EABI (none) SVR4 SDATA object file
13137 SVR4/EABI -fpic SVR4 pic object file
13138 SVR4/EABI -fPIC SVR4 PIC translation unit
13139 SVR4/EABI -mrelocatable EABI TOC function
13140 SVR4/EABI -maix AIX TOC object file
13141 SVR4/EABI -maix -mminimal-toc
13142 AIX minimal TOC translation unit
13144 Name Reg. Set by entries contains:
13145 made by addrs? fp? sum?
13147 AIX TOC 2 crt0 as Y option option
13148 AIX minimal TOC 30 prolog gcc Y Y option
13149 SVR4 SDATA 13 crt0 gcc N Y N
13150 SVR4 pic 30 prolog ld Y not yet N
13151 SVR4 PIC 30 prolog gcc Y option option
13152 EABI TOC 30 prolog gcc Y option option
13156 /* Hash functions for the hash table. */
13159 rs6000_hash_constant (rtx k)
13161 enum rtx_code code = GET_CODE (k);
13162 enum machine_mode mode = GET_MODE (k);
13163 unsigned result = (code << 3) ^ mode;
13164 const char *format;
13167 format = GET_RTX_FORMAT (code);
13168 flen = strlen (format);
13174 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13177 if (mode != VOIDmode)
13178 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13190 for (; fidx < flen; fidx++)
13191 switch (format[fidx])
13196 const char *str = XSTR (k, fidx);
13197 len = strlen (str);
13198 result = result * 613 + len;
13199 for (i = 0; i < len; i++)
13200 result = result * 613 + (unsigned) str[i];
13205 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13209 result = result * 613 + (unsigned) XINT (k, fidx);
13212 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13213 result = result * 613 + (unsigned) XWINT (k, fidx);
13217 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13218 result = result * 613 + (unsigned) (XWINT (k, fidx)
13232 toc_hash_function (const void *hash_entry)
13234 const struct toc_hash_struct *thc =
13235 (const struct toc_hash_struct *) hash_entry;
13236 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13239 /* Compare H1 and H2 for equivalence. */
13242 toc_hash_eq (const void *h1, const void *h2)
13244 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13245 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13247 if (((const struct toc_hash_struct *) h1)->key_mode
13248 != ((const struct toc_hash_struct *) h2)->key_mode)
13251 return rtx_equal_p (r1, r2);
13254 /* These are the names given by the C++ front-end to vtables, and
13255 vtable-like objects. Ideally, this logic should not be here;
13256 instead, there should be some programmatic way of inquiring as
13257 to whether or not an object is a vtable. */
13259 #define VTABLE_NAME_P(NAME) \
13260 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13261 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13262 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13263 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13266 rs6000_output_symbol_ref (FILE *file, rtx x)
13268 /* Currently C++ toc references to vtables can be emitted before it
13269 is decided whether the vtable is public or private. If this is
13270 the case, then the linker will eventually complain that there is
13271 a reference to an unknown section. Thus, for vtables only,
13272 we emit the TOC reference to reference the symbol and not the
13274 const char *name = XSTR (x, 0);
13276 if (VTABLE_NAME_P (name))
13278 RS6000_OUTPUT_BASENAME (file, name);
13281 assemble_name (file, name);
13284 /* Output a TOC entry. We derive the entry name from what is being
13288 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13291 const char *name = buf;
13292 const char *real_name;
13299 /* When the linker won't eliminate them, don't output duplicate
13300 TOC entries (this happens on AIX if there is any kind of TOC,
13301 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13303 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13305 struct toc_hash_struct *h;
13308 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13309 time because GGC is not initialized at that point. */
13310 if (toc_hash_table == NULL)
13311 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13312 toc_hash_eq, NULL);
13314 h = ggc_alloc (sizeof (*h));
13316 h->key_mode = mode;
13317 h->labelno = labelno;
13319 found = htab_find_slot (toc_hash_table, h, 1);
13320 if (*found == NULL)
13322 else /* This is indeed a duplicate.
13323 Set this label equal to that label. */
13325 fputs ("\t.set ", file);
13326 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13327 fprintf (file, "%d,", labelno);
13328 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13329 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13335 /* If we're going to put a double constant in the TOC, make sure it's
13336 aligned properly when strict alignment is on. */
13337 if (GET_CODE (x) == CONST_DOUBLE
13338 && STRICT_ALIGNMENT
13339 && GET_MODE_BITSIZE (mode) >= 64
13340 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13341 ASM_OUTPUT_ALIGN (file, 3);
13344 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13346 /* Handle FP constants specially. Note that if we have a minimal
13347 TOC, things we put here aren't actually in the TOC, so we can allow
13349 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13351 REAL_VALUE_TYPE rv;
13354 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13355 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13359 if (TARGET_MINIMAL_TOC)
13360 fputs (DOUBLE_INT_ASM_OP, file);
13362 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13363 k[0] & 0xffffffff, k[1] & 0xffffffff,
13364 k[2] & 0xffffffff, k[3] & 0xffffffff);
13365 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13366 k[0] & 0xffffffff, k[1] & 0xffffffff,
13367 k[2] & 0xffffffff, k[3] & 0xffffffff);
13372 if (TARGET_MINIMAL_TOC)
13373 fputs ("\t.long ", file);
13375 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13376 k[0] & 0xffffffff, k[1] & 0xffffffff,
13377 k[2] & 0xffffffff, k[3] & 0xffffffff);
13378 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13379 k[0] & 0xffffffff, k[1] & 0xffffffff,
13380 k[2] & 0xffffffff, k[3] & 0xffffffff);
13384 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13386 REAL_VALUE_TYPE rv;
13389 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13390 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13394 if (TARGET_MINIMAL_TOC)
13395 fputs (DOUBLE_INT_ASM_OP, file);
13397 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13398 k[0] & 0xffffffff, k[1] & 0xffffffff);
13399 fprintf (file, "0x%lx%08lx\n",
13400 k[0] & 0xffffffff, k[1] & 0xffffffff);
13405 if (TARGET_MINIMAL_TOC)
13406 fputs ("\t.long ", file);
13408 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13409 k[0] & 0xffffffff, k[1] & 0xffffffff);
13410 fprintf (file, "0x%lx,0x%lx\n",
13411 k[0] & 0xffffffff, k[1] & 0xffffffff);
13415 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13417 REAL_VALUE_TYPE rv;
13420 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13421 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13425 if (TARGET_MINIMAL_TOC)
13426 fputs (DOUBLE_INT_ASM_OP, file);
13428 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13429 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13434 if (TARGET_MINIMAL_TOC)
13435 fputs ("\t.long ", file);
13437 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13438 fprintf (file, "0x%lx\n", l & 0xffffffff);
13442 else if (GET_MODE (x) == VOIDmode
13443 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13445 unsigned HOST_WIDE_INT low;
13446 HOST_WIDE_INT high;
13448 if (GET_CODE (x) == CONST_DOUBLE)
13450 low = CONST_DOUBLE_LOW (x);
13451 high = CONST_DOUBLE_HIGH (x);
13454 #if HOST_BITS_PER_WIDE_INT == 32
13457 high = (low & 0x80000000) ? ~0 : 0;
13461 low = INTVAL (x) & 0xffffffff;
13462 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13466 /* TOC entries are always Pmode-sized, but since this
13467 is a bigendian machine then if we're putting smaller
13468 integer constants in the TOC we have to pad them.
13469 (This is still a win over putting the constants in
13470 a separate constant pool, because then we'd have
13471 to have both a TOC entry _and_ the actual constant.)
13473 For a 32-bit target, CONST_INT values are loaded and shifted
13474 entirely within `low' and can be stored in one TOC entry. */
13476 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13477 abort ();/* It would be easy to make this work, but it doesn't now. */
13479 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13481 #if HOST_BITS_PER_WIDE_INT == 32
13482 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13483 POINTER_SIZE, &low, &high, 0);
13486 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13487 high = (HOST_WIDE_INT) low >> 32;
13494 if (TARGET_MINIMAL_TOC)
13495 fputs (DOUBLE_INT_ASM_OP, file);
13497 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13498 (long) high & 0xffffffff, (long) low & 0xffffffff);
13499 fprintf (file, "0x%lx%08lx\n",
13500 (long) high & 0xffffffff, (long) low & 0xffffffff);
13505 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13507 if (TARGET_MINIMAL_TOC)
13508 fputs ("\t.long ", file);
13510 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13511 (long) high & 0xffffffff, (long) low & 0xffffffff);
13512 fprintf (file, "0x%lx,0x%lx\n",
13513 (long) high & 0xffffffff, (long) low & 0xffffffff);
13517 if (TARGET_MINIMAL_TOC)
13518 fputs ("\t.long ", file);
13520 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13521 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13527 if (GET_CODE (x) == CONST)
13529 if (GET_CODE (XEXP (x, 0)) != PLUS)
13532 base = XEXP (XEXP (x, 0), 0);
13533 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13536 if (GET_CODE (base) == SYMBOL_REF)
13537 name = XSTR (base, 0);
13538 else if (GET_CODE (base) == LABEL_REF)
13539 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13540 else if (GET_CODE (base) == CODE_LABEL)
13541 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13545 real_name = (*targetm.strip_name_encoding) (name);
13546 if (TARGET_MINIMAL_TOC)
13547 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13550 fprintf (file, "\t.tc %s", real_name);
13553 fprintf (file, ".N%d", - offset);
13555 fprintf (file, ".P%d", offset);
13557 fputs ("[TC],", file);
13560 /* Currently C++ toc references to vtables can be emitted before it
13561 is decided whether the vtable is public or private. If this is
13562 the case, then the linker will eventually complain that there is
13563 a TOC reference to an unknown section. Thus, for vtables only,
13564 we emit the TOC reference to reference the symbol and not the
13566 if (VTABLE_NAME_P (name))
13568 RS6000_OUTPUT_BASENAME (file, name);
13570 fprintf (file, "%d", offset);
13571 else if (offset > 0)
13572 fprintf (file, "+%d", offset);
13575 output_addr_const (file, x);
13579 /* Output an assembler pseudo-op to write an ASCII string of N characters
13580 starting at P to FILE.
13582 On the RS/6000, we have to do this using the .byte operation and
13583 write out special characters outside the quoted string.
13584 Also, the assembler is broken; very long strings are truncated,
13585 so we must artificially break them up early. */
13588 output_ascii (FILE *file, const char *p, int n)
13591 int i, count_string;
13592 const char *for_string = "\t.byte \"";
13593 const char *for_decimal = "\t.byte ";
13594 const char *to_close = NULL;
13597 for (i = 0; i < n; i++)
13600 if (c >= ' ' && c < 0177)
13603 fputs (for_string, file);
13606 /* Write two quotes to get one. */
13614 for_decimal = "\"\n\t.byte ";
13618 if (count_string >= 512)
13620 fputs (to_close, file);
13622 for_string = "\t.byte \"";
13623 for_decimal = "\t.byte ";
13631 fputs (for_decimal, file);
13632 fprintf (file, "%d", c);
13634 for_string = "\n\t.byte \"";
13635 for_decimal = ", ";
13641 /* Now close the string if we have written one. Then end the line. */
13643 fputs (to_close, file);
13646 /* Generate a unique section name for FILENAME for a section type
13647 represented by SECTION_DESC. Output goes into BUF.
13649 SECTION_DESC can be any string, as long as it is different for each
13650 possible section type.
13652 We name the section in the same manner as xlc. The name begins with an
13653 underscore followed by the filename (after stripping any leading directory
13654 names) with the last period replaced by the string SECTION_DESC. If
13655 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13659 rs6000_gen_section_name (char **buf, const char *filename,
13660 const char *section_desc)
13662 const char *q, *after_last_slash, *last_period = 0;
13666 after_last_slash = filename;
13667 for (q = filename; *q; q++)
13670 after_last_slash = q + 1;
13671 else if (*q == '.')
13675 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13676 *buf = (char *) xmalloc (len);
13681 for (q = after_last_slash; *q; q++)
13683 if (q == last_period)
13685 strcpy (p, section_desc);
13686 p += strlen (section_desc);
13690 else if (ISALNUM (*q))
13694 if (last_period == 0)
13695 strcpy (p, section_desc);
13700 /* Emit profile function. */
13703 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13705 if (TARGET_PROFILE_KERNEL)
13708 if (DEFAULT_ABI == ABI_AIX)
13710 #ifndef NO_PROFILE_COUNTERS
13711 # define NO_PROFILE_COUNTERS 0
13713 if (NO_PROFILE_COUNTERS)
13714 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13718 const char *label_name;
13721 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13722 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13723 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13725 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13729 else if (DEFAULT_ABI == ABI_DARWIN)
13731 const char *mcount_name = RS6000_MCOUNT;
13732 int caller_addr_regno = LINK_REGISTER_REGNUM;
13734 /* Be conservative and always set this, at least for now. */
13735 current_function_uses_pic_offset_table = 1;
13738 /* For PIC code, set up a stub and collect the caller's address
13739 from r0, which is where the prologue puts it. */
13740 if (MACHOPIC_INDIRECT)
13742 mcount_name = machopic_stub_name (mcount_name);
13743 if (current_function_uses_pic_offset_table)
13744 caller_addr_regno = 0;
13747 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13749 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13753 /* Write function profiler code. */
13756 output_function_profiler (FILE *file, int labelno)
13761 switch (DEFAULT_ABI)
13770 warning ("no profiling of 64-bit code for this ABI");
13773 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13774 fprintf (file, "\tmflr %s\n", reg_names[0]);
13777 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13778 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13779 reg_names[0], save_lr, reg_names[1]);
13780 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13781 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13782 assemble_name (file, buf);
13783 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13785 else if (flag_pic > 1)
13787 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13788 reg_names[0], save_lr, reg_names[1]);
13789 /* Now, we need to get the address of the label. */
13790 fputs ("\tbl 1f\n\t.long ", file);
13791 assemble_name (file, buf);
13792 fputs ("-.\n1:", file);
13793 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13794 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13795 reg_names[0], reg_names[11]);
13796 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13797 reg_names[0], reg_names[0], reg_names[11]);
13801 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13802 assemble_name (file, buf);
13803 fputs ("@ha\n", file);
13804 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13805 reg_names[0], save_lr, reg_names[1]);
13806 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13807 assemble_name (file, buf);
13808 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13811 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13812 fprintf (file, "\tbl %s%s\n",
13813 RS6000_MCOUNT, flag_pic ? "@plt" : "");
13818 if (!TARGET_PROFILE_KERNEL)
13820 /* Don't do anything, done in output_profile_hook (). */
13827 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13828 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13830 if (current_function_needs_context)
13832 asm_fprintf (file, "\tstd %s,24(%s)\n",
13833 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13834 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13835 asm_fprintf (file, "\tld %s,24(%s)\n",
13836 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13839 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13847 rs6000_use_dfa_pipeline_interface (void)
13852 /* Power4 load update and store update instructions are cracked into a
13853 load or store and an integer insn which are executed in the same cycle.
13854 Branches have their own dispatch slot which does not count against the
13855 GCC issue rate, but it changes the program flow so there are no other
13856 instructions to issue in this cycle. */
13859 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13860 int verbose ATTRIBUTE_UNUSED,
13861 rtx insn, int more)
13863 if (GET_CODE (PATTERN (insn)) == USE
13864 || GET_CODE (PATTERN (insn)) == CLOBBER)
13867 if (rs6000_cpu == PROCESSOR_POWER4)
13869 if (is_microcoded_insn (insn))
13871 else if (is_cracked_insn (insn))
13872 return more > 2 ? more - 2 : 0;
13878 /* Adjust the cost of a scheduling dependency. Return the new cost of
13879 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13882 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13885 if (! recog_memoized (insn))
13888 if (REG_NOTE_KIND (link) != 0)
13891 if (REG_NOTE_KIND (link) == 0)
13893 /* Data dependency; DEP_INSN writes a register that INSN reads
13894 some cycles later. */
13895 switch (get_attr_type (insn))
13898 /* Tell the first scheduling pass about the latency between
13899 a mtctr and bctr (and mtlr and br/blr). The first
13900 scheduling pass will not know about this latency since
13901 the mtctr instruction, which has the latency associated
13902 to it, will be generated by reload. */
13903 return TARGET_POWER ? 5 : 4;
13905 /* Leave some extra cycles between a compare and its
13906 dependent branch, to inhibit expensive mispredicts. */
13907 if ((rs6000_cpu_attr == CPU_PPC603
13908 || rs6000_cpu_attr == CPU_PPC604
13909 || rs6000_cpu_attr == CPU_PPC604E
13910 || rs6000_cpu_attr == CPU_PPC620
13911 || rs6000_cpu_attr == CPU_PPC630
13912 || rs6000_cpu_attr == CPU_PPC750
13913 || rs6000_cpu_attr == CPU_PPC7400
13914 || rs6000_cpu_attr == CPU_PPC7450
13915 || rs6000_cpu_attr == CPU_POWER4)
13916 && recog_memoized (dep_insn)
13917 && (INSN_CODE (dep_insn) >= 0)
13918 && (get_attr_type (dep_insn) == TYPE_CMP
13919 || get_attr_type (dep_insn) == TYPE_COMPARE
13920 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13921 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13922 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13923 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13924 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13925 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13930 /* Fall out to return default cost. */
13936 /* The function returns a true if INSN is microcoded.
13937 Return false otherwise. */
13940 is_microcoded_insn (rtx insn)
13942 if (!insn || !INSN_P (insn)
13943 || GET_CODE (PATTERN (insn)) == USE
13944 || GET_CODE (PATTERN (insn)) == CLOBBER)
13947 if (rs6000_cpu == PROCESSOR_POWER4)
13949 enum attr_type type = get_attr_type (insn);
13950 if (type == TYPE_LOAD_EXT_U
13951 || type == TYPE_LOAD_EXT_UX
13952 || type == TYPE_LOAD_UX
13953 || type == TYPE_STORE_UX
13954 || type == TYPE_MFCR)
13961 /* The function returns a nonzero value if INSN can be scheduled only
13962 as the first insn in a dispatch group ("dispatch-slot restricted").
13963 In this case, the returned value indicates how many dispatch slots
13964 the insn occupies (at the beginning of the group).
13965 Return 0 otherwise. */
13968 is_dispatch_slot_restricted (rtx insn)
13970 enum attr_type type;
13972 if (rs6000_cpu != PROCESSOR_POWER4)
13976 || insn == NULL_RTX
13977 || GET_CODE (insn) == NOTE
13978 || GET_CODE (PATTERN (insn)) == USE
13979 || GET_CODE (PATTERN (insn)) == CLOBBER)
13982 type = get_attr_type (insn);
13988 case TYPE_DELAYED_CR:
13989 case TYPE_CR_LOGICAL:
14001 /* The function returns true if INSN is cracked into 2 instructions
14002 by the processor (and therefore occupies 2 issue slots). */
14005 is_cracked_insn (rtx insn)
14007 if (!insn || !INSN_P (insn)
14008 || GET_CODE (PATTERN (insn)) == USE
14009 || GET_CODE (PATTERN (insn)) == CLOBBER)
14012 if (rs6000_cpu == PROCESSOR_POWER4)
14014 enum attr_type type = get_attr_type (insn);
14015 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14016 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14017 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14018 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14019 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14020 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14021 || type == TYPE_IDIV || type == TYPE_LDIV
14022 || type == TYPE_INSERT_WORD)
14029 /* The function returns true if INSN can be issued only from
14030 the branch slot. */
14033 is_branch_slot_insn (rtx insn)
14035 if (!insn || !INSN_P (insn)
14036 || GET_CODE (PATTERN (insn)) == USE
14037 || GET_CODE (PATTERN (insn)) == CLOBBER)
14040 if (rs6000_cpu == PROCESSOR_POWER4)
14042 enum attr_type type = get_attr_type (insn);
14043 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14051 /* A C statement (sans semicolon) to update the integer scheduling
14052 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14053 INSN earlier, reduce the priority to execute INSN later. Do not
14054 define this macro if you do not need to adjust the scheduling
14055 priorities of insns. */
14058 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14060 /* On machines (like the 750) which have asymmetric integer units,
14061 where one integer unit can do multiply and divides and the other
14062 can't, reduce the priority of multiply/divide so it is scheduled
14063 before other integer operations. */
14066 if (! INSN_P (insn))
14069 if (GET_CODE (PATTERN (insn)) == USE)
14072 switch (rs6000_cpu_attr) {
14074 switch (get_attr_type (insn))
14081 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14082 priority, priority);
14083 if (priority >= 0 && priority < 0x01000000)
14090 if (is_dispatch_slot_restricted (insn)
14091 && reload_completed
14092 && current_sched_info->sched_max_insns_priority
14093 && rs6000_sched_restricted_insns_priority)
14096 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14097 if (rs6000_sched_restricted_insns_priority == 1)
14098 /* Attach highest priority to insn. This means that in
14099 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14100 precede 'priority' (critical path) considerations. */
14101 return current_sched_info->sched_max_insns_priority;
14102 else if (rs6000_sched_restricted_insns_priority == 2)
14103 /* Increase priority of insn by a minimal amount. This means that in
14104 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14105 precede dispatch-slot restriction considerations. */
14106 return (priority + 1);
14112 /* Return how many instructions the machine can issue per cycle. */
14115 rs6000_issue_rate (void)
14117 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14118 if (!reload_completed)
14121 switch (rs6000_cpu_attr) {
14122 case CPU_RIOS1: /* ? */
14124 case CPU_PPC601: /* ? */
14146 /* Return how many instructions to look ahead for better insn
14150 rs6000_use_sched_lookahead (void)
14152 if (rs6000_cpu_attr == CPU_PPC8540)
14157 /* Determine is PAT refers to memory. */
14160 is_mem_ref (rtx pat)
14166 if (GET_CODE (pat) == MEM)
14169 /* Recursively process the pattern. */
14170 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14172 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14175 ret |= is_mem_ref (XEXP (pat, i));
14176 else if (fmt[i] == 'E')
14177 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14178 ret |= is_mem_ref (XVECEXP (pat, i, j));
14184 /* Determine if PAT is a PATTERN of a load insn. */
14187 is_load_insn1 (rtx pat)
14189 if (!pat || pat == NULL_RTX)
14192 if (GET_CODE (pat) == SET)
14193 return is_mem_ref (SET_SRC (pat));
14195 if (GET_CODE (pat) == PARALLEL)
14199 for (i = 0; i < XVECLEN (pat, 0); i++)
14200 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14207 /* Determine if INSN loads from memory. */
14210 is_load_insn (rtx insn)
14212 if (!insn || !INSN_P (insn))
14215 if (GET_CODE (insn) == CALL_INSN)
14218 return is_load_insn1 (PATTERN (insn));
14221 /* Determine if PAT is a PATTERN of a store insn. */
14224 is_store_insn1 (rtx pat)
14226 if (!pat || pat == NULL_RTX)
14229 if (GET_CODE (pat) == SET)
14230 return is_mem_ref (SET_DEST (pat));
14232 if (GET_CODE (pat) == PARALLEL)
14236 for (i = 0; i < XVECLEN (pat, 0); i++)
14237 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14244 /* Determine if INSN stores to memory. */
14247 is_store_insn (rtx insn)
14249 if (!insn || !INSN_P (insn))
14252 return is_store_insn1 (PATTERN (insn));
14255 /* Returns whether the dependence between INSN and NEXT is considered
14256 costly by the given target. */
14259 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14261 /* If the flag is not enbled - no dependence is considered costly;
14262 allow all dependent insns in the same group.
14263 This is the most aggressive option. */
14264 if (rs6000_sched_costly_dep == no_dep_costly)
14267 /* If the flag is set to 1 - a dependence is always considered costly;
14268 do not allow dependent instructions in the same group.
14269 This is the most conservative option. */
14270 if (rs6000_sched_costly_dep == all_deps_costly)
14273 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14274 && is_load_insn (next)
14275 && is_store_insn (insn))
14276 /* Prevent load after store in the same group. */
14279 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14280 && is_load_insn (next)
14281 && is_store_insn (insn)
14282 && (!link || (int) REG_NOTE_KIND (link) == 0))
14283 /* Prevent load after store in the same group if it is a true dependence. */
14286 /* The flag is set to X; dependences with latency >= X are considered costly,
14287 and will not be scheduled in the same group. */
14288 if (rs6000_sched_costly_dep <= max_dep_latency
14289 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14295 /* Return the next insn after INSN that is found before TAIL is reached,
14296 skipping any "non-active" insns - insns that will not actually occupy
14297 an issue slot. Return NULL_RTX if such an insn is not found. */
14300 get_next_active_insn (rtx insn, rtx tail)
14304 if (!insn || insn == tail)
14307 next_insn = NEXT_INSN (insn);
14310 && next_insn != tail
14311 && (GET_CODE(next_insn) == NOTE
14312 || GET_CODE (PATTERN (next_insn)) == USE
14313 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14315 next_insn = NEXT_INSN (next_insn);
14318 if (!next_insn || next_insn == tail)
14324 /* Return whether the presence of INSN causes a dispatch group termination
14325 of group WHICH_GROUP.
14327 If WHICH_GROUP == current_group, this function will return true if INSN
14328 causes the termination of the current group (i.e, the dispatch group to
14329 which INSN belongs). This means that INSN will be the last insn in the
14330 group it belongs to.
14332 If WHICH_GROUP == previous_group, this function will return true if INSN
14333 causes the termination of the previous group (i.e, the dispatch group that
14334 precedes the group to which INSN belongs). This means that INSN will be
14335 the first insn in the group it belongs to). */
14338 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14340 enum attr_type type;
14345 type = get_attr_type (insn);
14347 if (is_microcoded_insn (insn))
14350 if (which_group == current_group)
14352 if (is_branch_slot_insn (insn))
14356 else if (which_group == previous_group)
14358 if (is_dispatch_slot_restricted (insn))
14366 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14367 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14370 is_costly_group (rtx *group_insns, rtx next_insn)
14375 int issue_rate = rs6000_issue_rate ();
14377 for (i = 0; i < issue_rate; i++)
14379 rtx insn = group_insns[i];
14382 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14384 rtx next = XEXP (link, 0);
14385 if (next == next_insn)
14387 cost = insn_cost (insn, link, next_insn);
14388 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14397 /* Utility of the function redefine_groups.
14398 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14399 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14400 to keep it "far" (in a separate group) from GROUP_INSNS, following
14401 one of the following schemes, depending on the value of the flag
14402 -minsert_sched_nops = X:
14403 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14404 in order to force NEXT_INSN into a separate group.
14405 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14406 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14407 insertion (has a group just ended, how many vacant issue slots remain in the
14408 last group, and how many dispatch groups were encountered so far). */
14411 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14412 bool *group_end, int can_issue_more, int *group_count)
14416 int issue_rate = rs6000_issue_rate ();
14417 bool end = *group_end;
14420 if (next_insn == NULL_RTX)
14421 return can_issue_more;
14423 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14424 return can_issue_more;
14426 force = is_costly_group (group_insns, next_insn);
14428 return can_issue_more;
14430 if (sched_verbose > 6)
14431 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14432 *group_count ,can_issue_more);
14434 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14437 can_issue_more = 0;
14439 /* Since only a branch can be issued in the last issue_slot, it is
14440 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14441 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14442 in this case the last nop will start a new group and the branch will be
14443 forced to the new group. */
14444 if (can_issue_more && !is_branch_slot_insn (next_insn))
14447 while (can_issue_more > 0)
14450 emit_insn_before (nop, next_insn);
14458 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14460 int n_nops = rs6000_sched_insert_nops;
14462 /* Nops can't be issued from the branch slot, so the effective
14463 issue_rate for nops is 'issue_rate - 1'. */
14464 if (can_issue_more == 0)
14465 can_issue_more = issue_rate;
14467 if (can_issue_more == 0)
14469 can_issue_more = issue_rate - 1;
14472 for (i = 0; i < issue_rate; i++)
14474 group_insns[i] = 0;
14481 emit_insn_before (nop, next_insn);
14482 if (can_issue_more == issue_rate - 1) /* new group begins */
14485 if (can_issue_more == 0)
14487 can_issue_more = issue_rate - 1;
14490 for (i = 0; i < issue_rate; i++)
14492 group_insns[i] = 0;
14498 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14501 *group_end = /* Is next_insn going to start a new group? */
14503 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14504 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14505 || (can_issue_more < issue_rate &&
14506 insn_terminates_group_p (next_insn, previous_group)));
14507 if (*group_end && end)
14510 if (sched_verbose > 6)
14511 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14512 *group_count, can_issue_more);
14513 return can_issue_more;
14516 return can_issue_more;
14519 /* This function tries to synch the dispatch groups that the compiler "sees"
14520 with the dispatch groups that the processor dispatcher is expected to
14521 form in practice. It tries to achieve this synchronization by forcing the
14522 estimated processor grouping on the compiler (as opposed to the function
14523 'pad_goups' which tries to force the scheduler's grouping on the processor).
14525 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14526 examines the (estimated) dispatch groups that will be formed by the processor
14527 dispatcher. It marks these group boundaries to reflect the estimated
14528 processor grouping, overriding the grouping that the scheduler had marked.
14529 Depending on the value of the flag '-minsert-sched-nops' this function can
14530 force certain insns into separate groups or force a certain distance between
14531 them by inserting nops, for example, if there exists a "costly dependence"
14534 The function estimates the group boundaries that the processor will form as
14535 folllows: It keeps track of how many vacant issue slots are available after
14536 each insn. A subsequent insn will start a new group if one of the following
14538 - no more vacant issue slots remain in the current dispatch group.
14539 - only the last issue slot, which is the branch slot, is vacant, but the next
14540 insn is not a branch.
14541 - only the last 2 or less issue slots, including the branch slot, are vacant,
14542 which means that a cracked insn (which occupies two issue slots) can't be
14543 issued in this group.
14544 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14545 start a new group. */
14548 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14550 rtx insn, next_insn;
14552 int can_issue_more;
14555 int group_count = 0;
14559 issue_rate = rs6000_issue_rate ();
14560 group_insns = alloca (issue_rate * sizeof (rtx));
14561 for (i = 0; i < issue_rate; i++)
14563 group_insns[i] = 0;
14565 can_issue_more = issue_rate;
14567 insn = get_next_active_insn (prev_head_insn, tail);
14570 while (insn != NULL_RTX)
14572 slot = (issue_rate - can_issue_more);
14573 group_insns[slot] = insn;
14575 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14576 if (insn_terminates_group_p (insn, current_group))
14577 can_issue_more = 0;
14579 next_insn = get_next_active_insn (insn, tail);
14580 if (next_insn == NULL_RTX)
14581 return group_count + 1;
14583 group_end = /* Is next_insn going to start a new group? */
14584 (can_issue_more == 0
14585 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14586 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14587 || (can_issue_more < issue_rate &&
14588 insn_terminates_group_p (next_insn, previous_group)));
14590 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14591 next_insn, &group_end, can_issue_more, &group_count);
14596 can_issue_more = 0;
14597 for (i = 0; i < issue_rate; i++)
14599 group_insns[i] = 0;
14603 if (GET_MODE (next_insn) == TImode && can_issue_more)
14604 PUT_MODE(next_insn, VOIDmode);
14605 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14606 PUT_MODE (next_insn, TImode);
14609 if (can_issue_more == 0)
14610 can_issue_more = issue_rate;
14613 return group_count;
14616 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14617 dispatch group boundaries that the scheduler had marked. Pad with nops
14618 any dispatch groups which have vacant issue slots, in order to force the
14619 scheduler's grouping on the processor dispatcher. The function
14620 returns the number of dispatch groups found. */
14623 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14625 rtx insn, next_insn;
14628 int can_issue_more;
14630 int group_count = 0;
14632 /* Initialize issue_rate. */
14633 issue_rate = rs6000_issue_rate ();
14634 can_issue_more = issue_rate;
14636 insn = get_next_active_insn (prev_head_insn, tail);
14637 next_insn = get_next_active_insn (insn, tail);
14639 while (insn != NULL_RTX)
14642 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14644 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14646 if (next_insn == NULL_RTX)
14651 /* If the scheduler had marked group termination at this location
14652 (between insn and next_indn), and neither insn nor next_insn will
14653 force group termination, pad the group with nops to force group
14656 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14657 && !insn_terminates_group_p (insn, current_group)
14658 && !insn_terminates_group_p (next_insn, previous_group))
14660 if (!is_branch_slot_insn(next_insn))
14663 while (can_issue_more)
14666 emit_insn_before (nop, next_insn);
14671 can_issue_more = issue_rate;
14676 next_insn = get_next_active_insn (insn, tail);
14679 return group_count;
14682 /* The following function is called at the end of scheduling BB.
14683 After reload, it inserts nops at insn group bundling. */
14686 rs6000_sched_finish (FILE *dump, int sched_verbose)
14691 fprintf (dump, "=== Finishing schedule.\n");
14693 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14695 if (rs6000_sched_insert_nops == sched_finish_none)
14698 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14699 n_groups = pad_groups (dump, sched_verbose,
14700 current_sched_info->prev_head,
14701 current_sched_info->next_tail);
14703 n_groups = redefine_groups (dump, sched_verbose,
14704 current_sched_info->prev_head,
14705 current_sched_info->next_tail);
14707 if (sched_verbose >= 6)
14709 fprintf (dump, "ngroups = %d\n", n_groups);
14710 print_rtl (dump, current_sched_info->prev_head);
14711 fprintf (dump, "Done finish_sched\n");
14716 /* Length in units of the trampoline for entering a nested function. */
14719 rs6000_trampoline_size (void)
14723 switch (DEFAULT_ABI)
14729 ret = (TARGET_32BIT) ? 12 : 24;
14734 ret = (TARGET_32BIT) ? 40 : 48;
14741 /* Emit RTL insns to initialize the variable parts of a trampoline.
14742 FNADDR is an RTX for the address of the function's pure code.
14743 CXT is an RTX for the static chain value for the function. */
14746 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14748 enum machine_mode pmode = Pmode;
14749 int regsize = (TARGET_32BIT) ? 4 : 8;
14750 rtx ctx_reg = force_reg (pmode, cxt);
14752 switch (DEFAULT_ABI)
14757 /* Macros to shorten the code expansions below. */
14758 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14759 #define MEM_PLUS(addr,offset) \
14760 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14762 /* Under AIX, just build the 3 word function descriptor */
14765 rtx fn_reg = gen_reg_rtx (pmode);
14766 rtx toc_reg = gen_reg_rtx (pmode);
14767 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14768 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14769 emit_move_insn (MEM_DEREF (addr), fn_reg);
14770 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14771 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14775 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14778 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14779 FALSE, VOIDmode, 4,
14781 GEN_INT (rs6000_trampoline_size ()), SImode,
14791 /* Table of valid machine attributes. */
14793 const struct attribute_spec rs6000_attribute_table[] =
14795 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14796 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
14797 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14798 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14799 { NULL, 0, 0, false, false, false, NULL }
14802 /* Handle the "altivec" attribute. The attribute may have
14803 arguments as follows:
14805 __attribute__((altivec(vector__)))
14806 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
14807 __attribute__((altivec(bool__))) (always followed by 'unsigned')
14809 and may appear more than once (e.g., 'vector bool char') in a
14810 given declaration. */
14813 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
14814 int flags ATTRIBUTE_UNUSED,
14815 bool *no_add_attrs)
14817 tree type = *node, result = NULL_TREE;
14818 enum machine_mode mode;
14821 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
14822 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
14823 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
14826 while (POINTER_TYPE_P (type)
14827 || TREE_CODE (type) == FUNCTION_TYPE
14828 || TREE_CODE (type) == METHOD_TYPE
14829 || TREE_CODE (type) == ARRAY_TYPE)
14830 type = TREE_TYPE (type);
14832 mode = TYPE_MODE (type);
14834 if (rs6000_warn_altivec_long
14835 && (type == long_unsigned_type_node || type == long_integer_type_node))
14836 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
14838 switch (altivec_type)
14841 unsigned_p = TREE_UNSIGNED (type);
14845 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
14848 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
14851 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
14853 case SFmode: result = V4SF_type_node; break;
14854 /* If the user says 'vector int bool', we may be handed the 'bool'
14855 attribute _before_ the 'vector' attribute, and so select the proper
14856 type in the 'b' case below. */
14857 case V4SImode: case V8HImode: case V16QImode: result = type;
14864 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
14865 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
14866 case QImode: case V16QImode: result = bool_V16QI_type_node;
14873 case V8HImode: result = pixel_V8HI_type_node;
14879 *no_add_attrs = true; /* No need to hang on to the attribute. */
14882 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
14884 *node = reconstruct_complex_type (*node, result);
14889 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14890 struct attribute_spec.handler. */
14893 rs6000_handle_longcall_attribute (tree *node, tree name,
14894 tree args ATTRIBUTE_UNUSED,
14895 int flags ATTRIBUTE_UNUSED,
14896 bool *no_add_attrs)
14898 if (TREE_CODE (*node) != FUNCTION_TYPE
14899 && TREE_CODE (*node) != FIELD_DECL
14900 && TREE_CODE (*node) != TYPE_DECL)
14902 warning ("`%s' attribute only applies to functions",
14903 IDENTIFIER_POINTER (name));
14904 *no_add_attrs = true;
14910 /* Set longcall attributes on all functions declared when
14911 rs6000_default_long_calls is true. */
14913 rs6000_set_default_type_attributes (tree type)
14915 if (rs6000_default_long_calls
14916 && (TREE_CODE (type) == FUNCTION_TYPE
14917 || TREE_CODE (type) == METHOD_TYPE))
14918 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14920 TYPE_ATTRIBUTES (type));
14923 /* Return a reference suitable for calling a function with the
14924 longcall attribute. */
14927 rs6000_longcall_ref (rtx call_ref)
14929 const char *call_name;
14932 if (GET_CODE (call_ref) != SYMBOL_REF)
14935 /* System V adds '.' to the internal name, so skip them. */
14936 call_name = XSTR (call_ref, 0);
14937 if (*call_name == '.')
14939 while (*call_name == '.')
14942 node = get_identifier (call_name);
14943 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14946 return force_reg (Pmode, call_ref);
14949 #ifdef USING_ELFOS_H
14951 /* A C statement or statements to switch to the appropriate section
14952 for output of RTX in mode MODE. You can assume that RTX is some
14953 kind of constant in RTL. The argument MODE is redundant except in
14954 the case of a `const_int' rtx. Select the section by calling
14955 `text_section' or one of the alternatives for other sections.
14957 Do not define this macro if you put all constants in the read-only
14961 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14962 unsigned HOST_WIDE_INT align)
14964 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14967 default_elf_select_rtx_section (mode, x, align);
14970 /* A C statement or statements to switch to the appropriate
14971 section for output of DECL. DECL is either a `VAR_DECL' node
14972 or a constant of some sort. RELOC indicates whether forming
14973 the initial value of DECL requires link-time relocations. */
14976 rs6000_elf_select_section (tree decl, int reloc,
14977 unsigned HOST_WIDE_INT align)
14979 /* Pretend that we're always building for a shared library when
14980 ABI_AIX, because otherwise we end up with dynamic relocations
14981 in read-only sections. This happens for function pointers,
14982 references to vtables in typeinfo, and probably other cases. */
14983 default_elf_select_section_1 (decl, reloc, align,
14984 flag_pic || DEFAULT_ABI == ABI_AIX);
14987 /* A C statement to build up a unique section name, expressed as a
14988 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14989 RELOC indicates whether the initial value of EXP requires
14990 link-time relocations. If you do not define this macro, GCC will use
14991 the symbol name prefixed by `.' as the section name. Note - this
14992 macro can now be called for uninitialized data items as well as
14993 initialized data and functions. */
14996 rs6000_elf_unique_section (tree decl, int reloc)
14998 /* As above, pretend that we're always building for a shared library
14999 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15000 default_unique_section_1 (decl, reloc,
15001 flag_pic || DEFAULT_ABI == ABI_AIX);
15004 /* For a SYMBOL_REF, set generic flags and then perform some
15005 target-specific processing.
15007 When the AIX ABI is requested on a non-AIX system, replace the
15008 function name with the real name (with a leading .) rather than the
15009 function descriptor name. This saves a lot of overriding code to
15010 read the prefixes. */
15013 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15015 default_encode_section_info (decl, rtl, first);
15018 && TREE_CODE (decl) == FUNCTION_DECL
15020 && DEFAULT_ABI == ABI_AIX)
15022 rtx sym_ref = XEXP (rtl, 0);
15023 size_t len = strlen (XSTR (sym_ref, 0));
15024 char *str = alloca (len + 2);
15026 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15027 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15032 rs6000_elf_in_small_data_p (tree decl)
15034 if (rs6000_sdata == SDATA_NONE)
15037 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15039 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15040 if (strcmp (section, ".sdata") == 0
15041 || strcmp (section, ".sdata2") == 0
15042 || strcmp (section, ".sbss") == 0
15043 || strcmp (section, ".sbss2") == 0
15044 || strcmp (section, ".PPC.EMB.sdata0") == 0
15045 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15050 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15053 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15054 /* If it's not public, and we're not going to reference it there,
15055 there's no need to put it in the small data section. */
15056 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15063 #endif /* USING_ELFOS_H */
15066 /* Return a REG that occurs in ADDR with coefficient 1.
15067 ADDR can be effectively incremented by incrementing REG.
15069 r0 is special and we must not select it as an address
15070 register by this routine since our caller will try to
15071 increment the returned register via an "la" instruction. */
15074 find_addr_reg (rtx addr)
15076 while (GET_CODE (addr) == PLUS)
15078 if (GET_CODE (XEXP (addr, 0)) == REG
15079 && REGNO (XEXP (addr, 0)) != 0)
15080 addr = XEXP (addr, 0);
15081 else if (GET_CODE (XEXP (addr, 1)) == REG
15082 && REGNO (XEXP (addr, 1)) != 0)
15083 addr = XEXP (addr, 1);
15084 else if (CONSTANT_P (XEXP (addr, 0)))
15085 addr = XEXP (addr, 1);
15086 else if (CONSTANT_P (XEXP (addr, 1)))
15087 addr = XEXP (addr, 0);
15091 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15097 rs6000_fatal_bad_address (rtx op)
15099 fatal_insn ("bad address", op);
15105 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15106 reference and a constant. */
15109 symbolic_operand (rtx op)
15111 switch (GET_CODE (op))
15118 return (GET_CODE (op) == SYMBOL_REF ||
15119 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15120 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15121 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15130 static tree branch_island_list = 0;
15132 /* Remember to generate a branch island for far calls to the given
15136 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15138 tree branch_island = build_tree_list (function_name, label_name);
15139 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15140 TREE_CHAIN (branch_island) = branch_island_list;
15141 branch_island_list = branch_island;
15144 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15145 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15146 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15147 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15149 /* Generate far-jump branch islands for everything on the
15150 branch_island_list. Invoked immediately after the last instruction
15151 of the epilogue has been emitted; the branch-islands must be
15152 appended to, and contiguous with, the function body. Mach-O stubs
15153 are generated in machopic_output_stub(). */
15156 macho_branch_islands (void)
15159 tree branch_island;
15161 for (branch_island = branch_island_list;
15163 branch_island = TREE_CHAIN (branch_island))
15165 const char *label =
15166 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15168 darwin_strip_name_encoding (
15169 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15170 char name_buf[512];
15171 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15172 if (name[0] == '*' || name[0] == '&')
15173 strcpy (name_buf, name+1);
15177 strcpy (name_buf+1, name);
15179 strcpy (tmp_buf, "\n");
15180 strcat (tmp_buf, label);
15181 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15182 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15183 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15184 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15185 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15188 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15189 strcat (tmp_buf, label);
15190 strcat (tmp_buf, "_pic\n");
15191 strcat (tmp_buf, label);
15192 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15194 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15195 strcat (tmp_buf, name_buf);
15196 strcat (tmp_buf, " - ");
15197 strcat (tmp_buf, label);
15198 strcat (tmp_buf, "_pic)\n");
15200 strcat (tmp_buf, "\tmtlr r0\n");
15202 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15203 strcat (tmp_buf, name_buf);
15204 strcat (tmp_buf, " - ");
15205 strcat (tmp_buf, label);
15206 strcat (tmp_buf, "_pic)\n");
15208 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15212 strcat (tmp_buf, ":\nlis r12,hi16(");
15213 strcat (tmp_buf, name_buf);
15214 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15215 strcat (tmp_buf, name_buf);
15216 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15218 output_asm_insn (tmp_buf, 0);
15219 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15220 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15221 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15222 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15223 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15226 branch_island_list = 0;
15229 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15230 already there or not. */
15233 no_previous_def (tree function_name)
15235 tree branch_island;
15236 for (branch_island = branch_island_list;
15238 branch_island = TREE_CHAIN (branch_island))
15239 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15244 /* GET_PREV_LABEL gets the label name from the previous definition of
15248 get_prev_label (tree function_name)
15250 tree branch_island;
15251 for (branch_island = branch_island_list;
15253 branch_island = TREE_CHAIN (branch_island))
15254 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15255 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15259 /* INSN is either a function call or a millicode call. It may have an
15260 unconditional jump in its delay slot.
15262 CALL_DEST is the routine we are calling. */
15265 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15267 static char buf[256];
15268 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15269 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15272 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15274 if (no_previous_def (funname))
15276 int line_number = 0;
15277 rtx label_rtx = gen_label_rtx ();
15278 char *label_buf, temp_buf[256];
15279 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15280 CODE_LABEL_NUMBER (label_rtx));
15281 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15282 labelname = get_identifier (label_buf);
15283 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15285 line_number = NOTE_LINE_NUMBER (insn);
15286 add_compiler_branch_island (labelname, funname, line_number);
15289 labelname = get_prev_label (funname);
15291 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15292 instruction will reach 'foo', otherwise link as 'bl L42'".
15293 "L42" should be a 'branch island', that will do a far jump to
15294 'foo'. Branch islands are generated in
15295 macho_branch_islands(). */
15296 sprintf (buf, "jbsr %%z%d,%.246s",
15297 dest_operand_number, IDENTIFIER_POINTER (labelname));
15300 sprintf (buf, "bl %%z%d", dest_operand_number);
15304 #endif /* TARGET_MACHO */
15306 /* Generate PIC and indirect symbol stubs. */
15309 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15311 unsigned int length;
15312 char *symbol_name, *lazy_ptr_name;
15313 char *local_label_0;
15314 static int label = 0;
15316 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15317 symb = (*targetm.strip_name_encoding) (symb);
15320 length = strlen (symb);
15321 symbol_name = alloca (length + 32);
15322 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15324 lazy_ptr_name = alloca (length + 32);
15325 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15328 machopic_picsymbol_stub1_section ();
15330 machopic_symbol_stub1_section ();
15331 fprintf (file, "\t.align 2\n");
15333 fprintf (file, "%s:\n", stub);
15334 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15339 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15340 sprintf (local_label_0, "\"L%011d$spb\"", label);
15342 fprintf (file, "\tmflr r0\n");
15343 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15344 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15345 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15346 lazy_ptr_name, local_label_0);
15347 fprintf (file, "\tmtlr r0\n");
15348 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15349 lazy_ptr_name, local_label_0);
15350 fprintf (file, "\tmtctr r12\n");
15351 fprintf (file, "\tbctr\n");
15355 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15356 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15357 fprintf (file, "\tmtctr r12\n");
15358 fprintf (file, "\tbctr\n");
15361 machopic_lazy_symbol_ptr_section ();
15362 fprintf (file, "%s:\n", lazy_ptr_name);
15363 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15364 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15367 /* Legitimize PIC addresses. If the address is already
15368 position-independent, we return ORIG. Newly generated
15369 position-independent addresses go into a reg. This is REG if non
15370 zero, otherwise we allocate register(s) as necessary. */
15372 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15375 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15380 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15381 reg = gen_reg_rtx (Pmode);
15383 if (GET_CODE (orig) == CONST)
15385 if (GET_CODE (XEXP (orig, 0)) == PLUS
15386 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15389 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15391 /* Use a different reg for the intermediate value, as
15392 it will be marked UNCHANGING. */
15393 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15396 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15399 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15405 if (GET_CODE (offset) == CONST_INT)
15407 if (SMALL_INT (offset))
15408 return plus_constant (base, INTVAL (offset));
15409 else if (! reload_in_progress && ! reload_completed)
15410 offset = force_reg (Pmode, offset);
15413 rtx mem = force_const_mem (Pmode, orig);
15414 return machopic_legitimize_pic_address (mem, Pmode, reg);
15417 return gen_rtx_PLUS (Pmode, base, offset);
15420 /* Fall back on generic machopic code. */
15421 return machopic_legitimize_pic_address (orig, mode, reg);
15424 /* This is just a placeholder to make linking work without having to
15425 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15426 ever needed for Darwin (not too likely!) this would have to get a
15427 real definition. */
15434 #endif /* TARGET_MACHO */
15437 static unsigned int
15438 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15440 return default_section_type_flags_1 (decl, name, reloc,
15441 flag_pic || DEFAULT_ABI == ABI_AIX);
15444 /* Record an element in the table of global constructors. SYMBOL is
15445 a SYMBOL_REF of the function to be called; PRIORITY is a number
15446 between 0 and MAX_INIT_PRIORITY.
15448 This differs from default_named_section_asm_out_constructor in
15449 that we have special handling for -mrelocatable. */
15452 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15454 const char *section = ".ctors";
15457 if (priority != DEFAULT_INIT_PRIORITY)
15459 sprintf (buf, ".ctors.%.5u",
15460 /* Invert the numbering so the linker puts us in the proper
15461 order; constructors are run from right to left, and the
15462 linker sorts in increasing order. */
15463 MAX_INIT_PRIORITY - priority);
15467 named_section_flags (section, SECTION_WRITE);
15468 assemble_align (POINTER_SIZE);
15470 if (TARGET_RELOCATABLE)
15472 fputs ("\t.long (", asm_out_file);
15473 output_addr_const (asm_out_file, symbol);
15474 fputs (")@fixup\n", asm_out_file);
15477 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15481 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15483 const char *section = ".dtors";
15486 if (priority != DEFAULT_INIT_PRIORITY)
15488 sprintf (buf, ".dtors.%.5u",
15489 /* Invert the numbering so the linker puts us in the proper
15490 order; constructors are run from right to left, and the
15491 linker sorts in increasing order. */
15492 MAX_INIT_PRIORITY - priority);
15496 named_section_flags (section, SECTION_WRITE);
15497 assemble_align (POINTER_SIZE);
15499 if (TARGET_RELOCATABLE)
15501 fputs ("\t.long (", asm_out_file);
15502 output_addr_const (asm_out_file, symbol);
15503 fputs (")@fixup\n", asm_out_file);
15506 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15510 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15514 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15515 ASM_OUTPUT_LABEL (file, name);
15516 fputs (DOUBLE_INT_ASM_OP, file);
15518 assemble_name (file, name);
15519 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15520 assemble_name (file, name);
15521 fputs (",24\n\t.type\t.", file);
15522 assemble_name (file, name);
15523 fputs (",@function\n", file);
15524 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15526 fputs ("\t.globl\t.", file);
15527 assemble_name (file, name);
15530 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15532 ASM_OUTPUT_LABEL (file, name);
15536 if (TARGET_RELOCATABLE
15537 && (get_pool_size () != 0 || current_function_profile)
15542 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15544 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15545 fprintf (file, "\t.long ");
15546 assemble_name (file, buf);
15548 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15549 assemble_name (file, buf);
15553 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15554 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15556 if (DEFAULT_ABI == ABI_AIX)
15558 const char *desc_name, *orig_name;
15560 orig_name = (*targetm.strip_name_encoding) (name);
15561 desc_name = orig_name;
15562 while (*desc_name == '.')
15565 if (TREE_PUBLIC (decl))
15566 fprintf (file, "\t.globl %s\n", desc_name);
15568 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15569 fprintf (file, "%s:\n", desc_name);
15570 fprintf (file, "\t.long %s\n", orig_name);
15571 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15572 if (DEFAULT_ABI == ABI_AIX)
15573 fputs ("\t.long 0\n", file);
15574 fprintf (file, "\t.previous\n");
15576 ASM_OUTPUT_LABEL (file, name);
15582 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15584 fputs (GLOBAL_ASM_OP, stream);
15585 RS6000_OUTPUT_BASENAME (stream, name);
15586 putc ('\n', stream);
15590 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15593 static const char * const suffix[3] = { "PR", "RO", "RW" };
15595 if (flags & SECTION_CODE)
15597 else if (flags & SECTION_WRITE)
15602 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15603 (flags & SECTION_CODE) ? "." : "",
15604 name, suffix[smclass], flags & SECTION_ENTSIZE);
15608 rs6000_xcoff_select_section (tree decl, int reloc,
15609 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15611 if (decl_readonly_section_1 (decl, reloc, 1))
15613 if (TREE_PUBLIC (decl))
15614 read_only_data_section ();
15616 read_only_private_data_section ();
15620 if (TREE_PUBLIC (decl))
15623 private_data_section ();
15628 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15632 /* Use select_section for private and uninitialized data. */
15633 if (!TREE_PUBLIC (decl)
15634 || DECL_COMMON (decl)
15635 || DECL_INITIAL (decl) == NULL_TREE
15636 || DECL_INITIAL (decl) == error_mark_node
15637 || (flag_zero_initialized_in_bss
15638 && initializer_zerop (DECL_INITIAL (decl))))
15641 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15642 name = (*targetm.strip_name_encoding) (name);
15643 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15646 /* Select section for constant in constant pool.
15648 On RS/6000, all constants are in the private read-only data area.
15649 However, if this is being placed in the TOC it must be output as a
15653 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15654 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15656 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15659 read_only_private_data_section ();
15662 /* Remove any trailing [DS] or the like from the symbol name. */
15664 static const char *
15665 rs6000_xcoff_strip_name_encoding (const char *name)
15670 len = strlen (name);
15671 if (name[len - 1] == ']')
15672 return ggc_alloc_string (name, len - 4);
15677 /* Section attributes. AIX is always PIC. */
15679 static unsigned int
15680 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15682 unsigned int align;
15683 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15685 /* Align to at least UNIT size. */
15686 if (flags & SECTION_CODE)
15687 align = MIN_UNITS_PER_WORD;
15689 /* Increase alignment of large objects if not already stricter. */
15690 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15691 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15692 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15694 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15697 /* Output at beginning of assembler file.
15699 Initialize the section names for the RS/6000 at this point.
15701 Specify filename, including full path, to assembler.
15703 We want to go into the TOC section so at least one .toc will be emitted.
15704 Also, in order to output proper .bs/.es pairs, we need at least one static
15705 [RW] section emitted.
15707 Finally, declare mcount when profiling to make the assembler happy. */
15710 rs6000_xcoff_file_start (void)
15712 rs6000_gen_section_name (&xcoff_bss_section_name,
15713 main_input_filename, ".bss_");
15714 rs6000_gen_section_name (&xcoff_private_data_section_name,
15715 main_input_filename, ".rw_");
15716 rs6000_gen_section_name (&xcoff_read_only_section_name,
15717 main_input_filename, ".ro_");
15719 fputs ("\t.file\t", asm_out_file);
15720 output_quoted_string (asm_out_file, main_input_filename);
15721 fputc ('\n', asm_out_file);
15723 if (write_symbols != NO_DEBUG)
15724 private_data_section ();
15727 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15728 rs6000_file_start ();
15731 /* Output at end of assembler file.
15732 On the RS/6000, referencing data should automatically pull in text. */
15735 rs6000_xcoff_file_end (void)
15738 fputs ("_section_.text:\n", asm_out_file);
15740 fputs (TARGET_32BIT
15741 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15744 #endif /* TARGET_XCOFF */
15747 /* Cross-module name binding. Darwin does not support overriding
15748 functions at dynamic-link time. */
15751 rs6000_binds_local_p (tree decl)
15753 return default_binds_local_p_1 (decl, 0);
15757 /* Compute a (partial) cost for rtx X. Return true if the complete
15758 cost has been computed, and false if subexpressions should be
15759 scanned. In either case, *TOTAL contains the cost result. */
15762 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15767 /* On the RS/6000, if it is valid in the insn, it is free.
15768 So this always returns 0. */
15779 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15780 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15781 + 0x8000) >= 0x10000)
15782 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15783 ? COSTS_N_INSNS (2)
15784 : COSTS_N_INSNS (1));
15790 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15791 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15792 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15793 ? COSTS_N_INSNS (2)
15794 : COSTS_N_INSNS (1));
15800 *total = COSTS_N_INSNS (2);
15803 switch (rs6000_cpu)
15805 case PROCESSOR_RIOS1:
15806 case PROCESSOR_PPC405:
15807 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15808 ? COSTS_N_INSNS (5)
15809 : (INTVAL (XEXP (x, 1)) >= -256
15810 && INTVAL (XEXP (x, 1)) <= 255)
15811 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15814 case PROCESSOR_PPC440:
15815 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15816 ? COSTS_N_INSNS (3)
15817 : COSTS_N_INSNS (2));
15820 case PROCESSOR_RS64A:
15821 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15822 ? GET_MODE (XEXP (x, 1)) != DImode
15823 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15824 : (INTVAL (XEXP (x, 1)) >= -256
15825 && INTVAL (XEXP (x, 1)) <= 255)
15826 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15829 case PROCESSOR_RIOS2:
15830 case PROCESSOR_MPCCORE:
15831 case PROCESSOR_PPC604e:
15832 *total = COSTS_N_INSNS (2);
15835 case PROCESSOR_PPC601:
15836 *total = COSTS_N_INSNS (5);
15839 case PROCESSOR_PPC603:
15840 case PROCESSOR_PPC7400:
15841 case PROCESSOR_PPC750:
15842 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15843 ? COSTS_N_INSNS (5)
15844 : (INTVAL (XEXP (x, 1)) >= -256
15845 && INTVAL (XEXP (x, 1)) <= 255)
15846 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15849 case PROCESSOR_PPC7450:
15850 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15851 ? COSTS_N_INSNS (4)
15852 : COSTS_N_INSNS (3));
15855 case PROCESSOR_PPC403:
15856 case PROCESSOR_PPC604:
15857 case PROCESSOR_PPC8540:
15858 *total = COSTS_N_INSNS (4);
15861 case PROCESSOR_PPC620:
15862 case PROCESSOR_PPC630:
15863 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15864 ? GET_MODE (XEXP (x, 1)) != DImode
15865 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15866 : (INTVAL (XEXP (x, 1)) >= -256
15867 && INTVAL (XEXP (x, 1)) <= 255)
15868 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15871 case PROCESSOR_POWER4:
15872 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15873 ? GET_MODE (XEXP (x, 1)) != DImode
15874 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15875 : COSTS_N_INSNS (2));
15884 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15885 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15887 *total = COSTS_N_INSNS (2);
15894 switch (rs6000_cpu)
15896 case PROCESSOR_RIOS1:
15897 *total = COSTS_N_INSNS (19);
15900 case PROCESSOR_RIOS2:
15901 *total = COSTS_N_INSNS (13);
15904 case PROCESSOR_RS64A:
15905 *total = (GET_MODE (XEXP (x, 1)) != DImode
15906 ? COSTS_N_INSNS (65)
15907 : COSTS_N_INSNS (67));
15910 case PROCESSOR_MPCCORE:
15911 *total = COSTS_N_INSNS (6);
15914 case PROCESSOR_PPC403:
15915 *total = COSTS_N_INSNS (33);
15918 case PROCESSOR_PPC405:
15919 *total = COSTS_N_INSNS (35);
15922 case PROCESSOR_PPC440:
15923 *total = COSTS_N_INSNS (34);
15926 case PROCESSOR_PPC601:
15927 *total = COSTS_N_INSNS (36);
15930 case PROCESSOR_PPC603:
15931 *total = COSTS_N_INSNS (37);
15934 case PROCESSOR_PPC604:
15935 case PROCESSOR_PPC604e:
15936 *total = COSTS_N_INSNS (20);
15939 case PROCESSOR_PPC620:
15940 case PROCESSOR_PPC630:
15941 *total = (GET_MODE (XEXP (x, 1)) != DImode
15942 ? COSTS_N_INSNS (21)
15943 : COSTS_N_INSNS (37));
15946 case PROCESSOR_PPC750:
15947 case PROCESSOR_PPC8540:
15948 case PROCESSOR_PPC7400:
15949 *total = COSTS_N_INSNS (19);
15952 case PROCESSOR_PPC7450:
15953 *total = COSTS_N_INSNS (23);
15956 case PROCESSOR_POWER4:
15957 *total = (GET_MODE (XEXP (x, 1)) != DImode
15958 ? COSTS_N_INSNS (18)
15959 : COSTS_N_INSNS (34));
15967 *total = COSTS_N_INSNS (4);
15971 /* MEM should be slightly more expensive than (plus (reg) (const)). */
15980 /* A C expression returning the cost of moving data from a register of class
15981 CLASS1 to one of CLASS2. */
15984 rs6000_register_move_cost (enum machine_mode mode,
15985 enum reg_class from, enum reg_class to)
15987 /* Moves from/to GENERAL_REGS. */
15988 if (reg_classes_intersect_p (to, GENERAL_REGS)
15989 || reg_classes_intersect_p (from, GENERAL_REGS))
15991 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15994 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15995 return (rs6000_memory_move_cost (mode, from, 0)
15996 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15998 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
15999 else if (from == CR_REGS)
16003 /* A move will cost one instruction per GPR moved. */
16004 return 2 * HARD_REGNO_NREGS (0, mode);
16007 /* Moving between two similar registers is just one instruction. */
16008 else if (reg_classes_intersect_p (to, from))
16009 return mode == TFmode ? 4 : 2;
16011 /* Everything else has to go through GENERAL_REGS. */
16013 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16014 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16017 /* A C expressions returning the cost of moving data of MODE from a register to
16021 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16022 int in ATTRIBUTE_UNUSED)
16024 if (reg_classes_intersect_p (class, GENERAL_REGS))
16025 return 4 * HARD_REGNO_NREGS (0, mode);
16026 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16027 return 4 * HARD_REGNO_NREGS (32, mode);
16028 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16029 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16031 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16034 /* Return an RTX representing where to find the function value of a
16035 function returning MODE. */
16037 rs6000_complex_function_value (enum machine_mode mode)
16039 unsigned int regno;
16041 enum machine_mode inner = GET_MODE_INNER (mode);
16042 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16044 if (FLOAT_MODE_P (mode))
16045 regno = FP_ARG_RETURN;
16048 regno = GP_ARG_RETURN;
16050 /* 32-bit is OK since it'll go in r3/r4. */
16051 if (TARGET_32BIT && inner_bytes >= 4)
16052 return gen_rtx_REG (mode, regno);
16055 if (inner_bytes >= 8)
16056 return gen_rtx_REG (mode, regno);
16058 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16060 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16061 GEN_INT (inner_bytes));
16062 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16065 /* Define how to find the value returned by a function.
16066 VALTYPE is the data type of the value (as a tree).
16067 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16068 otherwise, FUNC is 0.
16070 On the SPE, both FPs and vectors are returned in r3.
16072 On RS/6000 an integer value is in r3 and a floating-point value is in
16073 fp1, unless -msoft-float. */
16076 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16078 enum machine_mode mode;
16079 unsigned int regno;
16081 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16083 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16084 return gen_rtx_PARALLEL (DImode,
16086 gen_rtx_EXPR_LIST (VOIDmode,
16087 gen_rtx_REG (SImode, GP_ARG_RETURN),
16089 gen_rtx_EXPR_LIST (VOIDmode,
16090 gen_rtx_REG (SImode,
16091 GP_ARG_RETURN + 1),
16095 if ((INTEGRAL_TYPE_P (valtype)
16096 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16097 || POINTER_TYPE_P (valtype))
16098 mode = TARGET_32BIT ? SImode : DImode;
16100 mode = TYPE_MODE (valtype);
16102 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
16103 regno = FP_ARG_RETURN;
16104 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16105 && TARGET_HARD_FLOAT
16106 && targetm.calls.split_complex_arg)
16107 return rs6000_complex_function_value (mode);
16108 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
16109 regno = ALTIVEC_ARG_RETURN;
16111 regno = GP_ARG_RETURN;
16113 return gen_rtx_REG (mode, regno);
16116 /* Define how to find the value returned by a library function
16117 assuming the value has mode MODE. */
16119 rs6000_libcall_value (enum machine_mode mode)
16121 unsigned int regno;
16123 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16124 && TARGET_HARD_FLOAT && TARGET_FPRS)
16125 regno = FP_ARG_RETURN;
16126 else if (ALTIVEC_VECTOR_MODE (mode))
16127 regno = ALTIVEC_ARG_RETURN;
16128 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16129 return rs6000_complex_function_value (mode);
16131 regno = GP_ARG_RETURN;
16133 return gen_rtx_REG (mode, regno);
16136 /* Define the offset between two registers, FROM to be eliminated and its
16137 replacement TO, at the start of a routine. */
16139 rs6000_initial_elimination_offset (int from, int to)
16141 rs6000_stack_t *info = rs6000_stack_info ();
16142 HOST_WIDE_INT offset;
16144 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16145 offset = info->push_p ? 0 : -info->total_size;
16146 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16147 offset = info->total_size;
16148 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16149 offset = info->push_p ? info->total_size : 0;
16150 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16158 /* Return true if TYPE is of type __ev64_opaque__. */
16161 is_ev64_opaque_type (tree type)
16164 && (type == opaque_V2SI_type_node
16165 || type == opaque_V2SF_type_node
16166 || type == opaque_p_V2SI_type_node));
16170 rs6000_dwarf_register_span (rtx reg)
16174 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16177 regno = REGNO (reg);
16179 /* The duality of the SPE register size wreaks all kinds of havoc.
16180 This is a way of distinguishing r0 in 32-bits from r0 in
16183 gen_rtx_PARALLEL (VOIDmode,
16186 gen_rtx_REG (SImode, regno + 1200),
16187 gen_rtx_REG (SImode, regno))
16189 gen_rtx_REG (SImode, regno),
16190 gen_rtx_REG (SImode, regno + 1200)));
16193 /* Map internal gcc register numbers to DWARF2 register numbers. */
16196 rs6000_dbx_register_number (unsigned int regno)
16198 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16200 if (regno == MQ_REGNO)
16202 if (regno == LINK_REGISTER_REGNUM)
16204 if (regno == COUNT_REGISTER_REGNUM)
16206 if (CR_REGNO_P (regno))
16207 return regno - CR0_REGNO + 86;
16208 if (regno == XER_REGNO)
16210 if (ALTIVEC_REGNO_P (regno))
16211 return regno - FIRST_ALTIVEC_REGNO + 1124;
16212 if (regno == VRSAVE_REGNO)
16214 if (regno == VSCR_REGNO)
16216 if (regno == SPE_ACC_REGNO)
16218 if (regno == SPEFSCR_REGNO)
16220 /* SPE high reg number. We get these values of regno from
16221 rs6000_dwarf_register_span. */
16222 if (regno >= 1200 && regno < 1232)
16228 #include "gt-rs6000.h"