1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
75 enum processor_type rs6000_cpu;
76 struct rs6000_cpu_select rs6000_select[3] =
78 /* switch name, tune arch */
79 { (const char *)0, "--with-cpu=", 1, 1 },
80 { (const char *)0, "-mcpu=", 1, 1 },
81 { (const char *)0, "-mtune=", 1, 0 },
84 /* Support adjust_priority scheduler hook
85 and -mprioritize-restricted-insns= option. */
86 const char *rs6000_sched_restricted_insns_priority_str;
87 int rs6000_sched_restricted_insns_priority;
89 /* Support for -msched-costly-dep option. */
90 const char *rs6000_sched_costly_dep_str;
91 enum rs6000_dependence_cost rs6000_sched_costly_dep;
93 /* Support for -minsert-sched-nops option. */
94 const char *rs6000_sched_insert_nops_str;
95 enum rs6000_nop_insertion rs6000_sched_insert_nops;
97 /* Size of long double */
98 const char *rs6000_long_double_size_string;
99 int rs6000_long_double_type_size;
101 /* Whether -mabi=altivec has appeared */
102 int rs6000_altivec_abi;
104 /* Whether VRSAVE instructions should be generated. */
105 int rs6000_altivec_vrsave;
107 /* String from -mvrsave= option. */
108 const char *rs6000_altivec_vrsave_string;
110 /* Nonzero if we want SPE ABI extensions. */
113 /* Whether isel instructions should be generated. */
116 /* Whether SPE simd instructions should be generated. */
119 /* Nonzero if floating point operations are done in the GPRs. */
120 int rs6000_float_gprs = 0;
122 /* String from -mfloat-gprs=. */
123 const char *rs6000_float_gprs_string;
125 /* String from -misel=. */
126 const char *rs6000_isel_string;
128 /* String from -mspe=. */
129 const char *rs6000_spe_string;
131 /* Set to nonzero once AIX common-mode calls have been defined. */
132 static GTY(()) int common_mode_defined;
134 /* Save information from a "cmpxx" operation until the branch or scc is
136 rtx rs6000_compare_op0, rs6000_compare_op1;
137 int rs6000_compare_fp_p;
139 /* Label number of label created for -mrelocatable, to call to so we can
140 get the address of the GOT section */
141 int rs6000_pic_labelno;
144 /* Which abi to adhere to */
145 const char *rs6000_abi_name;
147 /* Semantics of the small data area */
148 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
150 /* Which small data model to use */
151 const char *rs6000_sdata_name = (char *)0;
153 /* Counter for labels which are to be placed in .fixup. */
154 int fixuplabelno = 0;
157 /* Bit size of immediate TLS offsets and string from which it is decoded. */
158 int rs6000_tls_size = 32;
159 const char *rs6000_tls_size_string;
161 /* ABI enumeration available for subtarget to use. */
162 enum rs6000_abi rs6000_current_abi;
164 /* ABI string from -mabi= option. */
165 const char *rs6000_abi_string;
168 const char *rs6000_debug_name;
169 int rs6000_debug_stack; /* debug stack applications */
170 int rs6000_debug_arg; /* debug argument handling */
173 static GTY(()) tree opaque_V2SI_type_node;
174 static GTY(()) tree opaque_V2SF_type_node;
175 static GTY(()) tree opaque_p_V2SI_type_node;
177 const char *rs6000_traceback_name;
179 traceback_default = 0,
185 /* Flag to say the TOC is initialized */
187 char toc_label_name[10];
189 /* Alias set for saves and restores from the rs6000 stack. */
190 static int rs6000_sr_alias_set;
192 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
193 The only place that looks at this is rs6000_set_default_type_attributes;
194 everywhere else should rely on the presence or absence of a longcall
195 attribute on the function declaration. */
196 int rs6000_default_long_calls;
197 const char *rs6000_longcall_switch;
199 /* Control alignment for fields within structures. */
200 /* String from -malign-XXXXX. */
201 const char *rs6000_alignment_string;
202 int rs6000_alignment_flags;
204 struct builtin_description
206 /* mask is not const because we're going to alter it below. This
207 nonsense will go away when we rewrite the -march infrastructure
208 to give us more target flag bits. */
210 const enum insn_code icode;
211 const char *const name;
212 const enum rs6000_builtins code;
215 static bool rs6000_function_ok_for_sibcall (tree, tree);
216 static int num_insns_constant_wide (HOST_WIDE_INT);
217 static void validate_condition_mode (enum rtx_code, enum machine_mode);
218 static rtx rs6000_generate_compare (enum rtx_code);
219 static void rs6000_maybe_dead (rtx);
220 static void rs6000_emit_stack_tie (void);
221 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
222 static rtx spe_synthesize_frame_save (rtx);
223 static bool spe_func_has_64bit_regs_p (void);
224 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
226 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
227 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
228 static unsigned rs6000_hash_constant (rtx);
229 static unsigned toc_hash_function (const void *);
230 static int toc_hash_eq (const void *, const void *);
231 static int constant_pool_expr_1 (rtx, int *, int *);
232 static bool constant_pool_expr_p (rtx);
233 static bool toc_relative_expr_p (rtx);
234 static bool legitimate_small_data_p (enum machine_mode, rtx);
235 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
236 static bool legitimate_indexed_address_p (rtx, int);
237 static bool legitimate_indirect_address_p (rtx, int);
238 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
239 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
240 static struct machine_function * rs6000_init_machine_status (void);
241 static bool rs6000_assemble_integer (rtx, unsigned int, int);
242 #ifdef HAVE_GAS_HIDDEN
243 static void rs6000_assemble_visibility (tree, int);
245 static int rs6000_ra_ever_killed (void);
246 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
247 extern const struct attribute_spec rs6000_attribute_table[];
248 static void rs6000_set_default_type_attributes (tree);
249 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
250 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
251 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
253 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
254 static bool rs6000_return_in_memory (tree, tree);
255 static void rs6000_file_start (void);
257 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
258 static void rs6000_elf_asm_out_constructor (rtx, int);
259 static void rs6000_elf_asm_out_destructor (rtx, int);
260 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
261 static void rs6000_elf_unique_section (tree, int);
262 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
263 unsigned HOST_WIDE_INT);
264 static void rs6000_elf_encode_section_info (tree, rtx, int)
266 static bool rs6000_elf_in_small_data_p (tree);
269 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
270 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
271 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
272 static void rs6000_xcoff_unique_section (tree, int);
273 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
274 unsigned HOST_WIDE_INT);
275 static const char * rs6000_xcoff_strip_name_encoding (const char *);
276 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
277 static void rs6000_xcoff_file_start (void);
278 static void rs6000_xcoff_file_end (void);
281 static bool rs6000_binds_local_p (tree);
283 static int rs6000_use_dfa_pipeline_interface (void);
284 static int rs6000_variable_issue (FILE *, int, rtx, int);
285 static bool rs6000_rtx_costs (rtx, int, int, int *);
286 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
287 static bool is_microcoded_insn (rtx);
288 static int is_dispatch_slot_restricted (rtx);
289 static bool is_cracked_insn (rtx);
290 static bool is_branch_slot_insn (rtx);
291 static int rs6000_adjust_priority (rtx, int);
292 static int rs6000_issue_rate (void);
293 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
294 static rtx get_next_active_insn (rtx, rtx);
295 static bool insn_terminates_group_p (rtx , enum group_termination);
296 static bool is_costly_group (rtx *, rtx);
297 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
298 static int redefine_groups (FILE *, int, rtx, rtx);
299 static int pad_groups (FILE *, int, rtx, rtx);
300 static void rs6000_sched_finish (FILE *, int);
301 static int rs6000_use_sched_lookahead (void);
303 static void rs6000_init_builtins (void);
304 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
305 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
306 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
307 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
308 static void altivec_init_builtins (void);
309 static void rs6000_common_init_builtins (void);
310 static void rs6000_init_libfuncs (void);
312 static void enable_mask_for_builtins (struct builtin_description *, int,
313 enum rs6000_builtins,
314 enum rs6000_builtins);
315 static void spe_init_builtins (void);
316 static rtx spe_expand_builtin (tree, rtx, bool *);
317 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
318 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
319 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
321 static rtx altivec_expand_builtin (tree, rtx, bool *);
322 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
323 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
324 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
325 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
326 static rtx altivec_expand_predicate_builtin (enum insn_code,
327 const char *, tree, rtx);
328 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
329 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
330 static void rs6000_parse_abi_options (void);
331 static void rs6000_parse_alignment_option (void);
332 static void rs6000_parse_tls_size_option (void);
333 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
334 static int first_altivec_reg_to_save (void);
335 static unsigned int compute_vrsave_mask (void);
336 static void is_altivec_return_reg (rtx, void *);
337 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
338 int easy_vector_constant (rtx, enum machine_mode);
339 static int easy_vector_same (rtx, enum machine_mode);
340 static bool is_ev64_opaque_type (tree);
341 static rtx rs6000_dwarf_register_span (rtx);
342 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
343 static rtx rs6000_tls_get_addr (void);
344 static rtx rs6000_got_sym (void);
345 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
346 static const char *rs6000_get_some_local_dynamic_name (void);
347 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
348 static rtx rs6000_complex_function_value (enum machine_mode);
349 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
350 enum machine_mode, tree);
351 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
352 enum machine_mode, tree, int);
353 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
354 enum machine_mode, tree,
356 static tree rs6000_build_builtin_va_list (void);
358 /* Hash table stuff for keeping track of TOC entries. */
360 struct toc_hash_struct GTY(())
362 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
363 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
365 enum machine_mode key_mode;
369 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
371 /* Default register names. */
372 char rs6000_reg_names[][8] =
374 "0", "1", "2", "3", "4", "5", "6", "7",
375 "8", "9", "10", "11", "12", "13", "14", "15",
376 "16", "17", "18", "19", "20", "21", "22", "23",
377 "24", "25", "26", "27", "28", "29", "30", "31",
378 "0", "1", "2", "3", "4", "5", "6", "7",
379 "8", "9", "10", "11", "12", "13", "14", "15",
380 "16", "17", "18", "19", "20", "21", "22", "23",
381 "24", "25", "26", "27", "28", "29", "30", "31",
382 "mq", "lr", "ctr","ap",
383 "0", "1", "2", "3", "4", "5", "6", "7",
385 /* AltiVec registers. */
386 "0", "1", "2", "3", "4", "5", "6", "7",
387 "8", "9", "10", "11", "12", "13", "14", "15",
388 "16", "17", "18", "19", "20", "21", "22", "23",
389 "24", "25", "26", "27", "28", "29", "30", "31",
395 #ifdef TARGET_REGNAMES
396 static const char alt_reg_names[][8] =
398 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
399 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
400 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
401 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
402 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
403 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
404 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
405 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
406 "mq", "lr", "ctr", "ap",
407 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
409 /* AltiVec registers. */
410 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
411 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
412 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
413 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
420 #ifndef MASK_STRICT_ALIGN
421 #define MASK_STRICT_ALIGN 0
423 #ifndef TARGET_PROFILE_KERNEL
424 #define TARGET_PROFILE_KERNEL 0
427 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
428 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
430 /* Return 1 for a symbol ref for a thread-local storage symbol. */
431 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
432 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
434 /* Initialize the GCC target structure. */
435 #undef TARGET_ATTRIBUTE_TABLE
436 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
437 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
438 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
440 #undef TARGET_ASM_ALIGNED_DI_OP
441 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
443 /* Default unaligned ops are only provided for ELF. Find the ops needed
444 for non-ELF systems. */
445 #ifndef OBJECT_FORMAT_ELF
447 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
449 #undef TARGET_ASM_UNALIGNED_HI_OP
450 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
451 #undef TARGET_ASM_UNALIGNED_SI_OP
452 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
453 #undef TARGET_ASM_UNALIGNED_DI_OP
454 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
457 #undef TARGET_ASM_UNALIGNED_HI_OP
458 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
459 #undef TARGET_ASM_UNALIGNED_SI_OP
460 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
464 /* This hook deals with fixups for relocatable code and DI-mode objects
466 #undef TARGET_ASM_INTEGER
467 #define TARGET_ASM_INTEGER rs6000_assemble_integer
469 #ifdef HAVE_GAS_HIDDEN
470 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
471 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
474 #undef TARGET_HAVE_TLS
475 #define TARGET_HAVE_TLS HAVE_AS_TLS
477 #undef TARGET_CANNOT_FORCE_CONST_MEM
478 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
480 #undef TARGET_ASM_FUNCTION_PROLOGUE
481 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
482 #undef TARGET_ASM_FUNCTION_EPILOGUE
483 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
485 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
486 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
487 #undef TARGET_SCHED_VARIABLE_ISSUE
488 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
490 #undef TARGET_SCHED_ISSUE_RATE
491 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
492 #undef TARGET_SCHED_ADJUST_COST
493 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
494 #undef TARGET_SCHED_ADJUST_PRIORITY
495 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
496 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
497 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
498 #undef TARGET_SCHED_FINISH
499 #define TARGET_SCHED_FINISH rs6000_sched_finish
501 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
502 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
504 #undef TARGET_INIT_BUILTINS
505 #define TARGET_INIT_BUILTINS rs6000_init_builtins
507 #undef TARGET_EXPAND_BUILTIN
508 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
510 #undef TARGET_INIT_LIBFUNCS
511 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
514 #undef TARGET_BINDS_LOCAL_P
515 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
518 #undef TARGET_ASM_OUTPUT_MI_THUNK
519 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
521 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
522 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
524 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
525 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
527 #undef TARGET_RTX_COSTS
528 #define TARGET_RTX_COSTS rs6000_rtx_costs
529 #undef TARGET_ADDRESS_COST
530 #define TARGET_ADDRESS_COST hook_int_rtx_0
532 #undef TARGET_VECTOR_OPAQUE_P
533 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
535 #undef TARGET_DWARF_REGISTER_SPAN
536 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
538 /* On rs6000, function arguments are promoted, as are function return
540 #undef TARGET_PROMOTE_FUNCTION_ARGS
541 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
542 #undef TARGET_PROMOTE_FUNCTION_RETURN
543 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
545 /* Structure return values are passed as an extra parameter. */
546 #undef TARGET_STRUCT_VALUE_RTX
547 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
549 #undef TARGET_RETURN_IN_MEMORY
550 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
552 #undef TARGET_SETUP_INCOMING_VARARGS
553 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
555 /* Always strict argument naming on rs6000. */
556 #undef TARGET_STRICT_ARGUMENT_NAMING
557 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
558 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
559 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
561 #undef TARGET_BUILD_BUILTIN_VA_LIST
562 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
564 struct gcc_target targetm = TARGET_INITIALIZER;
566 /* Override command line options. Mostly we process the processor
567 type and sometimes adjust other TARGET_ options. */
570 rs6000_override_options (const char *default_cpu)
573 struct rs6000_cpu_select *ptr;
575 /* Simplify the entries below by making a mask for any POWER
576 variant and any PowerPC variant. */
578 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
579 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
580 | MASK_PPC_GFXOPT | MASK_POWERPC64)
581 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
585 const char *const name; /* Canonical processor name. */
586 const enum processor_type processor; /* Processor type enum value. */
587 const int target_enable; /* Target flags to enable. */
588 const int target_disable; /* Target flags to disable. */
589 } const processor_target_table[]
590 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
591 POWER_MASKS | POWERPC_MASKS},
592 {"power", PROCESSOR_POWER,
593 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
594 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
595 {"power2", PROCESSOR_POWER,
596 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
597 POWERPC_MASKS | MASK_NEW_MNEMONICS},
598 {"power3", PROCESSOR_PPC630,
599 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 {"power4", PROCESSOR_POWER4,
602 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 {"powerpc", PROCESSOR_POWERPC,
605 MASK_POWERPC | MASK_NEW_MNEMONICS,
606 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
607 {"powerpc64", PROCESSOR_POWERPC64,
608 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
609 POWER_MASKS | POWERPC_OPT_MASKS},
610 {"rios", PROCESSOR_RIOS1,
611 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
612 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
613 {"rios1", PROCESSOR_RIOS1,
614 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
615 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
616 {"rsc", PROCESSOR_PPC601,
617 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
618 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
619 {"rsc1", PROCESSOR_PPC601,
620 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
621 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
622 {"rios2", PROCESSOR_RIOS2,
623 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
624 POWERPC_MASKS | MASK_NEW_MNEMONICS},
625 {"rs64a", PROCESSOR_RS64A,
626 MASK_POWERPC | MASK_NEW_MNEMONICS,
627 POWER_MASKS | POWERPC_OPT_MASKS},
628 {"401", PROCESSOR_PPC403,
629 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
630 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
631 {"403", PROCESSOR_PPC403,
632 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
633 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
634 {"405", PROCESSOR_PPC405,
635 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
636 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
637 {"405fp", PROCESSOR_PPC405,
638 MASK_POWERPC | MASK_NEW_MNEMONICS,
639 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
640 {"440", PROCESSOR_PPC440,
641 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
642 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
643 {"440fp", PROCESSOR_PPC440,
644 MASK_POWERPC | MASK_NEW_MNEMONICS,
645 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
646 {"505", PROCESSOR_MPCCORE,
647 MASK_POWERPC | MASK_NEW_MNEMONICS,
648 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
649 {"601", PROCESSOR_PPC601,
650 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
651 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
652 {"602", PROCESSOR_PPC603,
653 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
654 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
655 {"603", PROCESSOR_PPC603,
656 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
657 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
658 {"603e", PROCESSOR_PPC603,
659 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
660 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
661 {"ec603e", PROCESSOR_PPC603,
662 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
663 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
664 {"604", PROCESSOR_PPC604,
665 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
666 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
667 {"604e", PROCESSOR_PPC604e,
668 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
669 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
670 {"620", PROCESSOR_PPC620,
671 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
673 {"630", PROCESSOR_PPC630,
674 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
676 {"740", PROCESSOR_PPC750,
677 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
678 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
679 {"750", PROCESSOR_PPC750,
680 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
681 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
682 {"G3", PROCESSOR_PPC750,
683 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
684 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
685 {"7400", PROCESSOR_PPC7400,
686 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
687 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
688 {"7450", PROCESSOR_PPC7450,
689 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
690 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
691 {"G4", PROCESSOR_PPC7450,
692 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
693 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
694 {"8540", PROCESSOR_PPC8540,
695 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
696 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
697 {"801", PROCESSOR_MPCCORE,
698 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
699 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
700 {"821", PROCESSOR_MPCCORE,
701 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
702 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
703 {"823", PROCESSOR_MPCCORE,
704 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
705 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
706 {"860", PROCESSOR_MPCCORE,
707 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
708 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
709 {"970", PROCESSOR_POWER4,
710 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
712 {"G5", PROCESSOR_POWER4,
713 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
716 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
718 /* Save current -mmultiple/-mno-multiple status. */
719 int multiple = TARGET_MULTIPLE;
720 /* Save current -mstring/-mno-string status. */
721 int string = TARGET_STRING;
723 /* Identify the processor type. */
724 rs6000_select[0].string = default_cpu;
725 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
727 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
729 ptr = &rs6000_select[i];
730 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
732 for (j = 0; j < ptt_size; j++)
733 if (! strcmp (ptr->string, processor_target_table[j].name))
736 rs6000_cpu = processor_target_table[j].processor;
740 target_flags |= processor_target_table[j].target_enable;
741 target_flags &= ~processor_target_table[j].target_disable;
747 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
754 /* If we are optimizing big endian systems for space, use the load/store
755 multiple and string instructions. */
756 if (BYTES_BIG_ENDIAN && optimize_size)
757 target_flags |= MASK_MULTIPLE | MASK_STRING;
759 /* If -mmultiple or -mno-multiple was explicitly used, don't
760 override with the processor default */
761 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
762 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
764 /* If -mstring or -mno-string was explicitly used, don't override
765 with the processor default. */
766 if ((target_flags_explicit & MASK_STRING) != 0)
767 target_flags = (target_flags & ~MASK_STRING) | string;
769 /* Don't allow -mmultiple or -mstring on little endian systems
770 unless the cpu is a 750, because the hardware doesn't support the
771 instructions used in little endian mode, and causes an alignment
772 trap. The 750 does not cause an alignment trap (except when the
773 target is unaligned). */
775 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
779 target_flags &= ~MASK_MULTIPLE;
780 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
781 warning ("-mmultiple is not supported on little endian systems");
786 target_flags &= ~MASK_STRING;
787 if ((target_flags_explicit & MASK_STRING) != 0)
788 warning ("-mstring is not supported on little endian systems");
792 /* Set debug flags */
793 if (rs6000_debug_name)
795 if (! strcmp (rs6000_debug_name, "all"))
796 rs6000_debug_stack = rs6000_debug_arg = 1;
797 else if (! strcmp (rs6000_debug_name, "stack"))
798 rs6000_debug_stack = 1;
799 else if (! strcmp (rs6000_debug_name, "arg"))
800 rs6000_debug_arg = 1;
802 error ("unknown -mdebug-%s switch", rs6000_debug_name);
805 if (rs6000_traceback_name)
807 if (! strncmp (rs6000_traceback_name, "full", 4))
808 rs6000_traceback = traceback_full;
809 else if (! strncmp (rs6000_traceback_name, "part", 4))
810 rs6000_traceback = traceback_part;
811 else if (! strncmp (rs6000_traceback_name, "no", 2))
812 rs6000_traceback = traceback_none;
814 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
815 rs6000_traceback_name);
818 /* Set size of long double */
819 rs6000_long_double_type_size = 64;
820 if (rs6000_long_double_size_string)
823 int size = strtol (rs6000_long_double_size_string, &tail, 10);
824 if (*tail != '\0' || (size != 64 && size != 128))
825 error ("Unknown switch -mlong-double-%s",
826 rs6000_long_double_size_string);
828 rs6000_long_double_type_size = size;
831 /* Handle -mabi= options. */
832 rs6000_parse_abi_options ();
834 /* Handle -malign-XXXXX option. */
835 rs6000_parse_alignment_option ();
837 /* Handle generic -mFOO=YES/NO options. */
838 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
839 &rs6000_altivec_vrsave);
840 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
842 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
843 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
846 /* Handle -mtls-size option. */
847 rs6000_parse_tls_size_option ();
849 #ifdef SUBTARGET_OVERRIDE_OPTIONS
850 SUBTARGET_OVERRIDE_OPTIONS;
852 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
853 SUBSUBTARGET_OVERRIDE_OPTIONS;
858 /* The e500 does not have string instructions, and we set
859 MASK_STRING above when optimizing for size. */
860 if ((target_flags & MASK_STRING) != 0)
861 target_flags = target_flags & ~MASK_STRING;
863 /* No SPE means 64-bit long doubles, even if an E500. */
864 if (rs6000_spe_string != 0
865 && !strcmp (rs6000_spe_string, "no"))
866 rs6000_long_double_type_size = 64;
868 else if (rs6000_select[1].string != NULL)
870 /* For the powerpc-eabispe configuration, we set all these by
871 default, so let's unset them if we manually set another
872 CPU that is not the E500. */
873 if (rs6000_abi_string == 0)
875 if (rs6000_spe_string == 0)
877 if (rs6000_float_gprs_string == 0)
878 rs6000_float_gprs = 0;
879 if (rs6000_isel_string == 0)
881 if (rs6000_long_double_size_string == 0)
882 rs6000_long_double_type_size = 64;
885 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
886 using TARGET_OPTIONS to handle a toggle switch, but we're out of
887 bits in target_flags so TARGET_SWITCHES cannot be used.
888 Assumption here is that rs6000_longcall_switch points into the
889 text of the complete option, rather than being a copy, so we can
890 scan back for the presence or absence of the no- modifier. */
891 if (rs6000_longcall_switch)
893 const char *base = rs6000_longcall_switch;
894 while (base[-1] != 'm') base--;
896 if (*rs6000_longcall_switch != '\0')
897 error ("invalid option `%s'", base);
898 rs6000_default_long_calls = (base[0] != 'n');
901 /* Handle -mprioritize-restricted-insns option. */
902 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
903 if (rs6000_sched_restricted_insns_priority_str)
904 rs6000_sched_restricted_insns_priority =
905 atoi (rs6000_sched_restricted_insns_priority_str);
907 /* Handle -msched-costly-dep option. */
908 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
909 if (rs6000_sched_costly_dep_str)
911 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
912 rs6000_sched_costly_dep = no_dep_costly;
913 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
914 rs6000_sched_costly_dep = all_deps_costly;
915 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
916 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
917 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
918 rs6000_sched_costly_dep = store_to_load_dep_costly;
920 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
923 /* Handle -minsert-sched-nops option. */
924 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
925 if (rs6000_sched_insert_nops_str)
927 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
928 rs6000_sched_insert_nops = sched_finish_none;
929 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
930 rs6000_sched_insert_nops = sched_finish_pad_groups;
931 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
932 rs6000_sched_insert_nops = sched_finish_regroup_exact;
934 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
937 #ifdef TARGET_REGNAMES
938 /* If the user desires alternate register names, copy in the
939 alternate names now. */
941 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
944 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
945 If -maix-struct-return or -msvr4-struct-return was explicitly
946 used, don't override with the ABI default. */
947 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
949 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
950 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
952 target_flags |= MASK_AIX_STRUCT_RET;
955 if (TARGET_LONG_DOUBLE_128
956 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
957 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
959 /* Allocate an alias set for register saves & restores from stack. */
960 rs6000_sr_alias_set = new_alias_set ();
963 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
965 /* We can only guarantee the availability of DI pseudo-ops when
966 assembling for 64-bit targets. */
969 targetm.asm_out.aligned_op.di = NULL;
970 targetm.asm_out.unaligned_op.di = NULL;
973 /* Set maximum branch target alignment at two instructions, eight bytes. */
974 align_jumps_max_skip = 8;
975 align_loops_max_skip = 8;
977 /* Arrange to save and restore machine status around nested functions. */
978 init_machine_status = rs6000_init_machine_status;
981 /* Handle generic options of the form -mfoo=yes/no.
982 NAME is the option name.
983 VALUE is the option value.
984 FLAG is the pointer to the flag where to store a 1 or 0, depending on
985 whether the option value is 'yes' or 'no' respectively. */
987 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
991 else if (!strcmp (value, "yes"))
993 else if (!strcmp (value, "no"))
996 error ("unknown -m%s= option specified: '%s'", name, value);
999 /* Handle -mabi= options. */
1001 rs6000_parse_abi_options (void)
1003 if (rs6000_abi_string == 0)
1005 else if (! strcmp (rs6000_abi_string, "altivec"))
1006 rs6000_altivec_abi = 1;
1007 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1008 rs6000_altivec_abi = 0;
1009 else if (! strcmp (rs6000_abi_string, "spe"))
1012 if (!TARGET_SPE_ABI)
1013 error ("not configured for ABI: '%s'", rs6000_abi_string);
1016 else if (! strcmp (rs6000_abi_string, "no-spe"))
1019 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1022 /* Handle -malign-XXXXXX options. */
1024 rs6000_parse_alignment_option (void)
1026 if (rs6000_alignment_string == 0)
1028 else if (! strcmp (rs6000_alignment_string, "power"))
1029 rs6000_alignment_flags = MASK_ALIGN_POWER;
1030 else if (! strcmp (rs6000_alignment_string, "natural"))
1031 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1033 error ("unknown -malign-XXXXX option specified: '%s'",
1034 rs6000_alignment_string);
1037 /* Validate and record the size specified with the -mtls-size option. */
1040 rs6000_parse_tls_size_option (void)
1042 if (rs6000_tls_size_string == 0)
1044 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1045 rs6000_tls_size = 16;
1046 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1047 rs6000_tls_size = 32;
1048 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1049 rs6000_tls_size = 64;
1051 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1055 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1059 /* Do anything needed at the start of the asm file. */
1062 rs6000_file_start (void)
1066 const char *start = buffer;
1067 struct rs6000_cpu_select *ptr;
1068 const char *default_cpu = TARGET_CPU_DEFAULT;
1069 FILE *file = asm_out_file;
1071 default_file_start ();
1073 #ifdef TARGET_BI_ARCH
1074 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1078 if (flag_verbose_asm)
1080 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1081 rs6000_select[0].string = default_cpu;
1083 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1085 ptr = &rs6000_select[i];
1086 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1088 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1093 #ifdef USING_ELFOS_H
1094 switch (rs6000_sdata)
1096 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1097 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1098 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1099 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1102 if (rs6000_sdata && g_switch_value)
1104 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1115 /* Return nonzero if this function is known to have a null epilogue. */
1118 direct_return (void)
1120 if (reload_completed)
1122 rs6000_stack_t *info = rs6000_stack_info ();
1124 if (info->first_gp_reg_save == 32
1125 && info->first_fp_reg_save == 64
1126 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1127 && ! info->lr_save_p
1128 && ! info->cr_save_p
1129 && info->vrsave_mask == 0
1137 /* Returns 1 always. */
1140 any_operand (rtx op ATTRIBUTE_UNUSED,
1141 enum machine_mode mode ATTRIBUTE_UNUSED)
1146 /* Returns 1 if op is the count register. */
1148 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1150 if (GET_CODE (op) != REG)
1153 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1156 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1162 /* Returns 1 if op is an altivec register. */
1164 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1167 return (register_operand (op, mode)
1168 && (GET_CODE (op) != REG
1169 || REGNO (op) > FIRST_PSEUDO_REGISTER
1170 || ALTIVEC_REGNO_P (REGNO (op))));
1174 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1176 if (GET_CODE (op) != REG)
1179 if (XER_REGNO_P (REGNO (op)))
1185 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1186 by such constants completes more quickly. */
1189 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1191 return ( GET_CODE (op) == CONST_INT
1192 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1195 /* Return 1 if OP is a constant that can fit in a D field. */
1198 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1200 return (GET_CODE (op) == CONST_INT
1201 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1204 /* Similar for an unsigned D field. */
1207 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1209 return (GET_CODE (op) == CONST_INT
1210 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1213 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1216 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1218 return (GET_CODE (op) == CONST_INT
1219 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1222 /* Returns 1 if OP is a CONST_INT that is a positive value
1223 and an exact power of 2. */
1226 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1228 return (GET_CODE (op) == CONST_INT
1230 && exact_log2 (INTVAL (op)) >= 0);
1233 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1237 gpc_reg_operand (rtx op, enum machine_mode mode)
1239 return (register_operand (op, mode)
1240 && (GET_CODE (op) != REG
1241 || (REGNO (op) >= ARG_POINTER_REGNUM
1242 && !XER_REGNO_P (REGNO (op)))
1243 || REGNO (op) < MQ_REGNO));
1246 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1250 cc_reg_operand (rtx op, enum machine_mode mode)
1252 return (register_operand (op, mode)
1253 && (GET_CODE (op) != REG
1254 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1255 || CR_REGNO_P (REGNO (op))));
1258 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1259 CR field that isn't CR0. */
1262 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1264 return (register_operand (op, mode)
1265 && (GET_CODE (op) != REG
1266 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1267 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1270 /* Returns 1 if OP is either a constant integer valid for a D-field or
1271 a non-special register. If a register, it must be in the proper
1272 mode unless MODE is VOIDmode. */
1275 reg_or_short_operand (rtx op, enum machine_mode mode)
1277 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1280 /* Similar, except check if the negation of the constant would be
1281 valid for a D-field. */
1284 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1286 if (GET_CODE (op) == CONST_INT)
1287 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1289 return gpc_reg_operand (op, mode);
1292 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1293 a non-special register. If a register, it must be in the proper
1294 mode unless MODE is VOIDmode. */
1297 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1299 if (gpc_reg_operand (op, mode))
1301 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1308 /* Return 1 if the operand is either a register or an integer whose
1309 high-order 16 bits are zero. */
1312 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1314 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1317 /* Return 1 is the operand is either a non-special register or ANY
1318 constant integer. */
1321 reg_or_cint_operand (rtx op, enum machine_mode mode)
1323 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1326 /* Return 1 is the operand is either a non-special register or ANY
1327 32-bit signed constant integer. */
1330 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1332 return (gpc_reg_operand (op, mode)
1333 || (GET_CODE (op) == CONST_INT
1334 #if HOST_BITS_PER_WIDE_INT != 32
1335 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1336 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1341 /* Return 1 is the operand is either a non-special register or a 32-bit
1342 signed constant integer valid for 64-bit addition. */
1345 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1347 return (gpc_reg_operand (op, mode)
1348 || (GET_CODE (op) == CONST_INT
1349 #if HOST_BITS_PER_WIDE_INT == 32
1350 && INTVAL (op) < 0x7fff8000
1352 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1358 /* Return 1 is the operand is either a non-special register or a 32-bit
1359 signed constant integer valid for 64-bit subtraction. */
1362 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1364 return (gpc_reg_operand (op, mode)
1365 || (GET_CODE (op) == CONST_INT
1366 #if HOST_BITS_PER_WIDE_INT == 32
1367 && (- INTVAL (op)) < 0x7fff8000
1369 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1375 /* Return 1 is the operand is either a non-special register or ANY
1376 32-bit unsigned constant integer. */
1379 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1381 if (GET_CODE (op) == CONST_INT)
1383 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1385 if (GET_MODE_BITSIZE (mode) <= 32)
1388 if (INTVAL (op) < 0)
1392 return ((INTVAL (op) & GET_MODE_MASK (mode)
1393 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1395 else if (GET_CODE (op) == CONST_DOUBLE)
1397 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1401 return CONST_DOUBLE_HIGH (op) == 0;
1404 return gpc_reg_operand (op, mode);
1407 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1410 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1412 return (GET_CODE (op) == SYMBOL_REF
1413 || GET_CODE (op) == CONST
1414 || GET_CODE (op) == LABEL_REF);
1417 /* Return 1 if the operand is a simple references that can be loaded via
1418 the GOT (labels involving addition aren't allowed). */
1421 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1423 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1426 /* Return the number of instructions it takes to form a constant in an
1427 integer register. */
1430 num_insns_constant_wide (HOST_WIDE_INT value)
1432 /* signed constant loadable with {cal|addi} */
1433 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1436 /* constant loadable with {cau|addis} */
1437 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1440 #if HOST_BITS_PER_WIDE_INT == 64
1441 else if (TARGET_POWERPC64)
1443 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1444 HOST_WIDE_INT high = value >> 31;
1446 if (high == 0 || high == -1)
1452 return num_insns_constant_wide (high) + 1;
1454 return (num_insns_constant_wide (high)
1455 + num_insns_constant_wide (low) + 1);
1464 num_insns_constant (rtx op, enum machine_mode mode)
1466 if (GET_CODE (op) == CONST_INT)
1468 #if HOST_BITS_PER_WIDE_INT == 64
1469 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1470 && mask64_operand (op, mode))
1474 return num_insns_constant_wide (INTVAL (op));
1477 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1482 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1483 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1484 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1487 else if (GET_CODE (op) == CONST_DOUBLE)
1493 int endian = (WORDS_BIG_ENDIAN == 0);
1495 if (mode == VOIDmode || mode == DImode)
1497 high = CONST_DOUBLE_HIGH (op);
1498 low = CONST_DOUBLE_LOW (op);
1502 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1503 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1505 low = l[1 - endian];
1509 return (num_insns_constant_wide (low)
1510 + num_insns_constant_wide (high));
1514 if (high == 0 && low >= 0)
1515 return num_insns_constant_wide (low);
1517 else if (high == -1 && low < 0)
1518 return num_insns_constant_wide (low);
1520 else if (mask64_operand (op, mode))
1524 return num_insns_constant_wide (high) + 1;
1527 return (num_insns_constant_wide (high)
1528 + num_insns_constant_wide (low) + 1);
1536 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1537 register with one instruction per word. We only do this if we can
1538 safely read CONST_DOUBLE_{LOW,HIGH}. */
1541 easy_fp_constant (rtx op, enum machine_mode mode)
1543 if (GET_CODE (op) != CONST_DOUBLE
1544 || GET_MODE (op) != mode
1545 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1548 /* Consider all constants with -msoft-float to be easy. */
1549 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1553 /* If we are using V.4 style PIC, consider all constants to be hard. */
1554 if (flag_pic && DEFAULT_ABI == ABI_V4)
1557 #ifdef TARGET_RELOCATABLE
1558 /* Similarly if we are using -mrelocatable, consider all constants
1560 if (TARGET_RELOCATABLE)
1569 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1570 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1572 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1573 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1574 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1575 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1578 else if (mode == DFmode)
1583 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1584 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1586 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1587 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1590 else if (mode == SFmode)
1595 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1596 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1598 return num_insns_constant_wide (l) == 1;
1601 else if (mode == DImode)
1602 return ((TARGET_POWERPC64
1603 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1604 || (num_insns_constant (op, DImode) <= 2));
1606 else if (mode == SImode)
1612 /* Return nonzero if all elements of a vector have the same value. */
1615 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1619 units = CONST_VECTOR_NUNITS (op);
1621 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1622 for (i = 1; i < units; ++i)
1623 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1630 /* Return 1 if the operand is a CONST_INT and can be put into a
1631 register without using memory. */
1634 easy_vector_constant (rtx op, enum machine_mode mode)
1638 if (GET_CODE (op) != CONST_VECTOR
1643 if (zero_constant (op, mode)
1644 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1645 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1648 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1651 if (TARGET_SPE && mode == V1DImode)
1654 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1655 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1657 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1659 evmergelo r0, r0, r0
1662 I don't know how efficient it would be to allow bigger constants,
1663 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1664 instructions is better than a 64-bit memory load, but I don't
1665 have the e500 timing specs. */
1666 if (TARGET_SPE && mode == V2SImode
1667 && cst >= -0x7fff && cst <= 0x7fff
1668 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1671 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1674 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1680 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1683 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1687 if (!easy_vector_constant (op, mode))
1690 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1692 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1696 output_vec_const_move (rtx *operands)
1699 enum machine_mode mode;
1705 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1706 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1707 mode = GET_MODE (dest);
1711 if (zero_constant (vec, mode))
1712 return "vxor %0,%0,%0";
1713 else if (EASY_VECTOR_15 (cst, vec, mode))
1715 operands[1] = GEN_INT (cst);
1719 return "vspltisw %0,%1";
1721 return "vspltish %0,%1";
1723 return "vspltisb %0,%1";
1728 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1736 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1737 pattern of V1DI, V4HI, and V2SF.
1739 FIXME: We should probably return # and add post reload
1740 splitters for these, but this way is so easy ;-).
1742 operands[1] = GEN_INT (cst);
1743 operands[2] = GEN_INT (cst2);
1745 return "li %0,%1\n\tevmergelo %0,%0,%0";
1747 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1753 /* Return 1 if the operand is the constant 0. This works for scalars
1754 as well as vectors. */
1756 zero_constant (rtx op, enum machine_mode mode)
1758 return op == CONST0_RTX (mode);
1761 /* Return 1 if the operand is 0.0. */
1763 zero_fp_constant (rtx op, enum machine_mode mode)
1765 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1768 /* Return 1 if the operand is in volatile memory. Note that during
1769 the RTL generation phase, memory_operand does not return TRUE for
1770 volatile memory references. So this function allows us to
1771 recognize volatile references where its safe. */
1774 volatile_mem_operand (rtx op, enum machine_mode mode)
1776 if (GET_CODE (op) != MEM)
1779 if (!MEM_VOLATILE_P (op))
1782 if (mode != GET_MODE (op))
1785 if (reload_completed)
1786 return memory_operand (op, mode);
1788 if (reload_in_progress)
1789 return strict_memory_address_p (mode, XEXP (op, 0));
1791 return memory_address_p (mode, XEXP (op, 0));
1794 /* Return 1 if the operand is an offsettable memory operand. */
1797 offsettable_mem_operand (rtx op, enum machine_mode mode)
1799 return ((GET_CODE (op) == MEM)
1800 && offsettable_address_p (reload_completed || reload_in_progress,
1801 mode, XEXP (op, 0)));
1804 /* Return 1 if the operand is either an easy FP constant (see above) or
1808 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1810 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1813 /* Return 1 if the operand is either a non-special register or an item
1814 that can be used as the operand of a `mode' add insn. */
1817 add_operand (rtx op, enum machine_mode mode)
1819 if (GET_CODE (op) == CONST_INT)
1820 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1821 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1823 return gpc_reg_operand (op, mode);
1826 /* Return 1 if OP is a constant but not a valid add_operand. */
1829 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1831 return (GET_CODE (op) == CONST_INT
1832 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1833 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1836 /* Return 1 if the operand is a non-special register or a constant that
1837 can be used as the operand of an OR or XOR insn on the RS/6000. */
1840 logical_operand (rtx op, enum machine_mode mode)
1842 HOST_WIDE_INT opl, oph;
1844 if (gpc_reg_operand (op, mode))
1847 if (GET_CODE (op) == CONST_INT)
1849 opl = INTVAL (op) & GET_MODE_MASK (mode);
1851 #if HOST_BITS_PER_WIDE_INT <= 32
1852 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1856 else if (GET_CODE (op) == CONST_DOUBLE)
1858 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1861 opl = CONST_DOUBLE_LOW (op);
1862 oph = CONST_DOUBLE_HIGH (op);
1869 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1870 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1873 /* Return 1 if C is a constant that is not a logical operand (as
1874 above), but could be split into one. */
1877 non_logical_cint_operand (rtx op, enum machine_mode mode)
1879 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1880 && ! logical_operand (op, mode)
1881 && reg_or_logical_cint_operand (op, mode));
1884 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1885 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1886 Reject all ones and all zeros, since these should have been optimized
1887 away and confuse the making of MB and ME. */
1890 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1892 HOST_WIDE_INT c, lsb;
1894 if (GET_CODE (op) != CONST_INT)
1899 /* Fail in 64-bit mode if the mask wraps around because the upper
1900 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1901 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1904 /* We don't change the number of transitions by inverting,
1905 so make sure we start with the LS bit zero. */
1909 /* Reject all zeros or all ones. */
1913 /* Find the first transition. */
1916 /* Invert to look for a second transition. */
1919 /* Erase first transition. */
1922 /* Find the second transition (if any). */
1925 /* Match if all the bits above are 1's (or c is zero). */
1929 /* Return 1 for the PowerPC64 rlwinm corner case. */
1932 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1934 HOST_WIDE_INT c, lsb;
1936 if (GET_CODE (op) != CONST_INT)
1941 if ((c & 0x80000001) != 0x80000001)
1955 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1956 It is if there are no more than one 1->0 or 0->1 transitions.
1957 Reject all zeros, since zero should have been optimized away and
1958 confuses the making of MB and ME. */
1961 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1963 if (GET_CODE (op) == CONST_INT)
1965 HOST_WIDE_INT c, lsb;
1969 /* Reject all zeros. */
1973 /* We don't change the number of transitions by inverting,
1974 so make sure we start with the LS bit zero. */
1978 /* Find the transition, and check that all bits above are 1's. */
1981 /* Match if all the bits above are 1's (or c is zero). */
1987 /* Like mask64_operand, but allow up to three transitions. This
1988 predicate is used by insn patterns that generate two rldicl or
1989 rldicr machine insns. */
1992 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1994 if (GET_CODE (op) == CONST_INT)
1996 HOST_WIDE_INT c, lsb;
2000 /* Disallow all zeros. */
2004 /* We don't change the number of transitions by inverting,
2005 so make sure we start with the LS bit zero. */
2009 /* Find the first transition. */
2012 /* Invert to look for a second transition. */
2015 /* Erase first transition. */
2018 /* Find the second transition. */
2021 /* Invert to look for a third transition. */
2024 /* Erase second transition. */
2027 /* Find the third transition (if any). */
2030 /* Match if all the bits above are 1's (or c is zero). */
2036 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2037 implement ANDing by the mask IN. */
2039 build_mask64_2_operands (rtx in, rtx *out)
2041 #if HOST_BITS_PER_WIDE_INT >= 64
2042 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2045 if (GET_CODE (in) != CONST_INT)
2051 /* Assume c initially something like 0x00fff000000fffff. The idea
2052 is to rotate the word so that the middle ^^^^^^ group of zeros
2053 is at the MS end and can be cleared with an rldicl mask. We then
2054 rotate back and clear off the MS ^^ group of zeros with a
2056 c = ~c; /* c == 0xff000ffffff00000 */
2057 lsb = c & -c; /* lsb == 0x0000000000100000 */
2058 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2059 c = ~c; /* c == 0x00fff000000fffff */
2060 c &= -lsb; /* c == 0x00fff00000000000 */
2061 lsb = c & -c; /* lsb == 0x0000100000000000 */
2062 c = ~c; /* c == 0xff000fffffffffff */
2063 c &= -lsb; /* c == 0xff00000000000000 */
2065 while ((lsb >>= 1) != 0)
2066 shift++; /* shift == 44 on exit from loop */
2067 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2068 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2069 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2073 /* Assume c initially something like 0xff000f0000000000. The idea
2074 is to rotate the word so that the ^^^ middle group of zeros
2075 is at the LS end and can be cleared with an rldicr mask. We then
2076 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2078 lsb = c & -c; /* lsb == 0x0000010000000000 */
2079 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2080 c = ~c; /* c == 0x00fff0ffffffffff */
2081 c &= -lsb; /* c == 0x00fff00000000000 */
2082 lsb = c & -c; /* lsb == 0x0000100000000000 */
2083 c = ~c; /* c == 0xff000fffffffffff */
2084 c &= -lsb; /* c == 0xff00000000000000 */
2086 while ((lsb >>= 1) != 0)
2087 shift++; /* shift == 44 on exit from loop */
2088 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2089 m1 >>= shift; /* m1 == 0x0000000000000fff */
2090 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2093 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2094 masks will be all 1's. We are guaranteed more than one transition. */
2095 out[0] = GEN_INT (64 - shift);
2096 out[1] = GEN_INT (m1);
2097 out[2] = GEN_INT (shift);
2098 out[3] = GEN_INT (m2);
2106 /* Return 1 if the operand is either a non-special register or a constant
2107 that can be used as the operand of a PowerPC64 logical AND insn. */
2110 and64_operand (rtx op, enum machine_mode mode)
2112 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2113 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2115 return (logical_operand (op, mode) || mask64_operand (op, mode));
2118 /* Like the above, but also match constants that can be implemented
2119 with two rldicl or rldicr insns. */
2122 and64_2_operand (rtx op, enum machine_mode mode)
2124 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2125 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2127 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2130 /* Return 1 if the operand is either a non-special register or a
2131 constant that can be used as the operand of an RS/6000 logical AND insn. */
2134 and_operand (rtx op, enum machine_mode mode)
2136 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2137 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2139 return (logical_operand (op, mode) || mask_operand (op, mode));
2142 /* Return 1 if the operand is a general register or memory operand. */
2145 reg_or_mem_operand (rtx op, enum machine_mode mode)
2147 return (gpc_reg_operand (op, mode)
2148 || memory_operand (op, mode)
2149 || macho_lo_sum_memory_operand (op, mode)
2150 || volatile_mem_operand (op, mode));
2153 /* Return 1 if the operand is a general register or memory operand without
2154 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2158 lwa_operand (rtx op, enum machine_mode mode)
2162 if (reload_completed && GET_CODE (inner) == SUBREG)
2163 inner = SUBREG_REG (inner);
2165 return gpc_reg_operand (inner, mode)
2166 || (memory_operand (inner, mode)
2167 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2168 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2169 && (GET_CODE (XEXP (inner, 0)) != PLUS
2170 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2171 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2174 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2177 symbol_ref_operand (rtx op, enum machine_mode mode)
2179 if (mode != VOIDmode && GET_MODE (op) != mode)
2182 return (GET_CODE (op) == SYMBOL_REF
2183 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2186 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2187 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2190 call_operand (rtx op, enum machine_mode mode)
2192 if (mode != VOIDmode && GET_MODE (op) != mode)
2195 return (GET_CODE (op) == SYMBOL_REF
2196 || (GET_CODE (op) == REG
2197 && (REGNO (op) == LINK_REGISTER_REGNUM
2198 || REGNO (op) == COUNT_REGISTER_REGNUM
2199 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2202 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2206 current_file_function_operand (rtx op,
2207 enum machine_mode mode ATTRIBUTE_UNUSED)
2209 return (GET_CODE (op) == SYMBOL_REF
2210 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2211 && (SYMBOL_REF_LOCAL_P (op)
2212 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2215 /* Return 1 if this operand is a valid input for a move insn. */
2218 input_operand (rtx op, enum machine_mode mode)
2220 /* Memory is always valid. */
2221 if (memory_operand (op, mode))
2224 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2225 if (GET_CODE (op) == CONSTANT_P_RTX)
2228 /* For floating-point, easy constants are valid. */
2229 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2231 && easy_fp_constant (op, mode))
2234 /* Allow any integer constant. */
2235 if (GET_MODE_CLASS (mode) == MODE_INT
2236 && (GET_CODE (op) == CONST_INT
2237 || GET_CODE (op) == CONST_DOUBLE))
2240 /* Allow easy vector constants. */
2241 if (GET_CODE (op) == CONST_VECTOR
2242 && easy_vector_constant (op, mode))
2245 /* For floating-point or multi-word mode, the only remaining valid type
2247 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2248 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2249 return register_operand (op, mode);
2251 /* The only cases left are integral modes one word or smaller (we
2252 do not get called for MODE_CC values). These can be in any
2254 if (register_operand (op, mode))
2257 /* A SYMBOL_REF referring to the TOC is valid. */
2258 if (legitimate_constant_pool_address_p (op))
2261 /* A constant pool expression (relative to the TOC) is valid */
2262 if (toc_relative_expr_p (op))
2265 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2267 if (DEFAULT_ABI == ABI_V4
2268 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2269 && small_data_operand (op, Pmode))
2275 /* Return 1 for an operand in small memory on V.4/eabi. */
2278 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2279 enum machine_mode mode ATTRIBUTE_UNUSED)
2284 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2287 if (DEFAULT_ABI != ABI_V4)
2290 if (GET_CODE (op) == SYMBOL_REF)
2293 else if (GET_CODE (op) != CONST
2294 || GET_CODE (XEXP (op, 0)) != PLUS
2295 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2296 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2301 rtx sum = XEXP (op, 0);
2302 HOST_WIDE_INT summand;
2304 /* We have to be careful here, because it is the referenced address
2305 that must be 32k from _SDA_BASE_, not just the symbol. */
2306 summand = INTVAL (XEXP (sum, 1));
2307 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2310 sym_ref = XEXP (sum, 0);
2313 return SYMBOL_REF_SMALL_P (sym_ref);
2319 /* Return true if either operand is a general purpose register. */
2322 gpr_or_gpr_p (rtx op0, rtx op1)
2324 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2325 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2329 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2332 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2334 switch (GET_CODE(op))
2337 if (RS6000_SYMBOL_REF_TLS_P (op))
2339 else if (CONSTANT_POOL_ADDRESS_P (op))
2341 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2349 else if (! strcmp (XSTR (op, 0), toc_label_name))
2358 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2359 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2361 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2370 constant_pool_expr_p (rtx op)
2374 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2378 toc_relative_expr_p (rtx op)
2382 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2385 /* SPE offset addressing is limited to 5-bits worth of double words. */
2386 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2389 legitimate_constant_pool_address_p (rtx x)
2392 && GET_CODE (x) == PLUS
2393 && GET_CODE (XEXP (x, 0)) == REG
2394 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2395 && constant_pool_expr_p (XEXP (x, 1)));
2399 legitimate_small_data_p (enum machine_mode mode, rtx x)
2401 return (DEFAULT_ABI == ABI_V4
2402 && !flag_pic && !TARGET_TOC
2403 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2404 && small_data_operand (x, mode));
2408 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2410 unsigned HOST_WIDE_INT offset, extra;
2412 if (GET_CODE (x) != PLUS)
2414 if (GET_CODE (XEXP (x, 0)) != REG)
2416 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2418 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2421 offset = INTVAL (XEXP (x, 1));
2429 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2430 which leaves the only valid constant offset of zero, which by
2431 canonicalization rules is also invalid. */
2438 /* SPE vector modes. */
2439 return SPE_CONST_OFFSET_OK (offset);
2445 else if (offset & 3)
2453 else if (offset & 3)
2463 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2467 legitimate_indexed_address_p (rtx x, int strict)
2471 if (GET_CODE (x) != PLUS)
2476 if (!REG_P (op0) || !REG_P (op1))
2479 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2480 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2481 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2482 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2486 legitimate_indirect_address_p (rtx x, int strict)
2488 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2492 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2494 if (!TARGET_MACHO || !flag_pic
2495 || mode != SImode || GET_CODE(x) != MEM)
2499 if (GET_CODE (x) != LO_SUM)
2501 if (GET_CODE (XEXP (x, 0)) != REG)
2503 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2507 return CONSTANT_P (x);
2511 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2513 if (GET_CODE (x) != LO_SUM)
2515 if (GET_CODE (XEXP (x, 0)) != REG)
2517 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2523 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2527 if (GET_MODE_NUNITS (mode) != 1)
2529 if (GET_MODE_BITSIZE (mode) > 32
2530 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2533 return CONSTANT_P (x);
2540 /* Try machine-dependent ways of modifying an illegitimate address
2541 to be legitimate. If we find one, return the new, valid address.
2542 This is used from only one place: `memory_address' in explow.c.
2544 OLDX is the address as it was before break_out_memory_refs was
2545 called. In some cases it is useful to look at this to decide what
2548 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2550 It is always safe for this function to do nothing. It exists to
2551 recognize opportunities to optimize the output.
2553 On RS/6000, first check for the sum of a register with a constant
2554 integer that is out of range. If so, generate code to add the
2555 constant with the low-order 16 bits masked to the register and force
2556 this result into another register (this can be done with `cau').
2557 Then generate an address of REG+(CONST&0xffff), allowing for the
2558 possibility of bit 16 being a one.
2560 Then check for the sum of a register and something not constant, try to
2561 load the other things into a register and return the sum. */
2564 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2565 enum machine_mode mode)
2567 if (GET_CODE (x) == SYMBOL_REF)
2569 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2571 return rs6000_legitimize_tls_address (x, model);
2574 if (GET_CODE (x) == PLUS
2575 && GET_CODE (XEXP (x, 0)) == REG
2576 && GET_CODE (XEXP (x, 1)) == CONST_INT
2577 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2579 HOST_WIDE_INT high_int, low_int;
2581 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2582 high_int = INTVAL (XEXP (x, 1)) - low_int;
2583 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2584 GEN_INT (high_int)), 0);
2585 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2587 else if (GET_CODE (x) == PLUS
2588 && GET_CODE (XEXP (x, 0)) == REG
2589 && GET_CODE (XEXP (x, 1)) != CONST_INT
2590 && GET_MODE_NUNITS (mode) == 1
2591 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2593 || (mode != DFmode && mode != TFmode))
2594 && (TARGET_POWERPC64 || mode != DImode)
2597 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2598 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2600 else if (ALTIVEC_VECTOR_MODE (mode))
2604 /* Make sure both operands are registers. */
2605 if (GET_CODE (x) == PLUS)
2606 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2607 force_reg (Pmode, XEXP (x, 1)));
2609 reg = force_reg (Pmode, x);
2612 else if (SPE_VECTOR_MODE (mode))
2614 /* We accept [reg + reg] and [reg + OFFSET]. */
2616 if (GET_CODE (x) == PLUS)
2618 rtx op1 = XEXP (x, 0);
2619 rtx op2 = XEXP (x, 1);
2621 op1 = force_reg (Pmode, op1);
2623 if (GET_CODE (op2) != REG
2624 && (GET_CODE (op2) != CONST_INT
2625 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2626 op2 = force_reg (Pmode, op2);
2628 return gen_rtx_PLUS (Pmode, op1, op2);
2631 return force_reg (Pmode, x);
2637 && GET_CODE (x) != CONST_INT
2638 && GET_CODE (x) != CONST_DOUBLE
2640 && GET_MODE_NUNITS (mode) == 1
2641 && (GET_MODE_BITSIZE (mode) <= 32
2642 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2644 rtx reg = gen_reg_rtx (Pmode);
2645 emit_insn (gen_elf_high (reg, x));
2646 return gen_rtx_LO_SUM (Pmode, reg, x);
2648 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2651 && ! MACHO_DYNAMIC_NO_PIC_P
2653 && GET_CODE (x) != CONST_INT
2654 && GET_CODE (x) != CONST_DOUBLE
2656 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2660 rtx reg = gen_reg_rtx (Pmode);
2661 emit_insn (gen_macho_high (reg, x));
2662 return gen_rtx_LO_SUM (Pmode, reg, x);
2665 && constant_pool_expr_p (x)
2666 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2668 return create_TOC_reference (x);
2674 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2676 static GTY(()) rtx rs6000_tls_symbol;
2678 rs6000_tls_get_addr (void)
2680 if (!rs6000_tls_symbol)
2681 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2683 return rs6000_tls_symbol;
2686 /* Construct the SYMBOL_REF for TLS GOT references. */
2688 static GTY(()) rtx rs6000_got_symbol;
2690 rs6000_got_sym (void)
2692 if (!rs6000_got_symbol)
2694 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2695 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2696 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2699 return rs6000_got_symbol;
2702 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2703 this (thread-local) address. */
2706 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2710 dest = gen_reg_rtx (Pmode);
2711 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2717 tlsreg = gen_rtx_REG (Pmode, 13);
2718 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2722 tlsreg = gen_rtx_REG (Pmode, 2);
2723 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2727 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2731 tmp = gen_reg_rtx (Pmode);
2734 tlsreg = gen_rtx_REG (Pmode, 13);
2735 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2739 tlsreg = gen_rtx_REG (Pmode, 2);
2740 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2744 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2746 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2751 rtx r3, got, tga, tmp1, tmp2, eqv;
2754 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2758 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2761 rtx gsym = rs6000_got_sym ();
2762 got = gen_reg_rtx (Pmode);
2764 rs6000_emit_move (got, gsym, Pmode);
2768 static int tls_got_labelno = 0;
2769 rtx tempLR, lab, tmp3, mem;
2772 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2773 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2774 tempLR = gen_reg_rtx (Pmode);
2775 tmp1 = gen_reg_rtx (Pmode);
2776 tmp2 = gen_reg_rtx (Pmode);
2777 tmp3 = gen_reg_rtx (Pmode);
2778 mem = gen_rtx_MEM (Pmode, tmp1);
2779 RTX_UNCHANGING_P (mem) = 1;
2781 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2783 emit_move_insn (tmp1, tempLR);
2784 emit_move_insn (tmp2, mem);
2785 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2786 last = emit_move_insn (got, tmp3);
2787 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2789 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2791 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2797 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2799 r3 = gen_rtx_REG (Pmode, 3);
2801 insn = gen_tls_gd_64 (r3, got, addr);
2803 insn = gen_tls_gd_32 (r3, got, addr);
2806 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2807 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2808 insn = emit_call_insn (insn);
2809 CONST_OR_PURE_CALL_P (insn) = 1;
2810 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2811 insn = get_insns ();
2813 emit_libcall_block (insn, dest, r3, addr);
2815 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2817 r3 = gen_rtx_REG (Pmode, 3);
2819 insn = gen_tls_ld_64 (r3, got);
2821 insn = gen_tls_ld_32 (r3, got);
2824 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2825 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2826 insn = emit_call_insn (insn);
2827 CONST_OR_PURE_CALL_P (insn) = 1;
2828 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2829 insn = get_insns ();
2831 tmp1 = gen_reg_rtx (Pmode);
2832 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2834 emit_libcall_block (insn, tmp1, r3, eqv);
2835 if (rs6000_tls_size == 16)
2838 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2840 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2842 else if (rs6000_tls_size == 32)
2844 tmp2 = gen_reg_rtx (Pmode);
2846 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2848 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2851 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2853 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2857 tmp2 = gen_reg_rtx (Pmode);
2859 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2861 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2863 insn = gen_rtx_SET (Pmode, dest,
2864 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2870 /* IE, or 64 bit offset LE. */
2871 tmp2 = gen_reg_rtx (Pmode);
2873 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2875 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2878 insn = gen_tls_tls_64 (dest, tmp2, addr);
2880 insn = gen_tls_tls_32 (dest, tmp2, addr);
2888 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2889 instruction definitions. */
2892 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2894 return RS6000_SYMBOL_REF_TLS_P (x);
2897 /* Return 1 if X contains a thread-local symbol. */
2900 rs6000_tls_referenced_p (rtx x)
2902 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2905 /* Return 1 if *X is a thread-local symbol. This is the same as
2906 rs6000_tls_symbol_ref except for the type of the unused argument. */
2909 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2911 return RS6000_SYMBOL_REF_TLS_P (*x);
2914 /* The convention appears to be to define this wherever it is used.
2915 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2916 is now used here. */
2917 #ifndef REG_MODE_OK_FOR_BASE_P
2918 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2921 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2922 replace the input X, or the original X if no replacement is called for.
2923 The output parameter *WIN is 1 if the calling macro should goto WIN,
2926 For RS/6000, we wish to handle large displacements off a base
2927 register by splitting the addend across an addiu/addis and the mem insn.
2928 This cuts number of extra insns needed from 3 to 1.
2930 On Darwin, we use this to generate code for floating point constants.
2931 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2932 The Darwin code is inside #if TARGET_MACHO because only then is
2933 machopic_function_base_name() defined. */
2935 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2936 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
2938 /* We must recognize output that we have already generated ourselves. */
2939 if (GET_CODE (x) == PLUS
2940 && GET_CODE (XEXP (x, 0)) == PLUS
2941 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2942 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2943 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2945 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2946 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2947 opnum, (enum reload_type)type);
2953 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2954 && GET_CODE (x) == LO_SUM
2955 && GET_CODE (XEXP (x, 0)) == PLUS
2956 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2957 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2958 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2959 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2960 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2961 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2962 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2964 /* Result of previous invocation of this function on Darwin
2965 floating point constant. */
2966 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2967 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2968 opnum, (enum reload_type)type);
2973 if (GET_CODE (x) == PLUS
2974 && GET_CODE (XEXP (x, 0)) == REG
2975 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2976 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2977 && GET_CODE (XEXP (x, 1)) == CONST_INT
2978 && !SPE_VECTOR_MODE (mode)
2979 && !ALTIVEC_VECTOR_MODE (mode))
2981 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2982 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2984 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2986 /* Check for 32-bit overflow. */
2987 if (high + low != val)
2993 /* Reload the high part into a base reg; leave the low part
2994 in the mem directly. */
2996 x = gen_rtx_PLUS (GET_MODE (x),
2997 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3001 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3002 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3003 opnum, (enum reload_type)type);
3008 if (GET_CODE (x) == SYMBOL_REF
3009 && DEFAULT_ABI == ABI_DARWIN
3010 && !ALTIVEC_VECTOR_MODE (mode)
3013 /* Darwin load of floating point constant. */
3014 rtx offset = gen_rtx (CONST, Pmode,
3015 gen_rtx (MINUS, Pmode, x,
3016 gen_rtx (SYMBOL_REF, Pmode,
3017 machopic_function_base_name ())));
3018 x = gen_rtx (LO_SUM, GET_MODE (x),
3019 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
3020 gen_rtx (HIGH, Pmode, offset)), offset);
3021 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3022 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3023 opnum, (enum reload_type)type);
3027 if (GET_CODE (x) == SYMBOL_REF
3028 && DEFAULT_ABI == ABI_DARWIN
3029 && !ALTIVEC_VECTOR_MODE (mode)
3030 && MACHO_DYNAMIC_NO_PIC_P)
3032 /* Darwin load of floating point constant. */
3033 x = gen_rtx (LO_SUM, GET_MODE (x),
3034 gen_rtx (HIGH, Pmode, x), x);
3035 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3036 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3037 opnum, (enum reload_type)type);
3043 && constant_pool_expr_p (x)
3044 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3046 (x) = create_TOC_reference (x);
3054 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3055 that is a valid memory address for an instruction.
3056 The MODE argument is the machine mode for the MEM expression
3057 that wants to use this address.
3059 On the RS/6000, there are four valid address: a SYMBOL_REF that
3060 refers to a constant pool entry of an address (or the sum of it
3061 plus a constant), a short (16-bit signed) constant plus a register,
3062 the sum of two registers, or a register indirect, possibly with an
3063 auto-increment. For DFmode and DImode with a constant plus register,
3064 we must ensure that both words are addressable or PowerPC64 with offset
3067 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3068 32-bit DImode, TImode), indexed addressing cannot be used because
3069 adjacent memory cells are accessed by adding word-sized offsets
3070 during assembly output. */
3072 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3074 if (RS6000_SYMBOL_REF_TLS_P (x))
3076 if (legitimate_indirect_address_p (x, reg_ok_strict))
3078 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3079 && !ALTIVEC_VECTOR_MODE (mode)
3080 && !SPE_VECTOR_MODE (mode)
3082 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3084 if (legitimate_small_data_p (mode, x))
3086 if (legitimate_constant_pool_address_p (x))
3088 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3090 && GET_CODE (x) == PLUS
3091 && GET_CODE (XEXP (x, 0)) == REG
3092 && XEXP (x, 0) == virtual_stack_vars_rtx
3093 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3095 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3098 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3100 || (mode != DFmode && mode != TFmode))
3101 && (TARGET_POWERPC64 || mode != DImode)
3102 && legitimate_indexed_address_p (x, reg_ok_strict))
3104 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3109 /* Go to LABEL if ADDR (a legitimate address expression)
3110 has an effect that depends on the machine mode it is used for.
3112 On the RS/6000 this is true of all integral offsets (since AltiVec
3113 modes don't allow them) or is a pre-increment or decrement.
3115 ??? Except that due to conceptual problems in offsettable_address_p
3116 we can't really report the problems of integral offsets. So leave
3117 this assuming that the adjustable offset must be valid for the
3118 sub-words of a TFmode operand, which is what we had before. */
3121 rs6000_mode_dependent_address (rtx addr)
3123 switch (GET_CODE (addr))
3126 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3128 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3129 return val + 12 + 0x8000 >= 0x10000;
3138 return TARGET_UPDATE;
3147 /* Try to output insns to set TARGET equal to the constant C if it can
3148 be done in less than N insns. Do all computations in MODE.
3149 Returns the place where the output has been placed if it can be
3150 done and the insns have been emitted. If it would take more than N
3151 insns, zero is returned and no insns and emitted. */
3154 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3155 rtx source, int n ATTRIBUTE_UNUSED)
3157 rtx result, insn, set;
3158 HOST_WIDE_INT c0, c1;
3160 if (mode == QImode || mode == HImode)
3163 dest = gen_reg_rtx (mode);
3164 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3167 else if (mode == SImode)
3169 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3171 emit_insn (gen_rtx_SET (VOIDmode, result,
3172 GEN_INT (INTVAL (source)
3173 & (~ (HOST_WIDE_INT) 0xffff))));
3174 emit_insn (gen_rtx_SET (VOIDmode, dest,
3175 gen_rtx_IOR (SImode, result,
3176 GEN_INT (INTVAL (source) & 0xffff))));
3179 else if (mode == DImode)
3181 if (GET_CODE (source) == CONST_INT)
3183 c0 = INTVAL (source);
3186 else if (GET_CODE (source) == CONST_DOUBLE)
3188 #if HOST_BITS_PER_WIDE_INT >= 64
3189 c0 = CONST_DOUBLE_LOW (source);
3192 c0 = CONST_DOUBLE_LOW (source);
3193 c1 = CONST_DOUBLE_HIGH (source);
3199 result = rs6000_emit_set_long_const (dest, c0, c1);
3204 insn = get_last_insn ();
3205 set = single_set (insn);
3206 if (! CONSTANT_P (SET_SRC (set)))
3207 set_unique_reg_note (insn, REG_EQUAL, source);
3212 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3213 fall back to a straight forward decomposition. We do this to avoid
3214 exponential run times encountered when looking for longer sequences
3215 with rs6000_emit_set_const. */
3217 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3219 if (!TARGET_POWERPC64)
3221 rtx operand1, operand2;
3223 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3225 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3227 emit_move_insn (operand1, GEN_INT (c1));
3228 emit_move_insn (operand2, GEN_INT (c2));
3232 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3235 ud2 = (c1 & 0xffff0000) >> 16;
3236 #if HOST_BITS_PER_WIDE_INT >= 64
3240 ud4 = (c2 & 0xffff0000) >> 16;
3242 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3243 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3246 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3248 emit_move_insn (dest, GEN_INT (ud1));
3251 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3252 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3255 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3258 emit_move_insn (dest, GEN_INT (ud2 << 16));
3260 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3262 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3263 || (ud4 == 0 && ! (ud3 & 0x8000)))
3266 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3269 emit_move_insn (dest, GEN_INT (ud3 << 16));
3272 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3273 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3275 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3280 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3283 emit_move_insn (dest, GEN_INT (ud4 << 16));
3286 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3288 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3290 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3291 GEN_INT (ud2 << 16)));
3293 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3299 /* Emit a move from SOURCE to DEST in mode MODE. */
3301 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3305 operands[1] = source;
3307 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3308 if (GET_CODE (operands[1]) == CONST_DOUBLE
3309 && ! FLOAT_MODE_P (mode)
3310 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3312 /* FIXME. This should never happen. */
3313 /* Since it seems that it does, do the safe thing and convert
3315 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3317 if (GET_CODE (operands[1]) == CONST_DOUBLE
3318 && ! FLOAT_MODE_P (mode)
3319 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3320 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3321 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3322 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3325 /* Check if GCC is setting up a block move that will end up using FP
3326 registers as temporaries. We must make sure this is acceptable. */
3327 if (GET_CODE (operands[0]) == MEM
3328 && GET_CODE (operands[1]) == MEM
3330 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3331 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3332 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3333 ? 32 : MEM_ALIGN (operands[0])))
3334 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3336 : MEM_ALIGN (operands[1]))))
3337 && ! MEM_VOLATILE_P (operands [0])
3338 && ! MEM_VOLATILE_P (operands [1]))
3340 emit_move_insn (adjust_address (operands[0], SImode, 0),
3341 adjust_address (operands[1], SImode, 0));
3342 emit_move_insn (adjust_address (operands[0], SImode, 4),
3343 adjust_address (operands[1], SImode, 4));
3347 if (!no_new_pseudos)
3349 if (GET_CODE (operands[1]) == MEM && optimize > 0
3350 && (mode == QImode || mode == HImode || mode == SImode)
3351 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3353 rtx reg = gen_reg_rtx (word_mode);
3355 emit_insn (gen_rtx_SET (word_mode, reg,
3356 gen_rtx_ZERO_EXTEND (word_mode,
3358 operands[1] = gen_lowpart (mode, reg);
3360 if (GET_CODE (operands[0]) != REG)
3361 operands[1] = force_reg (mode, operands[1]);
3364 if (mode == SFmode && ! TARGET_POWERPC
3365 && TARGET_HARD_FLOAT && TARGET_FPRS
3366 && GET_CODE (operands[0]) == MEM)
3370 if (reload_in_progress || reload_completed)
3371 regnum = true_regnum (operands[1]);
3372 else if (GET_CODE (operands[1]) == REG)
3373 regnum = REGNO (operands[1]);
3377 /* If operands[1] is a register, on POWER it may have
3378 double-precision data in it, so truncate it to single
3380 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3383 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3384 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3385 operands[1] = newreg;
3389 /* Recognize the case where operand[1] is a reference to thread-local
3390 data and load its address to a register. */
3391 if (GET_CODE (operands[1]) == SYMBOL_REF)
3393 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3395 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3398 /* Handle the case where reload calls us with an invalid address. */
3399 if (reload_in_progress && mode == Pmode
3400 && (! general_operand (operands[1], mode)
3401 || ! nonimmediate_operand (operands[0], mode)))
3404 /* Handle the case of CONSTANT_P_RTX. */
3405 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3408 /* FIXME: In the long term, this switch statement should go away
3409 and be replaced by a sequence of tests based on things like
3415 if (CONSTANT_P (operands[1])
3416 && GET_CODE (operands[1]) != CONST_INT)
3417 operands[1] = force_const_mem (mode, operands[1]);
3423 if (CONSTANT_P (operands[1])
3424 && ! easy_fp_constant (operands[1], mode))
3425 operands[1] = force_const_mem (mode, operands[1]);
3436 if (CONSTANT_P (operands[1])
3437 && !easy_vector_constant (operands[1], mode))
3438 operands[1] = force_const_mem (mode, operands[1]);
3443 /* Use default pattern for address of ELF small data */
3446 && DEFAULT_ABI == ABI_V4
3447 && (GET_CODE (operands[1]) == SYMBOL_REF
3448 || GET_CODE (operands[1]) == CONST)
3449 && small_data_operand (operands[1], mode))
3451 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3455 if (DEFAULT_ABI == ABI_V4
3456 && mode == Pmode && mode == SImode
3457 && flag_pic == 1 && got_operand (operands[1], mode))
3459 emit_insn (gen_movsi_got (operands[0], operands[1]));
3463 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3467 && CONSTANT_P (operands[1])
3468 && GET_CODE (operands[1]) != HIGH
3469 && GET_CODE (operands[1]) != CONST_INT)
3471 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3473 /* If this is a function address on -mcall-aixdesc,
3474 convert it to the address of the descriptor. */
3475 if (DEFAULT_ABI == ABI_AIX
3476 && GET_CODE (operands[1]) == SYMBOL_REF
3477 && XSTR (operands[1], 0)[0] == '.')
3479 const char *name = XSTR (operands[1], 0);
3481 while (*name == '.')
3483 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3484 CONSTANT_POOL_ADDRESS_P (new_ref)
3485 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3486 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3487 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3488 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3489 operands[1] = new_ref;
3492 if (DEFAULT_ABI == ABI_DARWIN)
3495 if (MACHO_DYNAMIC_NO_PIC_P)
3497 /* Take care of any required data indirection. */
3498 operands[1] = rs6000_machopic_legitimize_pic_address (
3499 operands[1], mode, operands[0]);
3500 if (operands[0] != operands[1])
3501 emit_insn (gen_rtx_SET (VOIDmode,
3502 operands[0], operands[1]));
3506 emit_insn (gen_macho_high (target, operands[1]));
3507 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3511 emit_insn (gen_elf_high (target, operands[1]));
3512 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3516 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3517 and we have put it in the TOC, we just need to make a TOC-relative
3520 && GET_CODE (operands[1]) == SYMBOL_REF
3521 && constant_pool_expr_p (operands[1])
3522 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3523 get_pool_mode (operands[1])))
3525 operands[1] = create_TOC_reference (operands[1]);
3527 else if (mode == Pmode
3528 && CONSTANT_P (operands[1])
3529 && ((GET_CODE (operands[1]) != CONST_INT
3530 && ! easy_fp_constant (operands[1], mode))
3531 || (GET_CODE (operands[1]) == CONST_INT
3532 && num_insns_constant (operands[1], mode) > 2)
3533 || (GET_CODE (operands[0]) == REG
3534 && FP_REGNO_P (REGNO (operands[0]))))
3535 && GET_CODE (operands[1]) != HIGH
3536 && ! legitimate_constant_pool_address_p (operands[1])
3537 && ! toc_relative_expr_p (operands[1]))
3539 /* Emit a USE operation so that the constant isn't deleted if
3540 expensive optimizations are turned on because nobody
3541 references it. This should only be done for operands that
3542 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3543 This should not be done for operands that contain LABEL_REFs.
3544 For now, we just handle the obvious case. */
3545 if (GET_CODE (operands[1]) != LABEL_REF)
3546 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3549 /* Darwin uses a special PIC legitimizer. */
3550 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3553 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3555 if (operands[0] != operands[1])
3556 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3561 /* If we are to limit the number of things we put in the TOC and
3562 this is a symbol plus a constant we can add in one insn,
3563 just put the symbol in the TOC and add the constant. Don't do
3564 this if reload is in progress. */
3565 if (GET_CODE (operands[1]) == CONST
3566 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3567 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3568 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3569 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3570 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3571 && ! side_effects_p (operands[0]))
3574 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3575 rtx other = XEXP (XEXP (operands[1], 0), 1);
3577 sym = force_reg (mode, sym);
3579 emit_insn (gen_addsi3 (operands[0], sym, other));
3581 emit_insn (gen_adddi3 (operands[0], sym, other));
3585 operands[1] = force_const_mem (mode, operands[1]);
3588 && constant_pool_expr_p (XEXP (operands[1], 0))
3589 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3590 get_pool_constant (XEXP (operands[1], 0)),
3591 get_pool_mode (XEXP (operands[1], 0))))
3594 = gen_rtx_MEM (mode,
3595 create_TOC_reference (XEXP (operands[1], 0)));
3596 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3597 RTX_UNCHANGING_P (operands[1]) = 1;
3603 if (GET_CODE (operands[0]) == MEM
3604 && GET_CODE (XEXP (operands[0], 0)) != REG
3605 && ! reload_in_progress)
3607 = replace_equiv_address (operands[0],
3608 copy_addr_to_reg (XEXP (operands[0], 0)));
3610 if (GET_CODE (operands[1]) == MEM
3611 && GET_CODE (XEXP (operands[1], 0)) != REG
3612 && ! reload_in_progress)
3614 = replace_equiv_address (operands[1],
3615 copy_addr_to_reg (XEXP (operands[1], 0)));
3618 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3620 gen_rtx_SET (VOIDmode,
3621 operands[0], operands[1]),
3622 gen_rtx_CLOBBER (VOIDmode,
3623 gen_rtx_SCRATCH (SImode)))));
3632 /* Above, we may have called force_const_mem which may have returned
3633 an invalid address. If we can, fix this up; otherwise, reload will
3634 have to deal with it. */
3635 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3636 operands[1] = validize_mem (operands[1]);
3639 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3642 /* Nonzero if we can use a floating-point register to pass this arg. */
3643 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3644 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3645 && (CUM)->fregno <= FP_ARG_MAX_REG \
3646 && TARGET_HARD_FLOAT && TARGET_FPRS)
3648 /* Nonzero if we can use an AltiVec register to pass this arg. */
3649 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3650 (ALTIVEC_VECTOR_MODE (MODE) \
3651 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3652 && TARGET_ALTIVEC_ABI \
3653 && (DEFAULT_ABI == ABI_V4 || (NAMED)))
3655 /* Return a nonzero value to say to return the function value in
3656 memory, just as large structures are always returned. TYPE will be
3657 the data type of the value, and FNTYPE will be the type of the
3658 function doing the returning, or @code{NULL} for libcalls.
3660 The AIX ABI for the RS/6000 specifies that all structures are
3661 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3662 specifies that structures <= 8 bytes are returned in r3/r4, but a
3663 draft put them in memory, and GCC used to implement the draft
3664 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3665 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3666 compatibility can change DRAFT_V4_STRUCT_RET to override the
3667 default, and -m switches get the final word. See
3668 rs6000_override_options for more details.
3670 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3671 long double support is enabled. These values are returned in memory.
3673 int_size_in_bytes returns -1 for variable size objects, which go in
3674 memory always. The cast to unsigned makes -1 > 8. */
3677 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3679 if (AGGREGATE_TYPE_P (type)
3680 && (TARGET_AIX_STRUCT_RET
3681 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3683 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3688 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3689 for a call to a function whose data type is FNTYPE.
3690 For a library call, FNTYPE is 0.
3692 For incoming args we set the number of arguments in the prototype large
3693 so we never return a PARALLEL. */
3696 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3697 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3699 static CUMULATIVE_ARGS zero_cumulative;
3701 *cum = zero_cumulative;
3703 cum->fregno = FP_ARG_MIN_REG;
3704 cum->vregno = ALTIVEC_ARG_MIN_REG;
3705 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3706 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3707 ? CALL_LIBCALL : CALL_NORMAL);
3708 cum->sysv_gregno = GP_ARG_MIN_REG;
3709 cum->stdarg = fntype
3710 && (TYPE_ARG_TYPES (fntype) != 0
3711 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3712 != void_type_node));
3715 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3717 else if (cum->prototype)
3718 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3719 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3720 || rs6000_return_in_memory (TREE_TYPE (fntype),
3724 cum->nargs_prototype = 0;
3726 /* Check for a longcall attribute. */
3728 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3729 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3730 cum->call_cookie = CALL_LONG;
3732 if (TARGET_DEBUG_ARG)
3734 fprintf (stderr, "\ninit_cumulative_args:");
3737 tree ret_type = TREE_TYPE (fntype);
3738 fprintf (stderr, " ret code = %s,",
3739 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3742 if (cum->call_cookie & CALL_LONG)
3743 fprintf (stderr, " longcall,");
3745 fprintf (stderr, " proto = %d, nargs = %d\n",
3746 cum->prototype, cum->nargs_prototype);
3750 /* If defined, a C expression which determines whether, and in which
3751 direction, to pad out an argument with extra space. The value
3752 should be of type `enum direction': either `upward' to pad above
3753 the argument, `downward' to pad below, or `none' to inhibit
3756 For the AIX ABI structs are always stored left shifted in their
3760 function_arg_padding (enum machine_mode mode, tree type)
3762 #ifndef AGGREGATE_PADDING_FIXED
3763 #define AGGREGATE_PADDING_FIXED 0
3765 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3766 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3769 if (!AGGREGATE_PADDING_FIXED)
3771 /* GCC used to pass structures of the same size as integer types as
3772 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3773 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3774 passed padded downward, except that -mstrict-align further
3775 muddied the water in that multi-component structures of 2 and 4
3776 bytes in size were passed padded upward.
3778 The following arranges for best compatibility with previous
3779 versions of gcc, but removes the -mstrict-align dependency. */
3780 if (BYTES_BIG_ENDIAN)
3782 HOST_WIDE_INT size = 0;
3784 if (mode == BLKmode)
3786 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3787 size = int_size_in_bytes (type);
3790 size = GET_MODE_SIZE (mode);
3792 if (size == 1 || size == 2 || size == 4)
3798 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3800 if (type != 0 && AGGREGATE_TYPE_P (type))
3804 /* Fall back to the default. */
3805 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3808 /* If defined, a C expression that gives the alignment boundary, in bits,
3809 of an argument with the specified mode and type. If it is not defined,
3810 PARM_BOUNDARY is used for all arguments.
3812 V.4 wants long longs to be double word aligned. */
3815 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3817 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3819 else if (SPE_VECTOR_MODE (mode))
3821 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3824 return PARM_BOUNDARY;
3827 /* Update the data in CUM to advance over an argument
3828 of mode MODE and data type TYPE.
3829 (TYPE is null for libcalls where that information may not be available.) */
3832 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3833 tree type, int named)
3835 cum->nargs_prototype--;
3837 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3839 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3842 /* In variable-argument functions, vector arguments get GPRs allocated
3843 even if they are going to be passed in a vector register. */
3844 if (cum->stdarg && DEFAULT_ABI != ABI_V4)
3848 /* Vector parameters must be 16-byte aligned. This places
3849 them at 2 mod 4 in terms of words in 32-bit mode, since
3850 the parameter save area starts at offset 24 from the
3851 stack. In 64-bit mode, they just have to start on an
3852 even word, since the parameter save area is 16-byte
3853 aligned. Space for GPRs is reserved even if the argument
3854 will be passed in memory. */
3856 align = ((6 - (cum->words & 3)) & 3);
3858 align = cum->words & 1;
3859 cum->words += align + RS6000_ARG_SIZE (mode, type);
3861 if (TARGET_DEBUG_ARG)
3863 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
3865 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
3866 cum->nargs_prototype, cum->prototype,
3867 GET_MODE_NAME (mode));
3871 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3873 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3875 else if (DEFAULT_ABI == ABI_V4)
3877 if (TARGET_HARD_FLOAT && TARGET_FPRS
3878 && (mode == SFmode || mode == DFmode))
3880 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3885 cum->words += cum->words & 1;
3886 cum->words += RS6000_ARG_SIZE (mode, type);
3892 int gregno = cum->sysv_gregno;
3894 /* Aggregates and IEEE quad get passed by reference. */
3895 if ((type && AGGREGATE_TYPE_P (type))
3899 n_words = RS6000_ARG_SIZE (mode, type);
3901 /* Long long and SPE vectors are put in odd registers. */
3902 if (n_words == 2 && (gregno & 1) == 0)
3905 /* Long long and SPE vectors are not split between registers
3907 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3909 /* Long long is aligned on the stack. */
3911 cum->words += cum->words & 1;
3912 cum->words += n_words;
3915 /* Note: continuing to accumulate gregno past when we've started
3916 spilling to the stack indicates the fact that we've started
3917 spilling to the stack to expand_builtin_saveregs. */
3918 cum->sysv_gregno = gregno + n_words;
3921 if (TARGET_DEBUG_ARG)
3923 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3924 cum->words, cum->fregno);
3925 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3926 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3927 fprintf (stderr, "mode = %4s, named = %d\n",
3928 GET_MODE_NAME (mode), named);
3933 int align = (TARGET_32BIT && (cum->words & 1) != 0
3934 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3936 cum->words += align + RS6000_ARG_SIZE (mode, type);
3938 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3939 && TARGET_HARD_FLOAT && TARGET_FPRS)
3940 cum->fregno += (mode == TFmode ? 2 : 1);
3942 if (TARGET_DEBUG_ARG)
3944 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3945 cum->words, cum->fregno);
3946 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3947 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3948 fprintf (stderr, "named = %d, align = %d\n", named, align);
3953 /* Determine where to put a SIMD argument on the SPE. */
3956 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3961 int gregno = cum->sysv_gregno;
3962 int n_words = RS6000_ARG_SIZE (mode, type);
3964 /* SPE vectors are put in odd registers. */
3965 if (n_words == 2 && (gregno & 1) == 0)
3968 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3971 enum machine_mode m = SImode;
3973 r1 = gen_rtx_REG (m, gregno);
3974 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3975 r2 = gen_rtx_REG (m, gregno + 1);
3976 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3977 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3984 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3985 return gen_rtx_REG (mode, cum->sysv_gregno);
3991 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
3994 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3995 tree type, int align_words)
3999 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4000 in vararg list into zero, one or two GPRs */
4001 if (align_words >= GP_ARG_NUM_REG)
4002 return gen_rtx_PARALLEL (DFmode,
4004 gen_rtx_EXPR_LIST (VOIDmode,
4005 NULL_RTX, const0_rtx),
4006 gen_rtx_EXPR_LIST (VOIDmode,
4010 else if (align_words + RS6000_ARG_SIZE (mode, type)
4012 /* If this is partially on the stack, then we only
4013 include the portion actually in registers here. */
4014 return gen_rtx_PARALLEL (DFmode,
4016 gen_rtx_EXPR_LIST (VOIDmode,
4017 gen_rtx_REG (SImode,
4021 gen_rtx_EXPR_LIST (VOIDmode,
4026 /* split a DFmode arg into two GPRs */
4027 return gen_rtx_PARALLEL (DFmode,
4029 gen_rtx_EXPR_LIST (VOIDmode,
4030 gen_rtx_REG (SImode,
4034 gen_rtx_EXPR_LIST (VOIDmode,
4035 gen_rtx_REG (SImode,
4039 gen_rtx_EXPR_LIST (VOIDmode,
4040 gen_rtx_REG (mode, cum->fregno),
4043 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4045 else if (mode == DImode)
4047 if (align_words < GP_ARG_NUM_REG - 1)
4048 return gen_rtx_PARALLEL (DImode,
4050 gen_rtx_EXPR_LIST (VOIDmode,
4051 gen_rtx_REG (SImode,
4055 gen_rtx_EXPR_LIST (VOIDmode,
4056 gen_rtx_REG (SImode,
4060 else if (align_words == GP_ARG_NUM_REG - 1)
4061 return gen_rtx_PARALLEL (DImode,
4063 gen_rtx_EXPR_LIST (VOIDmode,
4064 NULL_RTX, const0_rtx),
4065 gen_rtx_EXPR_LIST (VOIDmode,
4066 gen_rtx_REG (SImode,
4071 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4074 int size = int_size_in_bytes (type);
4075 int no_units = ((size - 1) / 4) + 1;
4076 int max_no_words = GP_ARG_NUM_REG - align_words;
4077 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4078 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4080 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4082 for (k=0; k < rtlvec_len; k++)
4083 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4084 gen_rtx_REG (SImode,
4087 k == 0 ? const0_rtx : GEN_INT (k*4));
4089 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4095 /* Determine where to put an argument to a function.
4096 Value is zero to push the argument on the stack,
4097 or a hard register in which to store the argument.
4099 MODE is the argument's machine mode.
4100 TYPE is the data type of the argument (as a tree).
4101 This is null for libcalls where that information may
4103 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4104 the preceding args and about the function being called.
4105 NAMED is nonzero if this argument is a named parameter
4106 (otherwise it is an extra parameter matching an ellipsis).
4108 On RS/6000 the first eight words of non-FP are normally in registers
4109 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4110 Under V.4, the first 8 FP args are in registers.
4112 If this is floating-point and no prototype is specified, we use
4113 both an FP and integer register (or possibly FP reg and stack). Library
4114 functions (when CALL_LIBCALL is set) always have the proper types for args,
4115 so we can pass the FP value just in one register. emit_library_function
4116 doesn't support PARALLEL anyway. */
4119 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4120 tree type, int named)
4122 enum rs6000_abi abi = DEFAULT_ABI;
4124 /* Return a marker to indicate whether CR1 needs to set or clear the
4125 bit that V.4 uses to say fp args were passed in registers.
4126 Assume that we don't need the marker for software floating point,
4127 or compiler generated library calls. */
4128 if (mode == VOIDmode)
4131 && cum->nargs_prototype < 0
4132 && (cum->call_cookie & CALL_LIBCALL) == 0
4133 && (cum->prototype || TARGET_NO_PROTOTYPE))
4135 /* For the SPE, we need to crxor CR6 always. */
4137 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4138 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4139 return GEN_INT (cum->call_cookie
4140 | ((cum->fregno == FP_ARG_MIN_REG)
4141 ? CALL_V4_SET_FP_ARGS
4142 : CALL_V4_CLEAR_FP_ARGS));
4145 return GEN_INT (cum->call_cookie);
4148 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4149 return gen_rtx_REG (mode, cum->vregno);
4150 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4152 if (named || abi == ABI_V4)
4156 /* Vector parameters to varargs functions under AIX or Darwin
4157 get passed in memory and possibly also in GPRs. */
4158 int align, align_words;
4159 enum machine_mode part_mode = mode;
4161 /* Vector parameters must be 16-byte aligned. This places them at
4162 2 mod 4 in terms of words in 32-bit mode, since the parameter
4163 save area starts at offset 24 from the stack. In 64-bit mode,
4164 they just have to start on an even word, since the parameter
4165 save area is 16-byte aligned. */
4167 align = ((6 - (cum->words & 3)) & 3);
4169 align = cum->words & 1;
4170 align_words = cum->words + align;
4172 /* Out of registers? Memory, then. */
4173 if (align_words >= GP_ARG_NUM_REG)
4176 /* The vector value goes in GPRs. Only the part of the
4177 value in GPRs is reported here. */
4178 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4180 /* Fortunately, there are only two possibilites, the value
4181 is either wholly in GPRs or half in GPRs and half not. */
4184 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4187 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4188 return rs6000_spe_function_arg (cum, mode, type);
4189 else if (abi == ABI_V4)
4191 if (TARGET_HARD_FLOAT && TARGET_FPRS
4192 && (mode == SFmode || mode == DFmode))
4194 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4195 return gen_rtx_REG (mode, cum->fregno);
4202 int gregno = cum->sysv_gregno;
4204 /* Aggregates and IEEE quad get passed by reference. */
4205 if ((type && AGGREGATE_TYPE_P (type))
4209 n_words = RS6000_ARG_SIZE (mode, type);
4211 /* Long long and SPE vectors are put in odd registers. */
4212 if (n_words == 2 && (gregno & 1) == 0)
4215 /* Long long do not split between registers and stack. */
4216 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4217 return gen_rtx_REG (mode, gregno);
4224 int align = (TARGET_32BIT && (cum->words & 1) != 0
4225 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4226 int align_words = cum->words + align;
4228 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4231 if (TARGET_32BIT && TARGET_POWERPC64
4232 && (mode == DFmode || mode == DImode || mode == BLKmode))
4233 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4235 if (USE_FP_FOR_ARG_P (cum, mode, type))
4238 || ((cum->nargs_prototype > 0)
4239 /* IBM AIX extended its linkage convention definition always
4240 to require FP args after register save area hole on the
4242 && (DEFAULT_ABI != ABI_AIX
4244 || (align_words < GP_ARG_NUM_REG))))
4245 return gen_rtx_REG (mode, cum->fregno);
4247 return gen_rtx_PARALLEL (mode,
4249 gen_rtx_EXPR_LIST (VOIDmode,
4250 ((align_words >= GP_ARG_NUM_REG)
4253 + RS6000_ARG_SIZE (mode, type)
4255 /* If this is partially on the stack, then
4256 we only include the portion actually
4257 in registers here. */
4258 ? gen_rtx_REG (SImode,
4259 GP_ARG_MIN_REG + align_words)
4260 : gen_rtx_REG (mode,
4261 GP_ARG_MIN_REG + align_words))),
4263 gen_rtx_EXPR_LIST (VOIDmode,
4264 gen_rtx_REG (mode, cum->fregno),
4267 else if (align_words < GP_ARG_NUM_REG)
4268 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4274 /* For an arg passed partly in registers and partly in memory,
4275 this is the number of registers used.
4276 For args passed entirely in registers or entirely in memory, zero. */
4279 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4280 tree type, int named)
4282 if (DEFAULT_ABI == ABI_V4)
4285 if (USE_FP_FOR_ARG_P (cum, mode, type)
4286 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4288 if (cum->nargs_prototype >= 0)
4292 if (cum->words < GP_ARG_NUM_REG
4293 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4295 int ret = GP_ARG_NUM_REG - cum->words;
4296 if (ret && TARGET_DEBUG_ARG)
4297 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4305 /* A C expression that indicates when an argument must be passed by
4306 reference. If nonzero for an argument, a copy of that argument is
4307 made in memory and a pointer to the argument is passed instead of
4308 the argument itself. The pointer is passed in whatever way is
4309 appropriate for passing a pointer to that type.
4311 Under V.4, structures and unions are passed by reference.
4313 As an extension to all ABIs, variable sized types are passed by
4317 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4318 enum machine_mode mode ATTRIBUTE_UNUSED,
4319 tree type, int named ATTRIBUTE_UNUSED)
4321 if (DEFAULT_ABI == ABI_V4
4322 && ((type && AGGREGATE_TYPE_P (type))
4325 if (TARGET_DEBUG_ARG)
4326 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4330 return type && int_size_in_bytes (type) < 0;
4333 /* Perform any needed actions needed for a function that is receiving a
4334 variable number of arguments.
4338 MODE and TYPE are the mode and type of the current parameter.
4340 PRETEND_SIZE is a variable that should be set to the amount of stack
4341 that must be pushed by the prolog to pretend that our caller pushed
4344 Normally, this macro will push all remaining incoming registers on the
4345 stack and set PRETEND_SIZE to the length of the registers pushed. */
4348 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4349 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4351 CUMULATIVE_ARGS next_cum;
4352 int reg_size = TARGET_32BIT ? 4 : 8;
4353 rtx save_area = NULL_RTX, mem;
4354 int first_reg_offset, set;
4356 /* Skip the last named argument. */
4358 function_arg_advance (&next_cum, mode, type, 1);
4360 if (DEFAULT_ABI == ABI_V4)
4362 /* Indicate to allocate space on the stack for varargs save area. */
4363 cfun->machine->sysv_varargs_p = 1;
4365 save_area = plus_constant (virtual_stack_vars_rtx,
4366 - RS6000_VARARGS_SIZE);
4368 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4372 first_reg_offset = next_cum.words;
4373 save_area = virtual_incoming_args_rtx;
4374 cfun->machine->sysv_varargs_p = 0;
4376 if (MUST_PASS_IN_STACK (mode, type))
4377 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4380 set = get_varargs_alias_set ();
4381 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4383 mem = gen_rtx_MEM (BLKmode,
4384 plus_constant (save_area,
4385 first_reg_offset * reg_size)),
4386 set_mem_alias_set (mem, set);
4387 set_mem_align (mem, BITS_PER_WORD);
4389 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4390 GP_ARG_NUM_REG - first_reg_offset);
4393 /* Save FP registers if needed. */
4394 if (DEFAULT_ABI == ABI_V4
4395 && TARGET_HARD_FLOAT && TARGET_FPRS
4397 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4399 int fregno = next_cum.fregno;
4400 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4401 rtx lab = gen_label_rtx ();
4402 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4404 emit_jump_insn (gen_rtx_SET (VOIDmode,
4406 gen_rtx_IF_THEN_ELSE (VOIDmode,
4407 gen_rtx_NE (VOIDmode, cr1,
4409 gen_rtx_LABEL_REF (VOIDmode, lab),
4412 while (fregno <= FP_ARG_V4_MAX_REG)
4414 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4415 set_mem_alias_set (mem, set);
4416 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4425 /* Create the va_list data type. */
4428 rs6000_build_builtin_va_list (void)
4430 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4432 /* For AIX, prefer 'char *' because that's what the system
4433 header files like. */
4434 if (DEFAULT_ABI != ABI_V4)
4435 return build_pointer_type (char_type_node);
4437 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4438 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4440 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4441 unsigned_char_type_node);
4442 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4443 unsigned_char_type_node);
4444 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4446 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4447 short_unsigned_type_node);
4448 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4450 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4453 DECL_FIELD_CONTEXT (f_gpr) = record;
4454 DECL_FIELD_CONTEXT (f_fpr) = record;
4455 DECL_FIELD_CONTEXT (f_res) = record;
4456 DECL_FIELD_CONTEXT (f_ovf) = record;
4457 DECL_FIELD_CONTEXT (f_sav) = record;
4459 TREE_CHAIN (record) = type_decl;
4460 TYPE_NAME (record) = type_decl;
4461 TYPE_FIELDS (record) = f_gpr;
4462 TREE_CHAIN (f_gpr) = f_fpr;
4463 TREE_CHAIN (f_fpr) = f_res;
4464 TREE_CHAIN (f_res) = f_ovf;
4465 TREE_CHAIN (f_ovf) = f_sav;
4467 layout_type (record);
4469 /* The correct type is an array type of one element. */
4470 return build_array_type (record, build_index_type (size_zero_node));
4473 /* Implement va_start. */
4476 rs6000_va_start (tree valist, rtx nextarg)
4478 HOST_WIDE_INT words, n_gpr, n_fpr;
4479 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4480 tree gpr, fpr, ovf, sav, t;
4482 /* Only SVR4 needs something special. */
4483 if (DEFAULT_ABI != ABI_V4)
4485 std_expand_builtin_va_start (valist, nextarg);
4489 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4490 f_fpr = TREE_CHAIN (f_gpr);
4491 f_res = TREE_CHAIN (f_fpr);
4492 f_ovf = TREE_CHAIN (f_res);
4493 f_sav = TREE_CHAIN (f_ovf);
4495 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4496 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4497 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4498 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4499 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4501 /* Count number of gp and fp argument registers used. */
4502 words = current_function_args_info.words;
4503 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4504 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4506 if (TARGET_DEBUG_ARG)
4507 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4508 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4509 words, n_gpr, n_fpr);
4511 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4512 TREE_SIDE_EFFECTS (t) = 1;
4513 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4515 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4516 TREE_SIDE_EFFECTS (t) = 1;
4517 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4519 /* Find the overflow area. */
4520 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4522 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4523 build_int_2 (words * UNITS_PER_WORD, 0));
4524 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4525 TREE_SIDE_EFFECTS (t) = 1;
4526 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4528 /* Find the register save area. */
4529 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4530 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4531 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4532 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4533 TREE_SIDE_EFFECTS (t) = 1;
4534 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4537 /* Implement va_arg. */
4540 rs6000_va_arg (tree valist, tree type)
4542 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4543 tree gpr, fpr, ovf, sav, reg, t, u;
4544 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4545 rtx lab_false, lab_over, addr_rtx, r;
4547 if (DEFAULT_ABI != ABI_V4)
4549 /* Variable sized types are passed by reference. */
4550 if (int_size_in_bytes (type) < 0)
4552 u = build_pointer_type (type);
4554 /* Args grow upward. */
4555 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4556 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4557 TREE_SIDE_EFFECTS (t) = 1;
4559 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4560 TREE_SIDE_EFFECTS (t) = 1;
4562 t = build1 (INDIRECT_REF, u, t);
4563 TREE_SIDE_EFFECTS (t) = 1;
4565 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4569 /* Altivec arguments must be aligned to a 128-bit boundary. */
4570 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4572 tree vtype = TREE_TYPE (valist);
4573 tree new_valist, modify;
4575 /* Round address up to multiple of 16. Computes
4577 new_valist = fold (build (BIT_AND_EXPR, vtype,
4578 fold (build (PLUS_EXPR, vtype, valist,
4579 build_int_2 (15, 0))),
4580 build_int_2 (~15, -1)));
4582 /* Update valist. */
4583 modify = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
4585 TREE_SIDE_EFFECTS (modify) = 1;
4586 expand_expr (modify, const0_rtx, VOIDmode, EXPAND_NORMAL);
4589 return std_expand_builtin_va_arg (valist, type);
4593 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4594 f_fpr = TREE_CHAIN (f_gpr);
4595 f_res = TREE_CHAIN (f_fpr);
4596 f_ovf = TREE_CHAIN (f_res);
4597 f_sav = TREE_CHAIN (f_ovf);
4599 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4600 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4601 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4602 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4603 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4605 size = int_size_in_bytes (type);
4606 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4608 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4610 /* Aggregates and long doubles are passed by reference. */
4616 size = UNITS_PER_WORD;
4619 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4621 /* FP args go in FP registers, if present. */
4630 /* Otherwise into GP registers. */
4638 /* Pull the value out of the saved registers ... */
4640 lab_false = gen_label_rtx ();
4641 lab_over = gen_label_rtx ();
4642 addr_rtx = gen_reg_rtx (Pmode);
4644 /* AltiVec vectors never go in registers. */
4645 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4647 TREE_THIS_VOLATILE (reg) = 1;
4648 emit_cmp_and_jump_insns
4649 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4650 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4653 /* Long long is aligned in the registers. */
4656 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4657 build_int_2 (n_reg - 1, 0));
4658 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4659 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4660 TREE_SIDE_EFFECTS (u) = 1;
4661 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4665 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4669 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4670 build_int_2 (n_reg, 0));
4671 TREE_SIDE_EFFECTS (u) = 1;
4673 u = build1 (CONVERT_EXPR, integer_type_node, u);
4674 TREE_SIDE_EFFECTS (u) = 1;
4676 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4677 TREE_SIDE_EFFECTS (u) = 1;
4679 t = build (PLUS_EXPR, ptr_type_node, t, u);
4680 TREE_SIDE_EFFECTS (t) = 1;
4682 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4684 emit_move_insn (addr_rtx, r);
4686 emit_jump_insn (gen_jump (lab_over));
4690 emit_label (lab_false);
4692 /* ... otherwise out of the overflow area. */
4694 /* Make sure we don't find reg 7 for the next int arg.
4696 All AltiVec vectors go in the overflow area. So in the AltiVec
4697 case we need to get the vectors from the overflow area, but
4698 remember where the GPRs and FPRs are. */
4699 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4700 || !TARGET_ALTIVEC))
4702 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4703 TREE_SIDE_EFFECTS (t) = 1;
4704 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4707 /* Care for on-stack alignment if needed. */
4714 /* AltiVec vectors are 16 byte aligned. */
4715 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4720 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4721 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4725 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4727 emit_move_insn (addr_rtx, r);
4729 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4730 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4731 TREE_SIDE_EFFECTS (t) = 1;
4732 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4734 emit_label (lab_over);
4738 r = gen_rtx_MEM (Pmode, addr_rtx);
4739 set_mem_alias_set (r, get_varargs_alias_set ());
4740 emit_move_insn (addr_rtx, r);
4748 #define def_builtin(MASK, NAME, TYPE, CODE) \
4750 if ((MASK) & target_flags) \
4751 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4755 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4757 static const struct builtin_description bdesc_3arg[] =
4759 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4760 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4761 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4762 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4763 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4764 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4765 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4766 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4767 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4768 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4769 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4770 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4771 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4772 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4773 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4774 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4775 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4776 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4777 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4778 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4779 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4780 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4781 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4784 /* DST operations: void foo (void *, const int, const char). */
4786 static const struct builtin_description bdesc_dst[] =
4788 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4789 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4790 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4791 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4794 /* Simple binary operations: VECc = foo (VECa, VECb). */
4796 static struct builtin_description bdesc_2arg[] =
4798 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4799 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4800 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4801 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4802 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4803 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4804 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4805 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4806 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4807 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4808 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4809 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4810 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4811 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4812 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4813 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4814 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4815 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4816 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4817 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4818 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4819 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4820 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4821 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4822 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4823 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4824 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4825 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4826 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4827 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4828 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4829 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4830 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4831 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4832 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4833 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4834 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4835 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4836 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4837 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4838 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4839 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4840 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4841 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4842 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4843 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4844 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4845 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4846 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4847 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4848 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4849 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4850 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4851 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4852 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4853 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4854 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4856 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4857 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4858 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4859 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4860 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4861 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4862 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4863 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4864 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4869 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4870 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4872 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4873 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4874 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4875 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4876 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4877 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4878 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4879 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4880 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4881 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4882 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4883 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4884 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4885 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4886 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4887 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4888 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4889 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4890 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4891 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4892 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4893 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4894 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4895 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4896 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4897 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4898 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4899 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4900 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4901 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4902 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4903 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4904 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4905 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4906 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4907 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4908 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4909 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4910 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4912 /* Place holder, leave as first spe builtin. */
4913 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4914 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4915 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4916 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4917 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4918 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4919 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4920 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4921 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4922 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4923 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4924 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4925 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4926 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4927 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4928 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4929 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4930 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4931 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4932 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4933 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4934 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4935 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4936 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4937 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4938 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4939 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4940 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4941 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4942 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4943 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4944 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4945 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4946 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4947 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4948 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4949 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4950 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4951 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4952 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4953 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4954 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4955 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4956 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4957 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4958 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4959 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4960 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4961 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4962 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4963 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4964 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4965 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4966 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4967 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4968 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4969 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4970 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4971 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4972 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4973 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4974 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4975 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4976 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4977 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4978 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4979 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4980 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4981 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4982 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4983 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4984 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4985 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4986 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4987 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4988 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4989 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4990 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4991 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4992 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4993 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4994 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4995 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4996 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4997 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4998 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4999 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5000 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5001 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5002 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5003 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5004 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5005 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5006 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5007 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5008 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5009 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5010 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5011 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5012 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5013 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5014 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5015 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5016 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5017 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5018 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5019 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5020 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5021 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5023 /* SPE binary operations expecting a 5-bit unsigned literal. */
5024 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5026 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5027 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5028 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5029 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5030 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5031 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5032 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5033 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5034 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5035 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5036 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5037 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5038 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5039 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5040 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5041 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5042 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5043 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5044 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5045 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5046 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5047 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5048 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5049 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5050 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5051 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5053 /* Place-holder. Leave as last binary SPE builtin. */
5054 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5057 /* AltiVec predicates. */
5059 struct builtin_description_predicates
5061 const unsigned int mask;
5062 const enum insn_code icode;
5064 const char *const name;
5065 const enum rs6000_builtins code;
5068 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5070 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5071 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5072 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5073 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5074 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5075 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5076 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5077 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5078 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5079 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5080 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5081 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5082 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5085 /* SPE predicates. */
5086 static struct builtin_description bdesc_spe_predicates[] =
5088 /* Place-holder. Leave as first. */
5089 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5090 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5091 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5092 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5093 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5094 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5095 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5096 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5097 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5098 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5099 /* Place-holder. Leave as last. */
5100 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5103 /* SPE evsel predicates. */
5104 static struct builtin_description bdesc_spe_evsel[] =
5106 /* Place-holder. Leave as first. */
5107 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5108 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5109 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5110 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5111 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5112 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5113 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5114 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5115 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5116 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5117 /* Place-holder. Leave as last. */
5118 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5121 /* ABS* operations. */
5123 static const struct builtin_description bdesc_abs[] =
5125 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5126 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5127 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5128 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5129 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5130 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5131 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5134 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5137 static struct builtin_description bdesc_1arg[] =
5139 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5140 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5141 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5142 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5143 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5144 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5145 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5146 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5147 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5148 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5149 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5150 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5151 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5152 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5153 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5154 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5155 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5157 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5158 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5159 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5160 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5161 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5162 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5163 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5164 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5165 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5166 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5167 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5168 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5169 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5170 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5171 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5172 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5173 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5174 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5175 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5176 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5177 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5178 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5179 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5180 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5181 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5182 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5183 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5184 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5185 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5186 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5188 /* Place-holder. Leave as last unary SPE builtin. */
5189 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5193 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5196 tree arg0 = TREE_VALUE (arglist);
5197 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5198 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5199 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5201 if (icode == CODE_FOR_nothing)
5202 /* Builtin not supported on this processor. */
5205 /* If we got invalid arguments bail out before generating bad rtl. */
5206 if (arg0 == error_mark_node)
5209 if (icode == CODE_FOR_altivec_vspltisb
5210 || icode == CODE_FOR_altivec_vspltish
5211 || icode == CODE_FOR_altivec_vspltisw
5212 || icode == CODE_FOR_spe_evsplatfi
5213 || icode == CODE_FOR_spe_evsplati)
5215 /* Only allow 5-bit *signed* literals. */
5216 if (GET_CODE (op0) != CONST_INT
5217 || INTVAL (op0) > 0x1f
5218 || INTVAL (op0) < -0x1f)
5220 error ("argument 1 must be a 5-bit signed literal");
5226 || GET_MODE (target) != tmode
5227 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5228 target = gen_reg_rtx (tmode);
5230 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5231 op0 = copy_to_mode_reg (mode0, op0);
5233 pat = GEN_FCN (icode) (target, op0);
5242 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5244 rtx pat, scratch1, scratch2;
5245 tree arg0 = TREE_VALUE (arglist);
5246 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5247 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5248 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5250 /* If we have invalid arguments, bail out before generating bad rtl. */
5251 if (arg0 == error_mark_node)
5255 || GET_MODE (target) != tmode
5256 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5257 target = gen_reg_rtx (tmode);
5259 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5260 op0 = copy_to_mode_reg (mode0, op0);
5262 scratch1 = gen_reg_rtx (mode0);
5263 scratch2 = gen_reg_rtx (mode0);
5265 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5274 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5277 tree arg0 = TREE_VALUE (arglist);
5278 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5279 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5280 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5281 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5282 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5283 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5285 if (icode == CODE_FOR_nothing)
5286 /* Builtin not supported on this processor. */
5289 /* If we got invalid arguments bail out before generating bad rtl. */
5290 if (arg0 == error_mark_node || arg1 == error_mark_node)
5293 if (icode == CODE_FOR_altivec_vcfux
5294 || icode == CODE_FOR_altivec_vcfsx
5295 || icode == CODE_FOR_altivec_vctsxs
5296 || icode == CODE_FOR_altivec_vctuxs
5297 || icode == CODE_FOR_altivec_vspltb
5298 || icode == CODE_FOR_altivec_vsplth
5299 || icode == CODE_FOR_altivec_vspltw
5300 || icode == CODE_FOR_spe_evaddiw
5301 || icode == CODE_FOR_spe_evldd
5302 || icode == CODE_FOR_spe_evldh
5303 || icode == CODE_FOR_spe_evldw
5304 || icode == CODE_FOR_spe_evlhhesplat
5305 || icode == CODE_FOR_spe_evlhhossplat
5306 || icode == CODE_FOR_spe_evlhhousplat
5307 || icode == CODE_FOR_spe_evlwhe
5308 || icode == CODE_FOR_spe_evlwhos
5309 || icode == CODE_FOR_spe_evlwhou
5310 || icode == CODE_FOR_spe_evlwhsplat
5311 || icode == CODE_FOR_spe_evlwwsplat
5312 || icode == CODE_FOR_spe_evrlwi
5313 || icode == CODE_FOR_spe_evslwi
5314 || icode == CODE_FOR_spe_evsrwis
5315 || icode == CODE_FOR_spe_evsubifw
5316 || icode == CODE_FOR_spe_evsrwiu)
5318 /* Only allow 5-bit unsigned literals. */
5319 if (TREE_CODE (arg1) != INTEGER_CST
5320 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5322 error ("argument 2 must be a 5-bit unsigned literal");
5328 || GET_MODE (target) != tmode
5329 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5330 target = gen_reg_rtx (tmode);
5332 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5333 op0 = copy_to_mode_reg (mode0, op0);
5334 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5335 op1 = copy_to_mode_reg (mode1, op1);
5337 pat = GEN_FCN (icode) (target, op0, op1);
5346 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5347 tree arglist, rtx target)
5350 tree cr6_form = TREE_VALUE (arglist);
5351 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5352 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5353 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5354 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5355 enum machine_mode tmode = SImode;
5356 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5357 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5360 if (TREE_CODE (cr6_form) != INTEGER_CST)
5362 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5366 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5371 /* If we have invalid arguments, bail out before generating bad rtl. */
5372 if (arg0 == error_mark_node || arg1 == error_mark_node)
5376 || GET_MODE (target) != tmode
5377 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5378 target = gen_reg_rtx (tmode);
5380 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5381 op0 = copy_to_mode_reg (mode0, op0);
5382 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5383 op1 = copy_to_mode_reg (mode1, op1);
5385 scratch = gen_reg_rtx (mode0);
5387 pat = GEN_FCN (icode) (scratch, op0, op1,
5388 gen_rtx (SYMBOL_REF, Pmode, opcode));
5393 /* The vec_any* and vec_all* predicates use the same opcodes for two
5394 different operations, but the bits in CR6 will be different
5395 depending on what information we want. So we have to play tricks
5396 with CR6 to get the right bits out.
5398 If you think this is disgusting, look at the specs for the
5399 AltiVec predicates. */
5401 switch (cr6_form_int)
5404 emit_insn (gen_cr6_test_for_zero (target));
5407 emit_insn (gen_cr6_test_for_zero_reverse (target));
5410 emit_insn (gen_cr6_test_for_lt (target));
5413 emit_insn (gen_cr6_test_for_lt_reverse (target));
5416 error ("argument 1 of __builtin_altivec_predicate is out of range");
5424 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5427 tree arg0 = TREE_VALUE (arglist);
5428 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5429 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5430 enum machine_mode mode0 = Pmode;
5431 enum machine_mode mode1 = Pmode;
5432 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5433 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5435 if (icode == CODE_FOR_nothing)
5436 /* Builtin not supported on this processor. */
5439 /* If we got invalid arguments bail out before generating bad rtl. */
5440 if (arg0 == error_mark_node || arg1 == error_mark_node)
5444 || GET_MODE (target) != tmode
5445 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5446 target = gen_reg_rtx (tmode);
5448 op1 = copy_to_mode_reg (mode1, op1);
5450 if (op0 == const0_rtx)
5452 addr = gen_rtx_MEM (tmode, op1);
5456 op0 = copy_to_mode_reg (mode0, op0);
5457 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5460 pat = GEN_FCN (icode) (target, addr);
5470 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5472 tree arg0 = TREE_VALUE (arglist);
5473 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5474 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5475 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5476 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5477 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5479 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5480 enum machine_mode mode1 = Pmode;
5481 enum machine_mode mode2 = Pmode;
5483 /* Invalid arguments. Bail before doing anything stoopid! */
5484 if (arg0 == error_mark_node
5485 || arg1 == error_mark_node
5486 || arg2 == error_mark_node)
5489 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5490 op0 = copy_to_mode_reg (tmode, op0);
5492 op2 = copy_to_mode_reg (mode2, op2);
5494 if (op1 == const0_rtx)
5496 addr = gen_rtx_MEM (tmode, op2);
5500 op1 = copy_to_mode_reg (mode1, op1);
5501 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5504 pat = GEN_FCN (icode) (addr, op0);
5511 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5514 tree arg0 = TREE_VALUE (arglist);
5515 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5516 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5517 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5518 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5519 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5520 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5521 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5522 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5523 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5525 if (icode == CODE_FOR_nothing)
5526 /* Builtin not supported on this processor. */
5529 /* If we got invalid arguments bail out before generating bad rtl. */
5530 if (arg0 == error_mark_node
5531 || arg1 == error_mark_node
5532 || arg2 == error_mark_node)
5535 if (icode == CODE_FOR_altivec_vsldoi_4sf
5536 || icode == CODE_FOR_altivec_vsldoi_4si
5537 || icode == CODE_FOR_altivec_vsldoi_8hi
5538 || icode == CODE_FOR_altivec_vsldoi_16qi)
5540 /* Only allow 4-bit unsigned literals. */
5541 if (TREE_CODE (arg2) != INTEGER_CST
5542 || TREE_INT_CST_LOW (arg2) & ~0xf)
5544 error ("argument 3 must be a 4-bit unsigned literal");
5550 || GET_MODE (target) != tmode
5551 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5552 target = gen_reg_rtx (tmode);
5554 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5555 op0 = copy_to_mode_reg (mode0, op0);
5556 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5557 op1 = copy_to_mode_reg (mode1, op1);
5558 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5559 op2 = copy_to_mode_reg (mode2, op2);
5561 pat = GEN_FCN (icode) (target, op0, op1, op2);
5569 /* Expand the lvx builtins. */
5571 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5573 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5574 tree arglist = TREE_OPERAND (exp, 1);
5575 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5577 enum machine_mode tmode, mode0;
5579 enum insn_code icode;
5583 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5584 icode = CODE_FOR_altivec_lvx_16qi;
5586 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5587 icode = CODE_FOR_altivec_lvx_8hi;
5589 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5590 icode = CODE_FOR_altivec_lvx_4si;
5592 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5593 icode = CODE_FOR_altivec_lvx_4sf;
5602 arg0 = TREE_VALUE (arglist);
5603 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5604 tmode = insn_data[icode].operand[0].mode;
5605 mode0 = insn_data[icode].operand[1].mode;
5608 || GET_MODE (target) != tmode
5609 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5610 target = gen_reg_rtx (tmode);
5612 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5613 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5615 pat = GEN_FCN (icode) (target, op0);
5622 /* Expand the stvx builtins. */
5624 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5627 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5628 tree arglist = TREE_OPERAND (exp, 1);
5629 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5631 enum machine_mode mode0, mode1;
5633 enum insn_code icode;
5637 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5638 icode = CODE_FOR_altivec_stvx_16qi;
5640 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5641 icode = CODE_FOR_altivec_stvx_8hi;
5643 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5644 icode = CODE_FOR_altivec_stvx_4si;
5646 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5647 icode = CODE_FOR_altivec_stvx_4sf;
5654 arg0 = TREE_VALUE (arglist);
5655 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5656 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5657 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5658 mode0 = insn_data[icode].operand[0].mode;
5659 mode1 = insn_data[icode].operand[1].mode;
5661 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5662 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5663 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5664 op1 = copy_to_mode_reg (mode1, op1);
5666 pat = GEN_FCN (icode) (op0, op1);
5674 /* Expand the dst builtins. */
5676 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5679 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5680 tree arglist = TREE_OPERAND (exp, 1);
5681 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5682 tree arg0, arg1, arg2;
5683 enum machine_mode mode0, mode1, mode2;
5684 rtx pat, op0, op1, op2;
5685 struct builtin_description *d;
5690 /* Handle DST variants. */
5691 d = (struct builtin_description *) bdesc_dst;
5692 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5693 if (d->code == fcode)
5695 arg0 = TREE_VALUE (arglist);
5696 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5697 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5698 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5699 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5700 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5701 mode0 = insn_data[d->icode].operand[0].mode;
5702 mode1 = insn_data[d->icode].operand[1].mode;
5703 mode2 = insn_data[d->icode].operand[2].mode;
5705 /* Invalid arguments, bail out before generating bad rtl. */
5706 if (arg0 == error_mark_node
5707 || arg1 == error_mark_node
5708 || arg2 == error_mark_node)
5711 if (TREE_CODE (arg2) != INTEGER_CST
5712 || TREE_INT_CST_LOW (arg2) & ~0x3)
5714 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5718 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5719 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5720 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5721 op1 = copy_to_mode_reg (mode1, op1);
5723 pat = GEN_FCN (d->icode) (op0, op1, op2);
5734 /* Expand the builtin in EXP and store the result in TARGET. Store
5735 true in *EXPANDEDP if we found a builtin to expand. */
5737 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5739 struct builtin_description *d;
5740 struct builtin_description_predicates *dp;
5742 enum insn_code icode;
5743 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5744 tree arglist = TREE_OPERAND (exp, 1);
5747 enum machine_mode tmode, mode0;
5748 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5750 target = altivec_expand_ld_builtin (exp, target, expandedp);
5754 target = altivec_expand_st_builtin (exp, target, expandedp);
5758 target = altivec_expand_dst_builtin (exp, target, expandedp);
5766 case ALTIVEC_BUILTIN_STVX:
5767 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5768 case ALTIVEC_BUILTIN_STVEBX:
5769 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5770 case ALTIVEC_BUILTIN_STVEHX:
5771 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5772 case ALTIVEC_BUILTIN_STVEWX:
5773 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5774 case ALTIVEC_BUILTIN_STVXL:
5775 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5777 case ALTIVEC_BUILTIN_MFVSCR:
5778 icode = CODE_FOR_altivec_mfvscr;
5779 tmode = insn_data[icode].operand[0].mode;
5782 || GET_MODE (target) != tmode
5783 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5784 target = gen_reg_rtx (tmode);
5786 pat = GEN_FCN (icode) (target);
5792 case ALTIVEC_BUILTIN_MTVSCR:
5793 icode = CODE_FOR_altivec_mtvscr;
5794 arg0 = TREE_VALUE (arglist);
5795 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5796 mode0 = insn_data[icode].operand[0].mode;
5798 /* If we got invalid arguments bail out before generating bad rtl. */
5799 if (arg0 == error_mark_node)
5802 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5803 op0 = copy_to_mode_reg (mode0, op0);
5805 pat = GEN_FCN (icode) (op0);
5810 case ALTIVEC_BUILTIN_DSSALL:
5811 emit_insn (gen_altivec_dssall ());
5814 case ALTIVEC_BUILTIN_DSS:
5815 icode = CODE_FOR_altivec_dss;
5816 arg0 = TREE_VALUE (arglist);
5817 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5818 mode0 = insn_data[icode].operand[0].mode;
5820 /* If we got invalid arguments bail out before generating bad rtl. */
5821 if (arg0 == error_mark_node)
5824 if (TREE_CODE (arg0) != INTEGER_CST
5825 || TREE_INT_CST_LOW (arg0) & ~0x3)
5827 error ("argument to dss must be a 2-bit unsigned literal");
5831 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5832 op0 = copy_to_mode_reg (mode0, op0);
5834 emit_insn (gen_altivec_dss (op0));
5838 /* Expand abs* operations. */
5839 d = (struct builtin_description *) bdesc_abs;
5840 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5841 if (d->code == fcode)
5842 return altivec_expand_abs_builtin (d->icode, arglist, target);
5844 /* Expand the AltiVec predicates. */
5845 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5846 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5847 if (dp->code == fcode)
5848 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5850 /* LV* are funky. We initialized them differently. */
5853 case ALTIVEC_BUILTIN_LVSL:
5854 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5856 case ALTIVEC_BUILTIN_LVSR:
5857 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5859 case ALTIVEC_BUILTIN_LVEBX:
5860 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5862 case ALTIVEC_BUILTIN_LVEHX:
5863 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5865 case ALTIVEC_BUILTIN_LVEWX:
5866 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5868 case ALTIVEC_BUILTIN_LVXL:
5869 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5871 case ALTIVEC_BUILTIN_LVX:
5872 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5883 /* Binops that need to be initialized manually, but can be expanded
5884 automagically by rs6000_expand_binop_builtin. */
5885 static struct builtin_description bdesc_2arg_spe[] =
5887 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5888 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5889 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5890 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5891 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5892 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5893 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5894 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5895 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5896 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5897 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5898 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5899 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5900 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5901 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5902 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5903 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5904 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5905 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5906 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5907 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5908 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5911 /* Expand the builtin in EXP and store the result in TARGET. Store
5912 true in *EXPANDEDP if we found a builtin to expand.
5914 This expands the SPE builtins that are not simple unary and binary
5917 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
5919 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5920 tree arglist = TREE_OPERAND (exp, 1);
5922 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5923 enum insn_code icode;
5924 enum machine_mode tmode, mode0;
5926 struct builtin_description *d;
5931 /* Syntax check for a 5-bit unsigned immediate. */
5934 case SPE_BUILTIN_EVSTDD:
5935 case SPE_BUILTIN_EVSTDH:
5936 case SPE_BUILTIN_EVSTDW:
5937 case SPE_BUILTIN_EVSTWHE:
5938 case SPE_BUILTIN_EVSTWHO:
5939 case SPE_BUILTIN_EVSTWWE:
5940 case SPE_BUILTIN_EVSTWWO:
5941 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5942 if (TREE_CODE (arg1) != INTEGER_CST
5943 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5945 error ("argument 2 must be a 5-bit unsigned literal");
5953 /* The evsplat*i instructions are not quite generic. */
5956 case SPE_BUILTIN_EVSPLATFI:
5957 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5959 case SPE_BUILTIN_EVSPLATI:
5960 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5966 d = (struct builtin_description *) bdesc_2arg_spe;
5967 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5968 if (d->code == fcode)
5969 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5971 d = (struct builtin_description *) bdesc_spe_predicates;
5972 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5973 if (d->code == fcode)
5974 return spe_expand_predicate_builtin (d->icode, arglist, target);
5976 d = (struct builtin_description *) bdesc_spe_evsel;
5977 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5978 if (d->code == fcode)
5979 return spe_expand_evsel_builtin (d->icode, arglist, target);
5983 case SPE_BUILTIN_EVSTDDX:
5984 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5985 case SPE_BUILTIN_EVSTDHX:
5986 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5987 case SPE_BUILTIN_EVSTDWX:
5988 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5989 case SPE_BUILTIN_EVSTWHEX:
5990 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5991 case SPE_BUILTIN_EVSTWHOX:
5992 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5993 case SPE_BUILTIN_EVSTWWEX:
5994 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5995 case SPE_BUILTIN_EVSTWWOX:
5996 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5997 case SPE_BUILTIN_EVSTDD:
5998 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5999 case SPE_BUILTIN_EVSTDH:
6000 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6001 case SPE_BUILTIN_EVSTDW:
6002 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6003 case SPE_BUILTIN_EVSTWHE:
6004 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6005 case SPE_BUILTIN_EVSTWHO:
6006 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6007 case SPE_BUILTIN_EVSTWWE:
6008 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6009 case SPE_BUILTIN_EVSTWWO:
6010 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6011 case SPE_BUILTIN_MFSPEFSCR:
6012 icode = CODE_FOR_spe_mfspefscr;
6013 tmode = insn_data[icode].operand[0].mode;
6016 || GET_MODE (target) != tmode
6017 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6018 target = gen_reg_rtx (tmode);
6020 pat = GEN_FCN (icode) (target);
6025 case SPE_BUILTIN_MTSPEFSCR:
6026 icode = CODE_FOR_spe_mtspefscr;
6027 arg0 = TREE_VALUE (arglist);
6028 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6029 mode0 = insn_data[icode].operand[0].mode;
6031 if (arg0 == error_mark_node)
6034 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6035 op0 = copy_to_mode_reg (mode0, op0);
6037 pat = GEN_FCN (icode) (op0);
6050 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6052 rtx pat, scratch, tmp;
6053 tree form = TREE_VALUE (arglist);
6054 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6055 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6056 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6057 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6058 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6059 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6063 if (TREE_CODE (form) != INTEGER_CST)
6065 error ("argument 1 of __builtin_spe_predicate must be a constant");
6069 form_int = TREE_INT_CST_LOW (form);
6074 if (arg0 == error_mark_node || arg1 == error_mark_node)
6078 || GET_MODE (target) != SImode
6079 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6080 target = gen_reg_rtx (SImode);
6082 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6083 op0 = copy_to_mode_reg (mode0, op0);
6084 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6085 op1 = copy_to_mode_reg (mode1, op1);
6087 scratch = gen_reg_rtx (CCmode);
6089 pat = GEN_FCN (icode) (scratch, op0, op1);
6094 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6095 _lower_. We use one compare, but look in different bits of the
6096 CR for each variant.
6098 There are 2 elements in each SPE simd type (upper/lower). The CR
6099 bits are set as follows:
6101 BIT0 | BIT 1 | BIT 2 | BIT 3
6102 U | L | (U | L) | (U & L)
6104 So, for an "all" relationship, BIT 3 would be set.
6105 For an "any" relationship, BIT 2 would be set. Etc.
6107 Following traditional nomenclature, these bits map to:
6109 BIT0 | BIT 1 | BIT 2 | BIT 3
6112 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6117 /* All variant. OV bit. */
6119 /* We need to get to the OV bit, which is the ORDERED bit. We
6120 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6121 that's ugly and will trigger a validate_condition_mode abort.
6122 So let's just use another pattern. */
6123 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6125 /* Any variant. EQ bit. */
6129 /* Upper variant. LT bit. */
6133 /* Lower variant. GT bit. */
6138 error ("argument 1 of __builtin_spe_predicate is out of range");
6142 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6143 emit_move_insn (target, tmp);
6148 /* The evsel builtins look like this:
6150 e = __builtin_spe_evsel_OP (a, b, c, d);
6154 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6155 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6159 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6162 tree arg0 = TREE_VALUE (arglist);
6163 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6164 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6165 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6166 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6167 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6168 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6169 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6170 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6171 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6176 if (arg0 == error_mark_node || arg1 == error_mark_node
6177 || arg2 == error_mark_node || arg3 == error_mark_node)
6181 || GET_MODE (target) != mode0
6182 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6183 target = gen_reg_rtx (mode0);
6185 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6186 op0 = copy_to_mode_reg (mode0, op0);
6187 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6188 op1 = copy_to_mode_reg (mode0, op1);
6189 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6190 op2 = copy_to_mode_reg (mode0, op2);
6191 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6192 op3 = copy_to_mode_reg (mode0, op3);
6194 /* Generate the compare. */
6195 scratch = gen_reg_rtx (CCmode);
6196 pat = GEN_FCN (icode) (scratch, op0, op1);
6201 if (mode0 == V2SImode)
6202 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6204 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6209 /* Expand an expression EXP that calls a built-in function,
6210 with result going to TARGET if that's convenient
6211 (and in mode MODE if that's convenient).
6212 SUBTARGET may be used as the target for computing one of EXP's operands.
6213 IGNORE is nonzero if the value is to be ignored. */
6216 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6217 enum machine_mode mode ATTRIBUTE_UNUSED,
6218 int ignore ATTRIBUTE_UNUSED)
6220 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6221 tree arglist = TREE_OPERAND (exp, 1);
6222 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6223 struct builtin_description *d;
6230 ret = altivec_expand_builtin (exp, target, &success);
6237 ret = spe_expand_builtin (exp, target, &success);
6243 if (TARGET_ALTIVEC || TARGET_SPE)
6245 /* Handle simple unary operations. */
6246 d = (struct builtin_description *) bdesc_1arg;
6247 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6248 if (d->code == fcode)
6249 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6251 /* Handle simple binary operations. */
6252 d = (struct builtin_description *) bdesc_2arg;
6253 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6254 if (d->code == fcode)
6255 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6257 /* Handle simple ternary operations. */
6258 d = (struct builtin_description *) bdesc_3arg;
6259 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6260 if (d->code == fcode)
6261 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6269 rs6000_init_builtins (void)
6271 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6272 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6273 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6276 spe_init_builtins ();
6278 altivec_init_builtins ();
6279 if (TARGET_ALTIVEC || TARGET_SPE)
6280 rs6000_common_init_builtins ();
6283 /* Search through a set of builtins and enable the mask bits.
6284 DESC is an array of builtins.
6285 SIZE is the total number of builtins.
6286 START is the builtin enum at which to start.
6287 END is the builtin enum at which to end. */
6289 enable_mask_for_builtins (struct builtin_description *desc, int size,
6290 enum rs6000_builtins start,
6291 enum rs6000_builtins end)
6295 for (i = 0; i < size; ++i)
6296 if (desc[i].code == start)
6302 for (; i < size; ++i)
6304 /* Flip all the bits on. */
6305 desc[i].mask = target_flags;
6306 if (desc[i].code == end)
6312 spe_init_builtins (void)
6314 tree endlink = void_list_node;
6315 tree puint_type_node = build_pointer_type (unsigned_type_node);
6316 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6317 struct builtin_description *d;
6320 tree v2si_ftype_4_v2si
6321 = build_function_type
6322 (opaque_V2SI_type_node,
6323 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6324 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6325 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6326 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6329 tree v2sf_ftype_4_v2sf
6330 = build_function_type
6331 (opaque_V2SF_type_node,
6332 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6333 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6334 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6335 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6338 tree int_ftype_int_v2si_v2si
6339 = build_function_type
6341 tree_cons (NULL_TREE, integer_type_node,
6342 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6343 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6346 tree int_ftype_int_v2sf_v2sf
6347 = build_function_type
6349 tree_cons (NULL_TREE, integer_type_node,
6350 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6351 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6354 tree void_ftype_v2si_puint_int
6355 = build_function_type (void_type_node,
6356 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6357 tree_cons (NULL_TREE, puint_type_node,
6358 tree_cons (NULL_TREE,
6362 tree void_ftype_v2si_puint_char
6363 = build_function_type (void_type_node,
6364 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6365 tree_cons (NULL_TREE, puint_type_node,
6366 tree_cons (NULL_TREE,
6370 tree void_ftype_v2si_pv2si_int
6371 = build_function_type (void_type_node,
6372 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6373 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6374 tree_cons (NULL_TREE,
6378 tree void_ftype_v2si_pv2si_char
6379 = build_function_type (void_type_node,
6380 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6381 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6382 tree_cons (NULL_TREE,
6387 = build_function_type (void_type_node,
6388 tree_cons (NULL_TREE, integer_type_node, endlink));
6391 = build_function_type (integer_type_node, endlink);
6393 tree v2si_ftype_pv2si_int
6394 = build_function_type (opaque_V2SI_type_node,
6395 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6396 tree_cons (NULL_TREE, integer_type_node,
6399 tree v2si_ftype_puint_int
6400 = build_function_type (opaque_V2SI_type_node,
6401 tree_cons (NULL_TREE, puint_type_node,
6402 tree_cons (NULL_TREE, integer_type_node,
6405 tree v2si_ftype_pushort_int
6406 = build_function_type (opaque_V2SI_type_node,
6407 tree_cons (NULL_TREE, pushort_type_node,
6408 tree_cons (NULL_TREE, integer_type_node,
6411 tree v2si_ftype_signed_char
6412 = build_function_type (opaque_V2SI_type_node,
6413 tree_cons (NULL_TREE, signed_char_type_node,
6416 /* The initialization of the simple binary and unary builtins is
6417 done in rs6000_common_init_builtins, but we have to enable the
6418 mask bits here manually because we have run out of `target_flags'
6419 bits. We really need to redesign this mask business. */
6421 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6422 ARRAY_SIZE (bdesc_2arg),
6425 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6426 ARRAY_SIZE (bdesc_1arg),
6428 SPE_BUILTIN_EVSUBFUSIAAW);
6429 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6430 ARRAY_SIZE (bdesc_spe_predicates),
6431 SPE_BUILTIN_EVCMPEQ,
6432 SPE_BUILTIN_EVFSTSTLT);
6433 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6434 ARRAY_SIZE (bdesc_spe_evsel),
6435 SPE_BUILTIN_EVSEL_CMPGTS,
6436 SPE_BUILTIN_EVSEL_FSTSTEQ);
6438 (*lang_hooks.decls.pushdecl)
6439 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6440 opaque_V2SI_type_node));
6442 /* Initialize irregular SPE builtins. */
6444 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6445 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6446 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6447 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6448 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6449 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6450 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6451 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6452 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6453 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6454 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6455 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6456 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6457 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6458 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6459 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6460 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6461 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6464 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6465 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6466 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6467 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6468 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6469 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6470 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6471 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6472 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6473 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6474 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6475 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6476 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6477 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6478 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6479 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6480 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6481 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6482 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6483 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6484 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6485 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6488 d = (struct builtin_description *) bdesc_spe_predicates;
6489 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6493 switch (insn_data[d->icode].operand[1].mode)
6496 type = int_ftype_int_v2si_v2si;
6499 type = int_ftype_int_v2sf_v2sf;
6505 def_builtin (d->mask, d->name, type, d->code);
6508 /* Evsel predicates. */
6509 d = (struct builtin_description *) bdesc_spe_evsel;
6510 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6514 switch (insn_data[d->icode].operand[1].mode)
6517 type = v2si_ftype_4_v2si;
6520 type = v2sf_ftype_4_v2sf;
6526 def_builtin (d->mask, d->name, type, d->code);
6531 altivec_init_builtins (void)
6533 struct builtin_description *d;
6534 struct builtin_description_predicates *dp;
6536 tree pfloat_type_node = build_pointer_type (float_type_node);
6537 tree pint_type_node = build_pointer_type (integer_type_node);
6538 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6539 tree pchar_type_node = build_pointer_type (char_type_node);
6541 tree pvoid_type_node = build_pointer_type (void_type_node);
6543 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6544 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6545 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6546 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6548 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6550 tree int_ftype_int_v4si_v4si
6551 = build_function_type_list (integer_type_node,
6552 integer_type_node, V4SI_type_node,
6553 V4SI_type_node, NULL_TREE);
6554 tree v4sf_ftype_pcfloat
6555 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6556 tree void_ftype_pfloat_v4sf
6557 = build_function_type_list (void_type_node,
6558 pfloat_type_node, V4SF_type_node, NULL_TREE);
6559 tree v4si_ftype_pcint
6560 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6561 tree void_ftype_pint_v4si
6562 = build_function_type_list (void_type_node,
6563 pint_type_node, V4SI_type_node, NULL_TREE);
6564 tree v8hi_ftype_pcshort
6565 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6566 tree void_ftype_pshort_v8hi
6567 = build_function_type_list (void_type_node,
6568 pshort_type_node, V8HI_type_node, NULL_TREE);
6569 tree v16qi_ftype_pcchar
6570 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6571 tree void_ftype_pchar_v16qi
6572 = build_function_type_list (void_type_node,
6573 pchar_type_node, V16QI_type_node, NULL_TREE);
6574 tree void_ftype_v4si
6575 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6576 tree v8hi_ftype_void
6577 = build_function_type (V8HI_type_node, void_list_node);
6578 tree void_ftype_void
6579 = build_function_type (void_type_node, void_list_node);
6581 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6583 tree v16qi_ftype_long_pcvoid
6584 = build_function_type_list (V16QI_type_node,
6585 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6586 tree v8hi_ftype_long_pcvoid
6587 = build_function_type_list (V8HI_type_node,
6588 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6589 tree v4si_ftype_long_pcvoid
6590 = build_function_type_list (V4SI_type_node,
6591 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6593 tree void_ftype_v4si_long_pvoid
6594 = build_function_type_list (void_type_node,
6595 V4SI_type_node, long_integer_type_node,
6596 pvoid_type_node, NULL_TREE);
6597 tree void_ftype_v16qi_long_pvoid
6598 = build_function_type_list (void_type_node,
6599 V16QI_type_node, long_integer_type_node,
6600 pvoid_type_node, NULL_TREE);
6601 tree void_ftype_v8hi_long_pvoid
6602 = build_function_type_list (void_type_node,
6603 V8HI_type_node, long_integer_type_node,
6604 pvoid_type_node, NULL_TREE);
6605 tree int_ftype_int_v8hi_v8hi
6606 = build_function_type_list (integer_type_node,
6607 integer_type_node, V8HI_type_node,
6608 V8HI_type_node, NULL_TREE);
6609 tree int_ftype_int_v16qi_v16qi
6610 = build_function_type_list (integer_type_node,
6611 integer_type_node, V16QI_type_node,
6612 V16QI_type_node, NULL_TREE);
6613 tree int_ftype_int_v4sf_v4sf
6614 = build_function_type_list (integer_type_node,
6615 integer_type_node, V4SF_type_node,
6616 V4SF_type_node, NULL_TREE);
6617 tree v4si_ftype_v4si
6618 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6619 tree v8hi_ftype_v8hi
6620 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6621 tree v16qi_ftype_v16qi
6622 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6623 tree v4sf_ftype_v4sf
6624 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6625 tree void_ftype_pcvoid_int_char
6626 = build_function_type_list (void_type_node,
6627 pcvoid_type_node, integer_type_node,
6628 char_type_node, NULL_TREE);
6630 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6631 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6632 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6633 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6634 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6635 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6636 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6637 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6638 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6639 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6640 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6641 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6642 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6643 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6644 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6645 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6646 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6647 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6648 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6649 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6650 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6651 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6652 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6653 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6654 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6655 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6656 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6657 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6658 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6659 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6660 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6661 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6663 /* Add the DST variants. */
6664 d = (struct builtin_description *) bdesc_dst;
6665 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6666 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6668 /* Initialize the predicates. */
6669 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6670 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6672 enum machine_mode mode1;
6675 mode1 = insn_data[dp->icode].operand[1].mode;
6680 type = int_ftype_int_v4si_v4si;
6683 type = int_ftype_int_v8hi_v8hi;
6686 type = int_ftype_int_v16qi_v16qi;
6689 type = int_ftype_int_v4sf_v4sf;
6695 def_builtin (dp->mask, dp->name, type, dp->code);
6698 /* Initialize the abs* operators. */
6699 d = (struct builtin_description *) bdesc_abs;
6700 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6702 enum machine_mode mode0;
6705 mode0 = insn_data[d->icode].operand[0].mode;
6710 type = v4si_ftype_v4si;
6713 type = v8hi_ftype_v8hi;
6716 type = v16qi_ftype_v16qi;
6719 type = v4sf_ftype_v4sf;
6725 def_builtin (d->mask, d->name, type, d->code);
6730 rs6000_common_init_builtins (void)
6732 struct builtin_description *d;
6735 tree v4sf_ftype_v4sf_v4sf_v16qi
6736 = build_function_type_list (V4SF_type_node,
6737 V4SF_type_node, V4SF_type_node,
6738 V16QI_type_node, NULL_TREE);
6739 tree v4si_ftype_v4si_v4si_v16qi
6740 = build_function_type_list (V4SI_type_node,
6741 V4SI_type_node, V4SI_type_node,
6742 V16QI_type_node, NULL_TREE);
6743 tree v8hi_ftype_v8hi_v8hi_v16qi
6744 = build_function_type_list (V8HI_type_node,
6745 V8HI_type_node, V8HI_type_node,
6746 V16QI_type_node, NULL_TREE);
6747 tree v16qi_ftype_v16qi_v16qi_v16qi
6748 = build_function_type_list (V16QI_type_node,
6749 V16QI_type_node, V16QI_type_node,
6750 V16QI_type_node, NULL_TREE);
6751 tree v4si_ftype_char
6752 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6753 tree v8hi_ftype_char
6754 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6755 tree v16qi_ftype_char
6756 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6757 tree v8hi_ftype_v16qi
6758 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6759 tree v4sf_ftype_v4sf
6760 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6762 tree v2si_ftype_v2si_v2si
6763 = build_function_type_list (opaque_V2SI_type_node,
6764 opaque_V2SI_type_node,
6765 opaque_V2SI_type_node, NULL_TREE);
6767 tree v2sf_ftype_v2sf_v2sf
6768 = build_function_type_list (opaque_V2SF_type_node,
6769 opaque_V2SF_type_node,
6770 opaque_V2SF_type_node, NULL_TREE);
6772 tree v2si_ftype_int_int
6773 = build_function_type_list (opaque_V2SI_type_node,
6774 integer_type_node, integer_type_node,
6777 tree v2si_ftype_v2si
6778 = build_function_type_list (opaque_V2SI_type_node,
6779 opaque_V2SI_type_node, NULL_TREE);
6781 tree v2sf_ftype_v2sf
6782 = build_function_type_list (opaque_V2SF_type_node,
6783 opaque_V2SF_type_node, NULL_TREE);
6785 tree v2sf_ftype_v2si
6786 = build_function_type_list (opaque_V2SF_type_node,
6787 opaque_V2SI_type_node, NULL_TREE);
6789 tree v2si_ftype_v2sf
6790 = build_function_type_list (opaque_V2SI_type_node,
6791 opaque_V2SF_type_node, NULL_TREE);
6793 tree v2si_ftype_v2si_char
6794 = build_function_type_list (opaque_V2SI_type_node,
6795 opaque_V2SI_type_node,
6796 char_type_node, NULL_TREE);
6798 tree v2si_ftype_int_char
6799 = build_function_type_list (opaque_V2SI_type_node,
6800 integer_type_node, char_type_node, NULL_TREE);
6802 tree v2si_ftype_char
6803 = build_function_type_list (opaque_V2SI_type_node,
6804 char_type_node, NULL_TREE);
6806 tree int_ftype_int_int
6807 = build_function_type_list (integer_type_node,
6808 integer_type_node, integer_type_node,
6811 tree v4si_ftype_v4si_v4si
6812 = build_function_type_list (V4SI_type_node,
6813 V4SI_type_node, V4SI_type_node, NULL_TREE);
6814 tree v4sf_ftype_v4si_char
6815 = build_function_type_list (V4SF_type_node,
6816 V4SI_type_node, char_type_node, NULL_TREE);
6817 tree v4si_ftype_v4sf_char
6818 = build_function_type_list (V4SI_type_node,
6819 V4SF_type_node, char_type_node, NULL_TREE);
6820 tree v4si_ftype_v4si_char
6821 = build_function_type_list (V4SI_type_node,
6822 V4SI_type_node, char_type_node, NULL_TREE);
6823 tree v8hi_ftype_v8hi_char
6824 = build_function_type_list (V8HI_type_node,
6825 V8HI_type_node, char_type_node, NULL_TREE);
6826 tree v16qi_ftype_v16qi_char
6827 = build_function_type_list (V16QI_type_node,
6828 V16QI_type_node, char_type_node, NULL_TREE);
6829 tree v16qi_ftype_v16qi_v16qi_char
6830 = build_function_type_list (V16QI_type_node,
6831 V16QI_type_node, V16QI_type_node,
6832 char_type_node, NULL_TREE);
6833 tree v8hi_ftype_v8hi_v8hi_char
6834 = build_function_type_list (V8HI_type_node,
6835 V8HI_type_node, V8HI_type_node,
6836 char_type_node, NULL_TREE);
6837 tree v4si_ftype_v4si_v4si_char
6838 = build_function_type_list (V4SI_type_node,
6839 V4SI_type_node, V4SI_type_node,
6840 char_type_node, NULL_TREE);
6841 tree v4sf_ftype_v4sf_v4sf_char
6842 = build_function_type_list (V4SF_type_node,
6843 V4SF_type_node, V4SF_type_node,
6844 char_type_node, NULL_TREE);
6845 tree v4sf_ftype_v4sf_v4sf
6846 = build_function_type_list (V4SF_type_node,
6847 V4SF_type_node, V4SF_type_node, NULL_TREE);
6848 tree v4sf_ftype_v4sf_v4sf_v4si
6849 = build_function_type_list (V4SF_type_node,
6850 V4SF_type_node, V4SF_type_node,
6851 V4SI_type_node, NULL_TREE);
6852 tree v4sf_ftype_v4sf_v4sf_v4sf
6853 = build_function_type_list (V4SF_type_node,
6854 V4SF_type_node, V4SF_type_node,
6855 V4SF_type_node, NULL_TREE);
6856 tree v4si_ftype_v4si_v4si_v4si
6857 = build_function_type_list (V4SI_type_node,
6858 V4SI_type_node, V4SI_type_node,
6859 V4SI_type_node, NULL_TREE);
6860 tree v8hi_ftype_v8hi_v8hi
6861 = build_function_type_list (V8HI_type_node,
6862 V8HI_type_node, V8HI_type_node, NULL_TREE);
6863 tree v8hi_ftype_v8hi_v8hi_v8hi
6864 = build_function_type_list (V8HI_type_node,
6865 V8HI_type_node, V8HI_type_node,
6866 V8HI_type_node, NULL_TREE);
6867 tree v4si_ftype_v8hi_v8hi_v4si
6868 = build_function_type_list (V4SI_type_node,
6869 V8HI_type_node, V8HI_type_node,
6870 V4SI_type_node, NULL_TREE);
6871 tree v4si_ftype_v16qi_v16qi_v4si
6872 = build_function_type_list (V4SI_type_node,
6873 V16QI_type_node, V16QI_type_node,
6874 V4SI_type_node, NULL_TREE);
6875 tree v16qi_ftype_v16qi_v16qi
6876 = build_function_type_list (V16QI_type_node,
6877 V16QI_type_node, V16QI_type_node, NULL_TREE);
6878 tree v4si_ftype_v4sf_v4sf
6879 = build_function_type_list (V4SI_type_node,
6880 V4SF_type_node, V4SF_type_node, NULL_TREE);
6881 tree v8hi_ftype_v16qi_v16qi
6882 = build_function_type_list (V8HI_type_node,
6883 V16QI_type_node, V16QI_type_node, NULL_TREE);
6884 tree v4si_ftype_v8hi_v8hi
6885 = build_function_type_list (V4SI_type_node,
6886 V8HI_type_node, V8HI_type_node, NULL_TREE);
6887 tree v8hi_ftype_v4si_v4si
6888 = build_function_type_list (V8HI_type_node,
6889 V4SI_type_node, V4SI_type_node, NULL_TREE);
6890 tree v16qi_ftype_v8hi_v8hi
6891 = build_function_type_list (V16QI_type_node,
6892 V8HI_type_node, V8HI_type_node, NULL_TREE);
6893 tree v4si_ftype_v16qi_v4si
6894 = build_function_type_list (V4SI_type_node,
6895 V16QI_type_node, V4SI_type_node, NULL_TREE);
6896 tree v4si_ftype_v16qi_v16qi
6897 = build_function_type_list (V4SI_type_node,
6898 V16QI_type_node, V16QI_type_node, NULL_TREE);
6899 tree v4si_ftype_v8hi_v4si
6900 = build_function_type_list (V4SI_type_node,
6901 V8HI_type_node, V4SI_type_node, NULL_TREE);
6902 tree v4si_ftype_v8hi
6903 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6904 tree int_ftype_v4si_v4si
6905 = build_function_type_list (integer_type_node,
6906 V4SI_type_node, V4SI_type_node, NULL_TREE);
6907 tree int_ftype_v4sf_v4sf
6908 = build_function_type_list (integer_type_node,
6909 V4SF_type_node, V4SF_type_node, NULL_TREE);
6910 tree int_ftype_v16qi_v16qi
6911 = build_function_type_list (integer_type_node,
6912 V16QI_type_node, V16QI_type_node, NULL_TREE);
6913 tree int_ftype_v8hi_v8hi
6914 = build_function_type_list (integer_type_node,
6915 V8HI_type_node, V8HI_type_node, NULL_TREE);
6917 /* Add the simple ternary operators. */
6918 d = (struct builtin_description *) bdesc_3arg;
6919 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6922 enum machine_mode mode0, mode1, mode2, mode3;
6925 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6928 mode0 = insn_data[d->icode].operand[0].mode;
6929 mode1 = insn_data[d->icode].operand[1].mode;
6930 mode2 = insn_data[d->icode].operand[2].mode;
6931 mode3 = insn_data[d->icode].operand[3].mode;
6933 /* When all four are of the same mode. */
6934 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6939 type = v4si_ftype_v4si_v4si_v4si;
6942 type = v4sf_ftype_v4sf_v4sf_v4sf;
6945 type = v8hi_ftype_v8hi_v8hi_v8hi;
6948 type = v16qi_ftype_v16qi_v16qi_v16qi;
6954 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6959 type = v4si_ftype_v4si_v4si_v16qi;
6962 type = v4sf_ftype_v4sf_v4sf_v16qi;
6965 type = v8hi_ftype_v8hi_v8hi_v16qi;
6968 type = v16qi_ftype_v16qi_v16qi_v16qi;
6974 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6975 && mode3 == V4SImode)
6976 type = v4si_ftype_v16qi_v16qi_v4si;
6977 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6978 && mode3 == V4SImode)
6979 type = v4si_ftype_v8hi_v8hi_v4si;
6980 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6981 && mode3 == V4SImode)
6982 type = v4sf_ftype_v4sf_v4sf_v4si;
6984 /* vchar, vchar, vchar, 4 bit literal. */
6985 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6987 type = v16qi_ftype_v16qi_v16qi_char;
6989 /* vshort, vshort, vshort, 4 bit literal. */
6990 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6992 type = v8hi_ftype_v8hi_v8hi_char;
6994 /* vint, vint, vint, 4 bit literal. */
6995 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6997 type = v4si_ftype_v4si_v4si_char;
6999 /* vfloat, vfloat, vfloat, 4 bit literal. */
7000 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7002 type = v4sf_ftype_v4sf_v4sf_char;
7007 def_builtin (d->mask, d->name, type, d->code);
7010 /* Add the simple binary operators. */
7011 d = (struct builtin_description *) bdesc_2arg;
7012 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7014 enum machine_mode mode0, mode1, mode2;
7017 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7020 mode0 = insn_data[d->icode].operand[0].mode;
7021 mode1 = insn_data[d->icode].operand[1].mode;
7022 mode2 = insn_data[d->icode].operand[2].mode;
7024 /* When all three operands are of the same mode. */
7025 if (mode0 == mode1 && mode1 == mode2)
7030 type = v4sf_ftype_v4sf_v4sf;
7033 type = v4si_ftype_v4si_v4si;
7036 type = v16qi_ftype_v16qi_v16qi;
7039 type = v8hi_ftype_v8hi_v8hi;
7042 type = v2si_ftype_v2si_v2si;
7045 type = v2sf_ftype_v2sf_v2sf;
7048 type = int_ftype_int_int;
7055 /* A few other combos we really don't want to do manually. */
7057 /* vint, vfloat, vfloat. */
7058 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7059 type = v4si_ftype_v4sf_v4sf;
7061 /* vshort, vchar, vchar. */
7062 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7063 type = v8hi_ftype_v16qi_v16qi;
7065 /* vint, vshort, vshort. */
7066 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7067 type = v4si_ftype_v8hi_v8hi;
7069 /* vshort, vint, vint. */
7070 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7071 type = v8hi_ftype_v4si_v4si;
7073 /* vchar, vshort, vshort. */
7074 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7075 type = v16qi_ftype_v8hi_v8hi;
7077 /* vint, vchar, vint. */
7078 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7079 type = v4si_ftype_v16qi_v4si;
7081 /* vint, vchar, vchar. */
7082 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7083 type = v4si_ftype_v16qi_v16qi;
7085 /* vint, vshort, vint. */
7086 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7087 type = v4si_ftype_v8hi_v4si;
7089 /* vint, vint, 5 bit literal. */
7090 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7091 type = v4si_ftype_v4si_char;
7093 /* vshort, vshort, 5 bit literal. */
7094 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7095 type = v8hi_ftype_v8hi_char;
7097 /* vchar, vchar, 5 bit literal. */
7098 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7099 type = v16qi_ftype_v16qi_char;
7101 /* vfloat, vint, 5 bit literal. */
7102 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7103 type = v4sf_ftype_v4si_char;
7105 /* vint, vfloat, 5 bit literal. */
7106 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7107 type = v4si_ftype_v4sf_char;
7109 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7110 type = v2si_ftype_int_int;
7112 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7113 type = v2si_ftype_v2si_char;
7115 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7116 type = v2si_ftype_int_char;
7119 else if (mode0 == SImode)
7124 type = int_ftype_v4si_v4si;
7127 type = int_ftype_v4sf_v4sf;
7130 type = int_ftype_v16qi_v16qi;
7133 type = int_ftype_v8hi_v8hi;
7143 def_builtin (d->mask, d->name, type, d->code);
7146 /* Add the simple unary operators. */
7147 d = (struct builtin_description *) bdesc_1arg;
7148 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7150 enum machine_mode mode0, mode1;
7153 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7156 mode0 = insn_data[d->icode].operand[0].mode;
7157 mode1 = insn_data[d->icode].operand[1].mode;
7159 if (mode0 == V4SImode && mode1 == QImode)
7160 type = v4si_ftype_char;
7161 else if (mode0 == V8HImode && mode1 == QImode)
7162 type = v8hi_ftype_char;
7163 else if (mode0 == V16QImode && mode1 == QImode)
7164 type = v16qi_ftype_char;
7165 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7166 type = v4sf_ftype_v4sf;
7167 else if (mode0 == V8HImode && mode1 == V16QImode)
7168 type = v8hi_ftype_v16qi;
7169 else if (mode0 == V4SImode && mode1 == V8HImode)
7170 type = v4si_ftype_v8hi;
7171 else if (mode0 == V2SImode && mode1 == V2SImode)
7172 type = v2si_ftype_v2si;
7173 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7174 type = v2sf_ftype_v2sf;
7175 else if (mode0 == V2SFmode && mode1 == V2SImode)
7176 type = v2sf_ftype_v2si;
7177 else if (mode0 == V2SImode && mode1 == V2SFmode)
7178 type = v2si_ftype_v2sf;
7179 else if (mode0 == V2SImode && mode1 == QImode)
7180 type = v2si_ftype_char;
7184 def_builtin (d->mask, d->name, type, d->code);
7189 rs6000_init_libfuncs (void)
7191 if (!TARGET_HARD_FLOAT)
7194 if (DEFAULT_ABI != ABI_V4)
7196 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7198 /* AIX library routines for float->int conversion. */
7199 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7200 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7203 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7204 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7205 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7206 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7207 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7211 /* 32-bit SVR4 quad floating point routines. */
7213 set_optab_libfunc (add_optab, TFmode, "_q_add");
7214 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7215 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7216 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7217 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7218 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7219 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7221 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7222 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7223 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7224 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7225 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7226 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7228 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7229 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7230 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7231 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7232 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7233 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7234 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7238 /* Expand a block move operation, and return 1 if successful. Return 0
7239 if we should let the compiler generate normal code.
7241 operands[0] is the destination
7242 operands[1] is the source
7243 operands[2] is the length
7244 operands[3] is the alignment */
7246 #define MAX_MOVE_REG 4
7249 expand_block_move (rtx operands[])
7251 rtx orig_dest = operands[0];
7252 rtx orig_src = operands[1];
7253 rtx bytes_rtx = operands[2];
7254 rtx align_rtx = operands[3];
7255 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7260 rtx stores[MAX_MOVE_REG];
7263 /* If this is not a fixed size move, just call memcpy */
7267 /* If this is not a fixed size alignment, abort */
7268 if (GET_CODE (align_rtx) != CONST_INT)
7270 align = INTVAL (align_rtx);
7272 /* Anything to move? */
7273 bytes = INTVAL (bytes_rtx);
7277 /* store_one_arg depends on expand_block_move to handle at least the size of
7278 reg_parm_stack_space. */
7279 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7282 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7285 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7286 rtx (*mov) (rtx, rtx);
7288 enum machine_mode mode = BLKmode;
7292 && bytes > 24 /* move up to 32 bytes at a time */
7300 && ! fixed_regs[12])
7302 move_bytes = (bytes > 32) ? 32 : bytes;
7303 gen_func.movstrsi = gen_movstrsi_8reg;
7305 else if (TARGET_STRING
7306 && bytes > 16 /* move up to 24 bytes at a time */
7312 && ! fixed_regs[10])
7314 move_bytes = (bytes > 24) ? 24 : bytes;
7315 gen_func.movstrsi = gen_movstrsi_6reg;
7317 else if (TARGET_STRING
7318 && bytes > 8 /* move up to 16 bytes at a time */
7324 move_bytes = (bytes > 16) ? 16 : bytes;
7325 gen_func.movstrsi = gen_movstrsi_4reg;
7327 else if (bytes >= 8 && TARGET_POWERPC64
7328 /* 64-bit loads and stores require word-aligned
7330 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7334 gen_func.mov = gen_movdi;
7336 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7337 { /* move up to 8 bytes at a time */
7338 move_bytes = (bytes > 8) ? 8 : bytes;
7339 gen_func.movstrsi = gen_movstrsi_2reg;
7341 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7342 { /* move 4 bytes */
7345 gen_func.mov = gen_movsi;
7347 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7348 { /* move 2 bytes */
7351 gen_func.mov = gen_movhi;
7353 else if (TARGET_STRING && bytes > 1)
7354 { /* move up to 4 bytes at a time */
7355 move_bytes = (bytes > 4) ? 4 : bytes;
7356 gen_func.movstrsi = gen_movstrsi_1reg;
7358 else /* move 1 byte at a time */
7362 gen_func.mov = gen_movqi;
7365 src = adjust_address (orig_src, mode, offset);
7366 dest = adjust_address (orig_dest, mode, offset);
7368 if (mode != BLKmode)
7370 rtx tmp_reg = gen_reg_rtx (mode);
7372 emit_insn ((*gen_func.mov) (tmp_reg, src));
7373 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7376 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7379 for (i = 0; i < num_reg; i++)
7380 emit_insn (stores[i]);
7384 if (mode == BLKmode)
7386 /* Move the address into scratch registers. The movstrsi
7387 patterns require zero offset. */
7388 if (!REG_P (XEXP (src, 0)))
7390 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7391 src = replace_equiv_address (src, src_reg);
7393 set_mem_size (src, GEN_INT (move_bytes));
7395 if (!REG_P (XEXP (dest, 0)))
7397 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7398 dest = replace_equiv_address (dest, dest_reg);
7400 set_mem_size (dest, GEN_INT (move_bytes));
7402 emit_insn ((*gen_func.movstrsi) (dest, src,
7403 GEN_INT (move_bytes & 31),
7412 /* Return 1 if OP is a load multiple operation. It is known to be a
7413 PARALLEL and the first section will be tested. */
7416 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7418 int count = XVECLEN (op, 0);
7419 unsigned int dest_regno;
7423 /* Perform a quick check so we don't blow up below. */
7425 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7426 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7427 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7430 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7431 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7433 for (i = 1; i < count; i++)
7435 rtx elt = XVECEXP (op, 0, i);
7437 if (GET_CODE (elt) != SET
7438 || GET_CODE (SET_DEST (elt)) != REG
7439 || GET_MODE (SET_DEST (elt)) != SImode
7440 || REGNO (SET_DEST (elt)) != dest_regno + i
7441 || GET_CODE (SET_SRC (elt)) != MEM
7442 || GET_MODE (SET_SRC (elt)) != SImode
7443 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7444 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7445 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7446 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7453 /* Similar, but tests for store multiple. Here, the second vector element
7454 is a CLOBBER. It will be tested later. */
7457 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7459 int count = XVECLEN (op, 0) - 1;
7460 unsigned int src_regno;
7464 /* Perform a quick check so we don't blow up below. */
7466 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7467 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7468 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7471 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7472 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7474 for (i = 1; i < count; i++)
7476 rtx elt = XVECEXP (op, 0, i + 1);
7478 if (GET_CODE (elt) != SET
7479 || GET_CODE (SET_SRC (elt)) != REG
7480 || GET_MODE (SET_SRC (elt)) != SImode
7481 || REGNO (SET_SRC (elt)) != src_regno + i
7482 || GET_CODE (SET_DEST (elt)) != MEM
7483 || GET_MODE (SET_DEST (elt)) != SImode
7484 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7485 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7486 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7487 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7494 /* Return a string to perform a load_multiple operation.
7495 operands[0] is the vector.
7496 operands[1] is the source address.
7497 operands[2] is the first destination register. */
7500 rs6000_output_load_multiple (rtx operands[3])
7502 /* We have to handle the case where the pseudo used to contain the address
7503 is assigned to one of the output registers. */
7505 int words = XVECLEN (operands[0], 0);
7508 if (XVECLEN (operands[0], 0) == 1)
7509 return "{l|lwz} %2,0(%1)";
7511 for (i = 0; i < words; i++)
7512 if (refers_to_regno_p (REGNO (operands[2]) + i,
7513 REGNO (operands[2]) + i + 1, operands[1], 0))
7517 xop[0] = GEN_INT (4 * (words-1));
7518 xop[1] = operands[1];
7519 xop[2] = operands[2];
7520 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7525 xop[0] = GEN_INT (4 * (words-1));
7526 xop[1] = operands[1];
7527 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7528 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7533 for (j = 0; j < words; j++)
7536 xop[0] = GEN_INT (j * 4);
7537 xop[1] = operands[1];
7538 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7539 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7541 xop[0] = GEN_INT (i * 4);
7542 xop[1] = operands[1];
7543 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7548 return "{lsi|lswi} %2,%1,%N0";
7551 /* Return 1 for a parallel vrsave operation. */
7554 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7556 int count = XVECLEN (op, 0);
7557 unsigned int dest_regno, src_regno;
7561 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7562 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7563 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7566 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7567 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7569 if (dest_regno != VRSAVE_REGNO
7570 && src_regno != VRSAVE_REGNO)
7573 for (i = 1; i < count; i++)
7575 rtx elt = XVECEXP (op, 0, i);
7577 if (GET_CODE (elt) != CLOBBER
7578 && GET_CODE (elt) != SET)
7585 /* Return 1 for an PARALLEL suitable for mfcr. */
7588 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7590 int count = XVECLEN (op, 0);
7593 /* Perform a quick check so we don't blow up below. */
7595 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7596 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7597 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7600 for (i = 0; i < count; i++)
7602 rtx exp = XVECEXP (op, 0, i);
7607 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7609 if (GET_CODE (src_reg) != REG
7610 || GET_MODE (src_reg) != CCmode
7611 || ! CR_REGNO_P (REGNO (src_reg)))
7614 if (GET_CODE (exp) != SET
7615 || GET_CODE (SET_DEST (exp)) != REG
7616 || GET_MODE (SET_DEST (exp)) != SImode
7617 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7619 unspec = SET_SRC (exp);
7620 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7622 if (GET_CODE (unspec) != UNSPEC
7623 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7624 || XVECLEN (unspec, 0) != 2
7625 || XVECEXP (unspec, 0, 0) != src_reg
7626 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7627 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7633 /* Return 1 for an PARALLEL suitable for mtcrf. */
7636 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7638 int count = XVECLEN (op, 0);
7642 /* Perform a quick check so we don't blow up below. */
7644 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7645 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7646 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7648 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7650 if (GET_CODE (src_reg) != REG
7651 || GET_MODE (src_reg) != SImode
7652 || ! INT_REGNO_P (REGNO (src_reg)))
7655 for (i = 0; i < count; i++)
7657 rtx exp = XVECEXP (op, 0, i);
7661 if (GET_CODE (exp) != SET
7662 || GET_CODE (SET_DEST (exp)) != REG
7663 || GET_MODE (SET_DEST (exp)) != CCmode
7664 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7666 unspec = SET_SRC (exp);
7667 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7669 if (GET_CODE (unspec) != UNSPEC
7670 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7671 || XVECLEN (unspec, 0) != 2
7672 || XVECEXP (unspec, 0, 0) != src_reg
7673 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7674 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7680 /* Return 1 for an PARALLEL suitable for lmw. */
7683 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7685 int count = XVECLEN (op, 0);
7686 unsigned int dest_regno;
7688 unsigned int base_regno;
7689 HOST_WIDE_INT offset;
7692 /* Perform a quick check so we don't blow up below. */
7694 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7695 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7696 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7699 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7700 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7703 || count != 32 - (int) dest_regno)
7706 if (legitimate_indirect_address_p (src_addr, 0))
7709 base_regno = REGNO (src_addr);
7710 if (base_regno == 0)
7713 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7715 offset = INTVAL (XEXP (src_addr, 1));
7716 base_regno = REGNO (XEXP (src_addr, 0));
7721 for (i = 0; i < count; i++)
7723 rtx elt = XVECEXP (op, 0, i);
7726 HOST_WIDE_INT newoffset;
7728 if (GET_CODE (elt) != SET
7729 || GET_CODE (SET_DEST (elt)) != REG
7730 || GET_MODE (SET_DEST (elt)) != SImode
7731 || REGNO (SET_DEST (elt)) != dest_regno + i
7732 || GET_CODE (SET_SRC (elt)) != MEM
7733 || GET_MODE (SET_SRC (elt)) != SImode)
7735 newaddr = XEXP (SET_SRC (elt), 0);
7736 if (legitimate_indirect_address_p (newaddr, 0))
7741 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7743 addr_reg = XEXP (newaddr, 0);
7744 newoffset = INTVAL (XEXP (newaddr, 1));
7748 if (REGNO (addr_reg) != base_regno
7749 || newoffset != offset + 4 * i)
7756 /* Return 1 for an PARALLEL suitable for stmw. */
7759 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7761 int count = XVECLEN (op, 0);
7762 unsigned int src_regno;
7764 unsigned int base_regno;
7765 HOST_WIDE_INT offset;
7768 /* Perform a quick check so we don't blow up below. */
7770 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7771 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7772 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7775 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7776 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7779 || count != 32 - (int) src_regno)
7782 if (legitimate_indirect_address_p (dest_addr, 0))
7785 base_regno = REGNO (dest_addr);
7786 if (base_regno == 0)
7789 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7791 offset = INTVAL (XEXP (dest_addr, 1));
7792 base_regno = REGNO (XEXP (dest_addr, 0));
7797 for (i = 0; i < count; i++)
7799 rtx elt = XVECEXP (op, 0, i);
7802 HOST_WIDE_INT newoffset;
7804 if (GET_CODE (elt) != SET
7805 || GET_CODE (SET_SRC (elt)) != REG
7806 || GET_MODE (SET_SRC (elt)) != SImode
7807 || REGNO (SET_SRC (elt)) != src_regno + i
7808 || GET_CODE (SET_DEST (elt)) != MEM
7809 || GET_MODE (SET_DEST (elt)) != SImode)
7811 newaddr = XEXP (SET_DEST (elt), 0);
7812 if (legitimate_indirect_address_p (newaddr, 0))
7817 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7819 addr_reg = XEXP (newaddr, 0);
7820 newoffset = INTVAL (XEXP (newaddr, 1));
7824 if (REGNO (addr_reg) != base_regno
7825 || newoffset != offset + 4 * i)
7832 /* A validation routine: say whether CODE, a condition code, and MODE
7833 match. The other alternatives either don't make sense or should
7834 never be generated. */
7837 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
7839 if (GET_RTX_CLASS (code) != '<'
7840 || GET_MODE_CLASS (mode) != MODE_CC)
7843 /* These don't make sense. */
7844 if ((code == GT || code == LT || code == GE || code == LE)
7845 && mode == CCUNSmode)
7848 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7849 && mode != CCUNSmode)
7852 if (mode != CCFPmode
7853 && (code == ORDERED || code == UNORDERED
7854 || code == UNEQ || code == LTGT
7855 || code == UNGT || code == UNLT
7856 || code == UNGE || code == UNLE))
7859 /* These should never be generated except for
7860 flag_finite_math_only. */
7861 if (mode == CCFPmode
7862 && ! flag_finite_math_only
7863 && (code == LE || code == GE
7864 || code == UNEQ || code == LTGT
7865 || code == UNGT || code == UNLT))
7868 /* These are invalid; the information is not there. */
7869 if (mode == CCEQmode
7870 && code != EQ && code != NE)
7874 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7875 We only check the opcode against the mode of the CC value here. */
7878 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7880 enum rtx_code code = GET_CODE (op);
7881 enum machine_mode cc_mode;
7883 if (GET_RTX_CLASS (code) != '<')
7886 cc_mode = GET_MODE (XEXP (op, 0));
7887 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7890 validate_condition_mode (code, cc_mode);
7895 /* Return 1 if OP is a comparison operation that is valid for a branch
7896 insn and which is true if the corresponding bit in the CC register
7900 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
7904 if (! branch_comparison_operator (op, mode))
7907 code = GET_CODE (op);
7908 return (code == EQ || code == LT || code == GT
7909 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7910 || code == LTU || code == GTU
7911 || code == UNORDERED);
7914 /* Return 1 if OP is a comparison operation that is valid for an scc
7915 insn: it must be a positive comparison. */
7918 scc_comparison_operator (rtx op, enum machine_mode mode)
7920 return branch_positive_comparison_operator (op, mode);
7924 trap_comparison_operator (rtx op, enum machine_mode mode)
7926 if (mode != VOIDmode && mode != GET_MODE (op))
7928 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7932 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7934 enum rtx_code code = GET_CODE (op);
7935 return (code == AND || code == IOR || code == XOR);
7939 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7941 enum rtx_code code = GET_CODE (op);
7942 return (code == IOR || code == XOR);
7946 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7948 enum rtx_code code = GET_CODE (op);
7949 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7952 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7953 mask required to convert the result of a rotate insn into a shift
7954 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7957 includes_lshift_p (rtx shiftop, rtx andop)
7959 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7961 shift_mask <<= INTVAL (shiftop);
7963 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7966 /* Similar, but for right shift. */
7969 includes_rshift_p (rtx shiftop, rtx andop)
7971 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7973 shift_mask >>= INTVAL (shiftop);
7975 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7978 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7979 to perform a left shift. It must have exactly SHIFTOP least
7980 significant 0's, then one or more 1's, then zero or more 0's. */
7983 includes_rldic_lshift_p (rtx shiftop, rtx andop)
7985 if (GET_CODE (andop) == CONST_INT)
7987 HOST_WIDE_INT c, lsb, shift_mask;
7990 if (c == 0 || c == ~0)
7994 shift_mask <<= INTVAL (shiftop);
7996 /* Find the least significant one bit. */
7999 /* It must coincide with the LSB of the shift mask. */
8000 if (-lsb != shift_mask)
8003 /* Invert to look for the next transition (if any). */
8006 /* Remove the low group of ones (originally low group of zeros). */
8009 /* Again find the lsb, and check we have all 1's above. */
8013 else if (GET_CODE (andop) == CONST_DOUBLE
8014 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8016 HOST_WIDE_INT low, high, lsb;
8017 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8019 low = CONST_DOUBLE_LOW (andop);
8020 if (HOST_BITS_PER_WIDE_INT < 64)
8021 high = CONST_DOUBLE_HIGH (andop);
8023 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8024 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8027 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8029 shift_mask_high = ~0;
8030 if (INTVAL (shiftop) > 32)
8031 shift_mask_high <<= INTVAL (shiftop) - 32;
8035 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8042 return high == -lsb;
8045 shift_mask_low = ~0;
8046 shift_mask_low <<= INTVAL (shiftop);
8050 if (-lsb != shift_mask_low)
8053 if (HOST_BITS_PER_WIDE_INT < 64)
8058 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8061 return high == -lsb;
8065 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8071 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8072 to perform a left shift. It must have SHIFTOP or more least
8073 significant 0's, with the remainder of the word 1's. */
8076 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8078 if (GET_CODE (andop) == CONST_INT)
8080 HOST_WIDE_INT c, lsb, shift_mask;
8083 shift_mask <<= INTVAL (shiftop);
8086 /* Find the least significant one bit. */
8089 /* It must be covered by the shift mask.
8090 This test also rejects c == 0. */
8091 if ((lsb & shift_mask) == 0)
8094 /* Check we have all 1's above the transition, and reject all 1's. */
8095 return c == -lsb && lsb != 1;
8097 else if (GET_CODE (andop) == CONST_DOUBLE
8098 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8100 HOST_WIDE_INT low, lsb, shift_mask_low;
8102 low = CONST_DOUBLE_LOW (andop);
8104 if (HOST_BITS_PER_WIDE_INT < 64)
8106 HOST_WIDE_INT high, shift_mask_high;
8108 high = CONST_DOUBLE_HIGH (andop);
8112 shift_mask_high = ~0;
8113 if (INTVAL (shiftop) > 32)
8114 shift_mask_high <<= INTVAL (shiftop) - 32;
8118 if ((lsb & shift_mask_high) == 0)
8121 return high == -lsb;
8127 shift_mask_low = ~0;
8128 shift_mask_low <<= INTVAL (shiftop);
8132 if ((lsb & shift_mask_low) == 0)
8135 return low == -lsb && lsb != 1;
8141 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8142 for lfq and stfq insns.
8144 Note reg1 and reg2 *must* be hard registers. To be sure we will
8145 abort if we are passed pseudo registers. */
8148 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8150 /* We might have been passed a SUBREG. */
8151 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8154 return (REGNO (reg1) == REGNO (reg2) - 1);
8157 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8158 addr1 and addr2 must be in consecutive memory locations
8159 (addr2 == addr1 + 8). */
8162 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8167 /* Extract an offset (if used) from the first addr. */
8168 if (GET_CODE (addr1) == PLUS)
8170 /* If not a REG, return zero. */
8171 if (GET_CODE (XEXP (addr1, 0)) != REG)
8175 reg1 = REGNO (XEXP (addr1, 0));
8176 /* The offset must be constant! */
8177 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8179 offset1 = INTVAL (XEXP (addr1, 1));
8182 else if (GET_CODE (addr1) != REG)
8186 reg1 = REGNO (addr1);
8187 /* This was a simple (mem (reg)) expression. Offset is 0. */
8191 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8192 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8193 register as addr1. */
8194 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8196 if (GET_CODE (addr2) != PLUS)
8199 if (GET_CODE (XEXP (addr2, 0)) != REG
8200 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8203 if (reg1 != REGNO (XEXP (addr2, 0)))
8206 /* The offset for the second addr must be 8 more than the first addr. */
8207 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8210 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8215 /* Return the register class of a scratch register needed to copy IN into
8216 or out of a register in CLASS in MODE. If it can be done directly,
8217 NO_REGS is returned. */
8220 secondary_reload_class (enum reg_class class,
8221 enum machine_mode mode ATTRIBUTE_UNUSED, rtx in)
8225 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8227 && MACHOPIC_INDIRECT
8231 /* We cannot copy a symbolic operand directly into anything
8232 other than BASE_REGS for TARGET_ELF. So indicate that a
8233 register from BASE_REGS is needed as an intermediate
8236 On Darwin, pic addresses require a load from memory, which
8237 needs a base register. */
8238 if (class != BASE_REGS
8239 && (GET_CODE (in) == SYMBOL_REF
8240 || GET_CODE (in) == HIGH
8241 || GET_CODE (in) == LABEL_REF
8242 || GET_CODE (in) == CONST))
8246 if (GET_CODE (in) == REG)
8249 if (regno >= FIRST_PSEUDO_REGISTER)
8251 regno = true_regnum (in);
8252 if (regno >= FIRST_PSEUDO_REGISTER)
8256 else if (GET_CODE (in) == SUBREG)
8258 regno = true_regnum (in);
8259 if (regno >= FIRST_PSEUDO_REGISTER)
8265 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8267 if (class == GENERAL_REGS || class == BASE_REGS
8268 || (regno >= 0 && INT_REGNO_P (regno)))
8271 /* Constants, memory, and FP registers can go into FP registers. */
8272 if ((regno == -1 || FP_REGNO_P (regno))
8273 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8276 /* Memory, and AltiVec registers can go into AltiVec registers. */
8277 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8278 && class == ALTIVEC_REGS)
8281 /* We can copy among the CR registers. */
8282 if ((class == CR_REGS || class == CR0_REGS)
8283 && regno >= 0 && CR_REGNO_P (regno))
8286 /* Otherwise, we need GENERAL_REGS. */
8287 return GENERAL_REGS;
8290 /* Given a comparison operation, return the bit number in CCR to test. We
8291 know this is a valid comparison.
8293 SCC_P is 1 if this is for an scc. That means that %D will have been
8294 used instead of %C, so the bits will be in different places.
8296 Return -1 if OP isn't a valid comparison for some reason. */
8299 ccr_bit (rtx op, int scc_p)
8301 enum rtx_code code = GET_CODE (op);
8302 enum machine_mode cc_mode;
8307 if (GET_RTX_CLASS (code) != '<')
8312 if (GET_CODE (reg) != REG
8313 || ! CR_REGNO_P (REGNO (reg)))
8316 cc_mode = GET_MODE (reg);
8317 cc_regnum = REGNO (reg);
8318 base_bit = 4 * (cc_regnum - CR0_REGNO);
8320 validate_condition_mode (code, cc_mode);
8322 /* When generating a sCOND operation, only positive conditions are
8324 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8325 && code != GTU && code != LTU)
8331 if (TARGET_E500 && !TARGET_FPRS
8332 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8333 return base_bit + 1;
8334 return scc_p ? base_bit + 3 : base_bit + 2;
8336 if (TARGET_E500 && !TARGET_FPRS
8337 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8338 return base_bit + 1;
8339 return base_bit + 2;
8340 case GT: case GTU: case UNLE:
8341 return base_bit + 1;
8342 case LT: case LTU: case UNGE:
8344 case ORDERED: case UNORDERED:
8345 return base_bit + 3;
8348 /* If scc, we will have done a cror to put the bit in the
8349 unordered position. So test that bit. For integer, this is ! LT
8350 unless this is an scc insn. */
8351 return scc_p ? base_bit + 3 : base_bit;
8354 return scc_p ? base_bit + 3 : base_bit + 1;
8361 /* Return the GOT register. */
8364 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8366 /* The second flow pass currently (June 1999) can't update
8367 regs_ever_live without disturbing other parts of the compiler, so
8368 update it here to make the prolog/epilogue code happy. */
8369 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8370 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8372 current_function_uses_pic_offset_table = 1;
8374 return pic_offset_table_rtx;
8377 /* Function to init struct machine_function.
8378 This will be called, via a pointer variable,
8379 from push_function_context. */
8381 static struct machine_function *
8382 rs6000_init_machine_status (void)
8384 return ggc_alloc_cleared (sizeof (machine_function));
8387 /* These macros test for integers and extract the low-order bits. */
8389 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8390 && GET_MODE (X) == VOIDmode)
8392 #define INT_LOWPART(X) \
8393 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8399 unsigned long val = INT_LOWPART (op);
8401 /* If the high bit is zero, the value is the first 1 bit we find
8403 if ((val & 0x80000000) == 0)
8405 if ((val & 0xffffffff) == 0)
8409 while (((val <<= 1) & 0x80000000) == 0)
8414 /* If the high bit is set and the low bit is not, or the mask is all
8415 1's, the value is zero. */
8416 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8419 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8422 while (((val >>= 1) & 1) != 0)
8432 unsigned long val = INT_LOWPART (op);
8434 /* If the low bit is zero, the value is the first 1 bit we find from
8438 if ((val & 0xffffffff) == 0)
8442 while (((val >>= 1) & 1) == 0)
8448 /* If the low bit is set and the high bit is not, or the mask is all
8449 1's, the value is 31. */
8450 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8453 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8456 while (((val <<= 1) & 0x80000000) != 0)
8462 /* Locate some local-dynamic symbol still in use by this function
8463 so that we can print its name in some tls_ld pattern. */
8466 rs6000_get_some_local_dynamic_name (void)
8470 if (cfun->machine->some_ld_name)
8471 return cfun->machine->some_ld_name;
8473 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8475 && for_each_rtx (&PATTERN (insn),
8476 rs6000_get_some_local_dynamic_name_1, 0))
8477 return cfun->machine->some_ld_name;
8482 /* Helper function for rs6000_get_some_local_dynamic_name. */
8485 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8489 if (GET_CODE (x) == SYMBOL_REF)
8491 const char *str = XSTR (x, 0);
8492 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8494 cfun->machine->some_ld_name = str;
8502 /* Print an operand. Recognize special options, documented below. */
8505 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8506 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8508 #define SMALL_DATA_RELOC "sda21"
8509 #define SMALL_DATA_REG 0
8513 print_operand (FILE *file, rtx x, int code)
8517 unsigned HOST_WIDE_INT uval;
8522 /* Write out an instruction after the call which may be replaced
8523 with glue code by the loader. This depends on the AIX version. */
8524 asm_fprintf (file, RS6000_CALL_GLUE);
8527 /* %a is output_address. */
8530 /* If X is a constant integer whose low-order 5 bits are zero,
8531 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8532 in the AIX assembler where "sri" with a zero shift count
8533 writes a trash instruction. */
8534 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8541 /* If constant, low-order 16 bits of constant, unsigned.
8542 Otherwise, write normally. */
8544 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8546 print_operand (file, x, 0);
8550 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8551 for 64-bit mask direction. */
8552 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8555 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8559 /* X is a CR register. Print the number of the EQ bit of the CR */
8560 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8561 output_operand_lossage ("invalid %%E value");
8563 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8567 /* X is a CR register. Print the shift count needed to move it
8568 to the high-order four bits. */
8569 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8570 output_operand_lossage ("invalid %%f value");
8572 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8576 /* Similar, but print the count for the rotate in the opposite
8578 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8579 output_operand_lossage ("invalid %%F value");
8581 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8585 /* X is a constant integer. If it is negative, print "m",
8586 otherwise print "z". This is to make an aze or ame insn. */
8587 if (GET_CODE (x) != CONST_INT)
8588 output_operand_lossage ("invalid %%G value");
8589 else if (INTVAL (x) >= 0)
8596 /* If constant, output low-order five bits. Otherwise, write
8599 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8601 print_operand (file, x, 0);
8605 /* If constant, output low-order six bits. Otherwise, write
8608 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8610 print_operand (file, x, 0);
8614 /* Print `i' if this is a constant, else nothing. */
8620 /* Write the bit number in CCR for jump. */
8623 output_operand_lossage ("invalid %%j code");
8625 fprintf (file, "%d", i);
8629 /* Similar, but add one for shift count in rlinm for scc and pass
8630 scc flag to `ccr_bit'. */
8633 output_operand_lossage ("invalid %%J code");
8635 /* If we want bit 31, write a shift count of zero, not 32. */
8636 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8640 /* X must be a constant. Write the 1's complement of the
8643 output_operand_lossage ("invalid %%k value");
8645 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8649 /* X must be a symbolic constant on ELF. Write an
8650 expression suitable for an 'addi' that adds in the low 16
8652 if (GET_CODE (x) != CONST)
8654 print_operand_address (file, x);
8659 if (GET_CODE (XEXP (x, 0)) != PLUS
8660 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8661 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8662 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8663 output_operand_lossage ("invalid %%K value");
8664 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8666 /* For GNU as, there must be a non-alphanumeric character
8667 between 'l' and the number. The '-' is added by
8668 print_operand() already. */
8669 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8671 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8675 /* %l is output_asm_label. */
8678 /* Write second word of DImode or DFmode reference. Works on register
8679 or non-indexed memory only. */
8680 if (GET_CODE (x) == REG)
8681 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8682 else if (GET_CODE (x) == MEM)
8684 /* Handle possible auto-increment. Since it is pre-increment and
8685 we have already done it, we can just use an offset of word. */
8686 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8687 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8688 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8691 output_address (XEXP (adjust_address_nv (x, SImode,
8695 if (small_data_operand (x, GET_MODE (x)))
8696 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8697 reg_names[SMALL_DATA_REG]);
8702 /* MB value for a mask operand. */
8703 if (! mask_operand (x, SImode))
8704 output_operand_lossage ("invalid %%m value");
8706 fprintf (file, "%d", extract_MB (x));
8710 /* ME value for a mask operand. */
8711 if (! mask_operand (x, SImode))
8712 output_operand_lossage ("invalid %%M value");
8714 fprintf (file, "%d", extract_ME (x));
8717 /* %n outputs the negative of its operand. */
8720 /* Write the number of elements in the vector times 4. */
8721 if (GET_CODE (x) != PARALLEL)
8722 output_operand_lossage ("invalid %%N value");
8724 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8728 /* Similar, but subtract 1 first. */
8729 if (GET_CODE (x) != PARALLEL)
8730 output_operand_lossage ("invalid %%O value");
8732 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8736 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8738 || INT_LOWPART (x) < 0
8739 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8740 output_operand_lossage ("invalid %%p value");
8742 fprintf (file, "%d", i);
8746 /* The operand must be an indirect memory reference. The result
8747 is the register number. */
8748 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8749 || REGNO (XEXP (x, 0)) >= 32)
8750 output_operand_lossage ("invalid %%P value");
8752 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8756 /* This outputs the logical code corresponding to a boolean
8757 expression. The expression may have one or both operands
8758 negated (if one, only the first one). For condition register
8759 logical operations, it will also treat the negated
8760 CR codes as NOTs, but not handle NOTs of them. */
8762 const char *const *t = 0;
8764 enum rtx_code code = GET_CODE (x);
8765 static const char * const tbl[3][3] = {
8766 { "and", "andc", "nor" },
8767 { "or", "orc", "nand" },
8768 { "xor", "eqv", "xor" } };
8772 else if (code == IOR)
8774 else if (code == XOR)
8777 output_operand_lossage ("invalid %%q value");
8779 if (GET_CODE (XEXP (x, 0)) != NOT)
8783 if (GET_CODE (XEXP (x, 1)) == NOT)
8801 /* X is a CR register. Print the mask for `mtcrf'. */
8802 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8803 output_operand_lossage ("invalid %%R value");
8805 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8809 /* Low 5 bits of 32 - value */
8811 output_operand_lossage ("invalid %%s value");
8813 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8817 /* PowerPC64 mask position. All 0's is excluded.
8818 CONST_INT 32-bit mask is considered sign-extended so any
8819 transition must occur within the CONST_INT, not on the boundary. */
8820 if (! mask64_operand (x, DImode))
8821 output_operand_lossage ("invalid %%S value");
8823 uval = INT_LOWPART (x);
8825 if (uval & 1) /* Clear Left */
8827 #if HOST_BITS_PER_WIDE_INT > 64
8828 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8832 else /* Clear Right */
8835 #if HOST_BITS_PER_WIDE_INT > 64
8836 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8844 fprintf (file, "%d", i);
8848 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8849 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8852 /* Bit 3 is OV bit. */
8853 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8855 /* If we want bit 31, write a shift count of zero, not 32. */
8856 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8860 /* Print the symbolic name of a branch target register. */
8861 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8862 && REGNO (x) != COUNT_REGISTER_REGNUM))
8863 output_operand_lossage ("invalid %%T value");
8864 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8865 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8867 fputs ("ctr", file);
8871 /* High-order 16 bits of constant for use in unsigned operand. */
8873 output_operand_lossage ("invalid %%u value");
8875 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8876 (INT_LOWPART (x) >> 16) & 0xffff);
8880 /* High-order 16 bits of constant for use in signed operand. */
8882 output_operand_lossage ("invalid %%v value");
8884 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8885 (INT_LOWPART (x) >> 16) & 0xffff);
8889 /* Print `u' if this has an auto-increment or auto-decrement. */
8890 if (GET_CODE (x) == MEM
8891 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8892 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8897 /* Print the trap code for this operand. */
8898 switch (GET_CODE (x))
8901 fputs ("eq", file); /* 4 */
8904 fputs ("ne", file); /* 24 */
8907 fputs ("lt", file); /* 16 */
8910 fputs ("le", file); /* 20 */
8913 fputs ("gt", file); /* 8 */
8916 fputs ("ge", file); /* 12 */
8919 fputs ("llt", file); /* 2 */
8922 fputs ("lle", file); /* 6 */
8925 fputs ("lgt", file); /* 1 */
8928 fputs ("lge", file); /* 5 */
8936 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8939 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8940 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8942 print_operand (file, x, 0);
8946 /* MB value for a PowerPC64 rldic operand. */
8947 val = (GET_CODE (x) == CONST_INT
8948 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8953 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8954 if ((val <<= 1) < 0)
8957 #if HOST_BITS_PER_WIDE_INT == 32
8958 if (GET_CODE (x) == CONST_INT && i >= 0)
8959 i += 32; /* zero-extend high-part was all 0's */
8960 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8962 val = CONST_DOUBLE_LOW (x);
8969 for ( ; i < 64; i++)
8970 if ((val <<= 1) < 0)
8975 fprintf (file, "%d", i + 1);
8979 if (GET_CODE (x) == MEM
8980 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8985 /* Like 'L', for third word of TImode */
8986 if (GET_CODE (x) == REG)
8987 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8988 else if (GET_CODE (x) == MEM)
8990 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8991 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8992 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8994 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8995 if (small_data_operand (x, GET_MODE (x)))
8996 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8997 reg_names[SMALL_DATA_REG]);
9002 /* X is a SYMBOL_REF. Write out the name preceded by a
9003 period and without any trailing data in brackets. Used for function
9004 names. If we are configured for System V (or the embedded ABI) on
9005 the PowerPC, do not emit the period, since those systems do not use
9006 TOCs and the like. */
9007 if (GET_CODE (x) != SYMBOL_REF)
9010 if (XSTR (x, 0)[0] != '.')
9012 switch (DEFAULT_ABI)
9027 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9029 assemble_name (file, XSTR (x, 0));
9033 /* Like 'L', for last word of TImode. */
9034 if (GET_CODE (x) == REG)
9035 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9036 else if (GET_CODE (x) == MEM)
9038 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9039 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9040 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9042 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9043 if (small_data_operand (x, GET_MODE (x)))
9044 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9045 reg_names[SMALL_DATA_REG]);
9049 /* Print AltiVec or SPE memory operand. */
9054 if (GET_CODE (x) != MEM)
9062 if (GET_CODE (tmp) == REG)
9064 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9067 /* Handle [reg+UIMM]. */
9068 else if (GET_CODE (tmp) == PLUS &&
9069 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9073 if (GET_CODE (XEXP (tmp, 0)) != REG)
9076 x = INTVAL (XEXP (tmp, 1));
9077 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9081 /* Fall through. Must be [reg+reg]. */
9083 if (GET_CODE (tmp) == REG)
9084 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9085 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9087 if (REGNO (XEXP (tmp, 0)) == 0)
9088 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9089 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9091 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9092 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9100 if (GET_CODE (x) == REG)
9101 fprintf (file, "%s", reg_names[REGNO (x)]);
9102 else if (GET_CODE (x) == MEM)
9104 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9105 know the width from the mode. */
9106 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9107 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9108 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9109 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9110 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9111 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9113 output_address (XEXP (x, 0));
9116 output_addr_const (file, x);
9120 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9124 output_operand_lossage ("invalid %%xn code");
9128 /* Print the address of an operand. */
9131 print_operand_address (FILE *file, rtx x)
9133 if (GET_CODE (x) == REG)
9134 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9135 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9136 || GET_CODE (x) == LABEL_REF)
9138 output_addr_const (file, x);
9139 if (small_data_operand (x, GET_MODE (x)))
9140 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9141 reg_names[SMALL_DATA_REG]);
9142 else if (TARGET_TOC)
9145 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9147 if (REGNO (XEXP (x, 0)) == 0)
9148 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9149 reg_names[ REGNO (XEXP (x, 0)) ]);
9151 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9152 reg_names[ REGNO (XEXP (x, 1)) ]);
9154 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9155 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9156 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9158 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9159 && CONSTANT_P (XEXP (x, 1)))
9161 output_addr_const (file, XEXP (x, 1));
9162 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9166 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9167 && CONSTANT_P (XEXP (x, 1)))
9169 fprintf (file, "lo16(");
9170 output_addr_const (file, XEXP (x, 1));
9171 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9174 else if (legitimate_constant_pool_address_p (x))
9176 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9178 rtx contains_minus = XEXP (x, 1);
9182 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9183 turn it into (sym) for output_addr_const. */
9184 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9185 contains_minus = XEXP (contains_minus, 0);
9187 minus = XEXP (contains_minus, 0);
9188 symref = XEXP (minus, 0);
9189 XEXP (contains_minus, 0) = symref;
9194 name = XSTR (symref, 0);
9195 newname = alloca (strlen (name) + sizeof ("@toc"));
9196 strcpy (newname, name);
9197 strcat (newname, "@toc");
9198 XSTR (symref, 0) = newname;
9200 output_addr_const (file, XEXP (x, 1));
9202 XSTR (symref, 0) = name;
9203 XEXP (contains_minus, 0) = minus;
9206 output_addr_const (file, XEXP (x, 1));
9208 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9214 /* Target hook for assembling integer objects. The PowerPC version has
9215 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9216 is defined. It also needs to handle DI-mode objects on 64-bit
9220 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9222 #ifdef RELOCATABLE_NEEDS_FIXUP
9223 /* Special handling for SI values. */
9224 if (size == 4 && aligned_p)
9226 extern int in_toc_section (void);
9227 static int recurse = 0;
9229 /* For -mrelocatable, we mark all addresses that need to be fixed up
9230 in the .fixup section. */
9231 if (TARGET_RELOCATABLE
9232 && !in_toc_section ()
9233 && !in_text_section ()
9235 && GET_CODE (x) != CONST_INT
9236 && GET_CODE (x) != CONST_DOUBLE
9242 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9244 ASM_OUTPUT_LABEL (asm_out_file, buf);
9245 fprintf (asm_out_file, "\t.long\t(");
9246 output_addr_const (asm_out_file, x);
9247 fprintf (asm_out_file, ")@fixup\n");
9248 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9249 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9250 fprintf (asm_out_file, "\t.long\t");
9251 assemble_name (asm_out_file, buf);
9252 fprintf (asm_out_file, "\n\t.previous\n");
9256 /* Remove initial .'s to turn a -mcall-aixdesc function
9257 address into the address of the descriptor, not the function
9259 else if (GET_CODE (x) == SYMBOL_REF
9260 && XSTR (x, 0)[0] == '.'
9261 && DEFAULT_ABI == ABI_AIX)
9263 const char *name = XSTR (x, 0);
9264 while (*name == '.')
9267 fprintf (asm_out_file, "\t.long\t%s\n", name);
9271 #endif /* RELOCATABLE_NEEDS_FIXUP */
9272 return default_assemble_integer (x, size, aligned_p);
9275 #ifdef HAVE_GAS_HIDDEN
9276 /* Emit an assembler directive to set symbol visibility for DECL to
9280 rs6000_assemble_visibility (tree decl, int vis)
9282 /* Functions need to have their entry point symbol visibility set as
9283 well as their descriptor symbol visibility. */
9284 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9286 static const char * const visibility_types[] = {
9287 NULL, "internal", "hidden", "protected"
9290 const char *name, *type;
9292 name = ((* targetm.strip_name_encoding)
9293 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9294 type = visibility_types[vis];
9296 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9297 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9300 default_assemble_visibility (decl, vis);
9305 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9307 /* Reversal of FP compares takes care -- an ordered compare
9308 becomes an unordered compare and vice versa. */
9309 if (mode == CCFPmode
9310 && (!flag_finite_math_only
9311 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9312 || code == UNEQ || code == LTGT))
9313 return reverse_condition_maybe_unordered (code);
9315 return reverse_condition (code);
9318 /* Generate a compare for CODE. Return a brand-new rtx that
9319 represents the result of the compare. */
9322 rs6000_generate_compare (enum rtx_code code)
9324 enum machine_mode comp_mode;
9327 if (rs6000_compare_fp_p)
9328 comp_mode = CCFPmode;
9329 else if (code == GTU || code == LTU
9330 || code == GEU || code == LEU)
9331 comp_mode = CCUNSmode;
9335 /* First, the compare. */
9336 compare_result = gen_reg_rtx (comp_mode);
9338 /* SPE FP compare instructions on the GPRs. Yuck! */
9339 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9340 && rs6000_compare_fp_p)
9342 rtx cmp, or1, or2, or_result, compare_result2;
9350 cmp = flag_finite_math_only
9351 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9353 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9354 rs6000_compare_op1);
9362 cmp = flag_finite_math_only
9363 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9365 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9366 rs6000_compare_op1);
9374 cmp = flag_finite_math_only
9375 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9377 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9378 rs6000_compare_op1);
9384 /* Synthesize LE and GE from LT/GT || EQ. */
9385 if (code == LE || code == GE || code == LEU || code == GEU)
9387 /* Synthesize GE/LE frome GT/LT || EQ. */
9393 case LE: code = LT; break;
9394 case GE: code = GT; break;
9395 case LEU: code = LT; break;
9396 case GEU: code = GT; break;
9400 or1 = gen_reg_rtx (SImode);
9401 or2 = gen_reg_rtx (SImode);
9402 or_result = gen_reg_rtx (CCEQmode);
9403 compare_result2 = gen_reg_rtx (CCFPmode);
9406 cmp = flag_finite_math_only
9407 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9409 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9410 rs6000_compare_op1);
9413 /* The MC8540 FP compare instructions set the CR bits
9414 differently than other PPC compare instructions. For
9415 that matter, there is no generic test instruction, but a
9416 testgt, testlt, and testeq. For a true condition, bit 2
9417 is set (x1xx) in the CR. Following the traditional CR
9423 ... bit 2 would be a GT CR alias, so later on we
9424 look in the GT bits for the branch instructions.
9425 However, we must be careful to emit correct RTL in
9426 the meantime, so optimizations don't get confused. */
9428 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9429 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9431 /* OR them together. */
9432 cmp = gen_rtx_SET (VOIDmode, or_result,
9433 gen_rtx_COMPARE (CCEQmode,
9434 gen_rtx_IOR (SImode, or1, or2),
9436 compare_result = or_result;
9441 /* We only care about 1 bit (x1xx), so map everything to NE to
9442 maintain rtl sanity. We'll get to the right bit (x1xx) at
9443 code output time. */
9444 if (code == NE || code == LTGT)
9445 /* Do the inverse here because we have no cmpne
9446 instruction. We use the cmpeq instruction and expect
9447 to get a 0 instead. */
9456 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9457 gen_rtx_COMPARE (comp_mode,
9459 rs6000_compare_op1)));
9461 /* Some kinds of FP comparisons need an OR operation;
9462 under flag_finite_math_only we don't bother. */
9463 if (rs6000_compare_fp_p
9464 && ! flag_finite_math_only
9465 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9466 && (code == LE || code == GE
9467 || code == UNEQ || code == LTGT
9468 || code == UNGT || code == UNLT))
9470 enum rtx_code or1, or2;
9471 rtx or1_rtx, or2_rtx, compare2_rtx;
9472 rtx or_result = gen_reg_rtx (CCEQmode);
9476 case LE: or1 = LT; or2 = EQ; break;
9477 case GE: or1 = GT; or2 = EQ; break;
9478 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9479 case LTGT: or1 = LT; or2 = GT; break;
9480 case UNGT: or1 = UNORDERED; or2 = GT; break;
9481 case UNLT: or1 = UNORDERED; or2 = LT; break;
9484 validate_condition_mode (or1, comp_mode);
9485 validate_condition_mode (or2, comp_mode);
9486 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9487 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9488 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9489 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9491 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9493 compare_result = or_result;
9497 validate_condition_mode (code, GET_MODE (compare_result));
9499 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9503 /* Emit the RTL for an sCOND pattern. */
9506 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9509 enum machine_mode op_mode;
9510 enum rtx_code cond_code;
9512 condition_rtx = rs6000_generate_compare (code);
9513 cond_code = GET_CODE (condition_rtx);
9516 || cond_code == GE || cond_code == LE
9517 || cond_code == GEU || cond_code == LEU
9518 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9520 rtx not_result = gen_reg_rtx (CCEQmode);
9521 rtx not_op, rev_cond_rtx;
9522 enum machine_mode cc_mode;
9524 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9526 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9527 SImode, XEXP (condition_rtx, 0), const0_rtx);
9528 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9529 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9530 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9533 op_mode = GET_MODE (rs6000_compare_op0);
9534 if (op_mode == VOIDmode)
9535 op_mode = GET_MODE (rs6000_compare_op1);
9537 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9539 PUT_MODE (condition_rtx, DImode);
9540 convert_move (result, condition_rtx, 0);
9544 PUT_MODE (condition_rtx, SImode);
9545 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9549 /* Emit a branch of kind CODE to location LOC. */
9552 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9554 rtx condition_rtx, loc_ref;
9556 condition_rtx = rs6000_generate_compare (code);
9557 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9558 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9559 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9563 /* Return the string to output a conditional branch to LABEL, which is
9564 the operand number of the label, or -1 if the branch is really a
9567 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9568 condition code register and its mode specifies what kind of
9571 REVERSED is nonzero if we should reverse the sense of the comparison.
9573 INSN is the insn. */
9576 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9578 static char string[64];
9579 enum rtx_code code = GET_CODE (op);
9580 rtx cc_reg = XEXP (op, 0);
9581 enum machine_mode mode = GET_MODE (cc_reg);
9582 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9583 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9584 int really_reversed = reversed ^ need_longbranch;
9590 validate_condition_mode (code, mode);
9592 /* Work out which way this really branches. We could use
9593 reverse_condition_maybe_unordered here always but this
9594 makes the resulting assembler clearer. */
9595 if (really_reversed)
9597 /* Reversal of FP compares takes care -- an ordered compare
9598 becomes an unordered compare and vice versa. */
9599 if (mode == CCFPmode)
9600 code = reverse_condition_maybe_unordered (code);
9602 code = reverse_condition (code);
9605 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9607 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9610 /* Opposite of GT. */
9612 else if (code == NE)
9620 /* Not all of these are actually distinct opcodes, but
9621 we distinguish them for clarity of the resulting assembler. */
9623 ccode = "ne"; break;
9625 ccode = "eq"; break;
9627 ccode = "ge"; break;
9628 case GT: case GTU: case UNGT:
9629 ccode = "gt"; break;
9631 ccode = "le"; break;
9632 case LT: case LTU: case UNLT:
9633 ccode = "lt"; break;
9634 case UNORDERED: ccode = "un"; break;
9635 case ORDERED: ccode = "nu"; break;
9636 case UNGE: ccode = "nl"; break;
9637 case UNLE: ccode = "ng"; break;
9642 /* Maybe we have a guess as to how likely the branch is.
9643 The old mnemonics don't have a way to specify this information. */
9645 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9646 if (note != NULL_RTX)
9648 /* PROB is the difference from 50%. */
9649 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9650 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9652 /* Only hint for highly probable/improbable branches on newer
9653 cpus as static prediction overrides processor dynamic
9654 prediction. For older cpus we may as well always hint, but
9655 assume not taken for branches that are very close to 50% as a
9656 mispredicted taken branch is more expensive than a
9657 mispredicted not-taken branch. */
9659 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9661 if (abs (prob) > REG_BR_PROB_BASE / 20
9662 && ((prob > 0) ^ need_longbranch))
9670 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9672 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9674 /* We need to escape any '%' characters in the reg_names string.
9675 Assume they'd only be the first character... */
9676 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9678 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9682 /* If the branch distance was too far, we may have to use an
9683 unconditional branch to go the distance. */
9684 if (need_longbranch)
9685 s += sprintf (s, ",$+8\n\tb %s", label);
9687 s += sprintf (s, ",%s", label);
9693 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9694 operands of the last comparison is nonzero/true, FALSE_COND if it
9695 is zero/false. Return 0 if the hardware has no such operation. */
9698 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9700 enum rtx_code code = GET_CODE (op);
9701 rtx op0 = rs6000_compare_op0;
9702 rtx op1 = rs6000_compare_op1;
9704 enum machine_mode compare_mode = GET_MODE (op0);
9705 enum machine_mode result_mode = GET_MODE (dest);
9708 /* These modes should always match. */
9709 if (GET_MODE (op1) != compare_mode
9710 /* In the isel case however, we can use a compare immediate, so
9711 op1 may be a small constant. */
9712 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9714 if (GET_MODE (true_cond) != result_mode)
9716 if (GET_MODE (false_cond) != result_mode)
9719 /* First, work out if the hardware can do this at all, or
9720 if it's too slow... */
9721 if (! rs6000_compare_fp_p)
9724 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9728 /* Eliminate half of the comparisons by switching operands, this
9729 makes the remaining code simpler. */
9730 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9731 || code == LTGT || code == LT || code == UNLE)
9733 code = reverse_condition_maybe_unordered (code);
9735 true_cond = false_cond;
9739 /* UNEQ and LTGT take four instructions for a comparison with zero,
9740 it'll probably be faster to use a branch here too. */
9741 if (code == UNEQ && HONOR_NANS (compare_mode))
9744 if (GET_CODE (op1) == CONST_DOUBLE)
9745 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9747 /* We're going to try to implement comparisons by performing
9748 a subtract, then comparing against zero. Unfortunately,
9749 Inf - Inf is NaN which is not zero, and so if we don't
9750 know that the operand is finite and the comparison
9751 would treat EQ different to UNORDERED, we can't do it. */
9752 if (HONOR_INFINITIES (compare_mode)
9753 && code != GT && code != UNGE
9754 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9755 /* Constructs of the form (a OP b ? a : b) are safe. */
9756 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9757 || (! rtx_equal_p (op0, true_cond)
9758 && ! rtx_equal_p (op1, true_cond))))
9760 /* At this point we know we can use fsel. */
9762 /* Reduce the comparison to a comparison against zero. */
9763 temp = gen_reg_rtx (compare_mode);
9764 emit_insn (gen_rtx_SET (VOIDmode, temp,
9765 gen_rtx_MINUS (compare_mode, op0, op1)));
9767 op1 = CONST0_RTX (compare_mode);
9769 /* If we don't care about NaNs we can reduce some of the comparisons
9770 down to faster ones. */
9771 if (! HONOR_NANS (compare_mode))
9777 true_cond = false_cond;
9790 /* Now, reduce everything down to a GE. */
9797 temp = gen_reg_rtx (compare_mode);
9798 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9803 temp = gen_reg_rtx (compare_mode);
9804 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9809 temp = gen_reg_rtx (compare_mode);
9810 emit_insn (gen_rtx_SET (VOIDmode, temp,
9811 gen_rtx_NEG (compare_mode,
9812 gen_rtx_ABS (compare_mode, op0))));
9817 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9818 temp = gen_reg_rtx (result_mode);
9819 emit_insn (gen_rtx_SET (VOIDmode, temp,
9820 gen_rtx_IF_THEN_ELSE (result_mode,
9821 gen_rtx_GE (VOIDmode,
9823 true_cond, false_cond)));
9824 false_cond = true_cond;
9827 temp = gen_reg_rtx (compare_mode);
9828 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9833 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9834 temp = gen_reg_rtx (result_mode);
9835 emit_insn (gen_rtx_SET (VOIDmode, temp,
9836 gen_rtx_IF_THEN_ELSE (result_mode,
9837 gen_rtx_GE (VOIDmode,
9839 true_cond, false_cond)));
9840 true_cond = false_cond;
9843 temp = gen_reg_rtx (compare_mode);
9844 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9852 emit_insn (gen_rtx_SET (VOIDmode, dest,
9853 gen_rtx_IF_THEN_ELSE (result_mode,
9854 gen_rtx_GE (VOIDmode,
9856 true_cond, false_cond)));
9860 /* Same as above, but for ints (isel). */
9863 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9865 rtx condition_rtx, cr;
9867 /* All isel implementations thus far are 32-bits. */
9868 if (GET_MODE (rs6000_compare_op0) != SImode)
9871 /* We still have to do the compare, because isel doesn't do a
9872 compare, it just looks at the CRx bits set by a previous compare
9874 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9875 cr = XEXP (condition_rtx, 0);
9877 if (GET_MODE (cr) == CCmode)
9878 emit_insn (gen_isel_signed (dest, condition_rtx,
9879 true_cond, false_cond, cr));
9881 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9882 true_cond, false_cond, cr));
9888 output_isel (rtx *operands)
9892 code = GET_CODE (operands[1]);
9893 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9895 PUT_CODE (operands[1], reverse_condition (code));
9896 return "isel %0,%3,%2,%j1";
9899 return "isel %0,%2,%3,%j1";
9903 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
9905 enum machine_mode mode = GET_MODE (op0);
9909 if (code == SMAX || code == SMIN)
9914 if (code == SMAX || code == UMAX)
9915 target = emit_conditional_move (dest, c, op0, op1, mode,
9918 target = emit_conditional_move (dest, c, op0, op1, mode,
9920 if (target == NULL_RTX)
9923 emit_move_insn (dest, target);
9926 /* Called by splitter for multireg moves.
9928 operands[0] : Destination of move
9929 operands[1] : Source of move
9932 operands[2-n] : Destination slots
9933 operands[n-m] : Source slots
9934 where n = 2 + HARD_REGNO_NREGS (reg, GET_MODE (operands[0]))
9935 m = 2 + 2 * HARD_REGNO_NREGS (reg, GET_MODE (operands[0])) - 1
9937 Splits the move of operands[1] to operands[0].
9938 This is done, if GPRs are one of the operands. In this case
9939 a sequence of simple move insns has to be issued. The sequence of these
9940 move insns has to be done in correct order to avoid early clobber of the
9941 base register or destructive overlap of registers.
9945 rs6000_split_multireg_move (rtx *operands)
9947 int nregs, reg, i, j, used_update = 0;
9948 enum machine_mode mode;
9949 rtx dst = operands[0];
9950 rtx src = operands[1];
9953 /* Calculate number to move (2/4 for 32/64 bit mode). */
9955 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9956 mode = GET_MODE (operands[0]);
9957 nregs = HARD_REGNO_NREGS (reg, mode);
9959 if (REG_P (operands[1])
9960 && REG_P (operands[0])
9961 && (REGNO (operands[1]) < REGNO (operands[0])))
9963 /* Move register range backwards, if we have destructive overlap. */
9966 for (i = 0; i < nregs; i++)
9969 operands[i+2] = operand_subword (operands[0], j, 0, mode);
9970 operands[i+2+nregs] =
9971 operand_subword (operands[1], j, 0, mode);
9978 if (GET_CODE (operands[1]) == MEM)
9982 if (GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9983 || GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
9986 breg = XEXP (XEXP (operands[1], 0), 0);
9987 delta_rtx = GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9988 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[1])))
9989 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[1])));
9990 insn = emit_insn (TARGET_32BIT
9991 ? gen_addsi3 (breg, breg, delta_rtx)
9992 : gen_adddi3 (breg, breg, delta_rtx));
9993 src = gen_rtx_MEM (mode, breg);
9996 /* We have now address involving an base register only.
9997 If we use one of the registers to address memory,
9998 we have change that register last. */
10000 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10001 ? XEXP (XEXP (src, 0), 0)
10007 if (REGNO (breg) >= REGNO (dst)
10008 && REGNO (breg) < REGNO (dst) + nregs)
10009 j = REGNO (breg) - REGNO (dst);
10012 if (GET_CODE (operands[0]) == MEM)
10016 if (GET_CODE (XEXP (operands[0], 0)) == PRE_INC
10017 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
10020 breg = XEXP (XEXP (operands[0], 0), 0);
10021 delta_rtx = GET_CODE (XEXP (operands[0], 0)) == PRE_INC
10022 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[0])))
10023 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[0])));
10025 /* We have to update the breg before doing the store.
10026 Use store with update, if available. */
10030 insn = emit_insn (TARGET_32BIT
10031 ? gen_movsi_update (breg, breg, delta_rtx,
10032 operand_subword (src, 0, 0, mode))
10033 : gen_movdi_update (breg, breg, delta_rtx,
10034 operand_subword (src, 0, 0, mode)));
10038 insn = emit_insn (TARGET_32BIT
10039 ? gen_addsi3 (breg, breg, delta_rtx)
10040 : gen_adddi3 (breg, breg, delta_rtx));
10041 dst = gen_rtx_MEM (mode, breg);
10045 for (i = 0; i < nregs; i++)
10047 /* Calculate index to next subword. */
10052 operands[i+2] = operand_subword (dst, j, 0, mode);
10053 operands[i+2+nregs] = operand_subword (src, j, 0, mode);
10055 if (j == 0 && used_update)
10057 /* Already emited move of first word by
10058 store with update -> emit dead insn instead (r := r). */
10059 operands[i+2] = operands[i+2+nregs];
10066 /* This page contains routines that are used to determine what the
10067 function prologue and epilogue code will do and write them out. */
10069 /* Return the first fixed-point register that is required to be
10070 saved. 32 if none. */
10073 first_reg_to_save (void)
10077 /* Find lowest numbered live register. */
10078 for (first_reg = 13; first_reg <= 31; first_reg++)
10079 if (regs_ever_live[first_reg]
10080 && (! call_used_regs[first_reg]
10081 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10082 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10083 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10088 && current_function_uses_pic_offset_table
10089 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10090 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10096 /* Similar, for FP regs. */
10099 first_fp_reg_to_save (void)
10103 /* Find lowest numbered live register. */
10104 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10105 if (regs_ever_live[first_reg])
10111 /* Similar, for AltiVec regs. */
10114 first_altivec_reg_to_save (void)
10118 /* Stack frame remains as is unless we are in AltiVec ABI. */
10119 if (! TARGET_ALTIVEC_ABI)
10120 return LAST_ALTIVEC_REGNO + 1;
10122 /* Find lowest numbered live register. */
10123 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10124 if (regs_ever_live[i])
10130 /* Return a 32-bit mask of the AltiVec registers we need to set in
10131 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10132 the 32-bit word is 0. */
10134 static unsigned int
10135 compute_vrsave_mask (void)
10137 unsigned int i, mask = 0;
10139 /* First, find out if we use _any_ altivec registers. */
10140 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10141 if (regs_ever_live[i])
10142 mask |= ALTIVEC_REG_BIT (i);
10147 /* Next, remove the argument registers from the set. These must
10148 be in the VRSAVE mask set by the caller, so we don't need to add
10149 them in again. More importantly, the mask we compute here is
10150 used to generate CLOBBERs in the set_vrsave insn, and we do not
10151 wish the argument registers to die. */
10152 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10153 mask &= ~ALTIVEC_REG_BIT (i);
10155 /* Similarly, remove the return value from the set. */
10158 diddle_return_value (is_altivec_return_reg, &yes);
10160 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10167 is_altivec_return_reg (rtx reg, void *xyes)
10169 bool *yes = (bool *) xyes;
10170 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10175 /* Calculate the stack information for the current function. This is
10176 complicated by having two separate calling sequences, the AIX calling
10177 sequence and the V.4 calling sequence.
10179 AIX (and Darwin/Mac OS X) stack frames look like:
10181 SP----> +---------------------------------------+
10182 | back chain to caller | 0 0
10183 +---------------------------------------+
10184 | saved CR | 4 8 (8-11)
10185 +---------------------------------------+
10187 +---------------------------------------+
10188 | reserved for compilers | 12 24
10189 +---------------------------------------+
10190 | reserved for binders | 16 32
10191 +---------------------------------------+
10192 | saved TOC pointer | 20 40
10193 +---------------------------------------+
10194 | Parameter save area (P) | 24 48
10195 +---------------------------------------+
10196 | Alloca space (A) | 24+P etc.
10197 +---------------------------------------+
10198 | Local variable space (L) | 24+P+A
10199 +---------------------------------------+
10200 | Float/int conversion temporary (X) | 24+P+A+L
10201 +---------------------------------------+
10202 | Save area for AltiVec registers (W) | 24+P+A+L+X
10203 +---------------------------------------+
10204 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10205 +---------------------------------------+
10206 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10207 +---------------------------------------+
10208 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10209 +---------------------------------------+
10210 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10211 +---------------------------------------+
10212 old SP->| back chain to caller's caller |
10213 +---------------------------------------+
10215 The required alignment for AIX configurations is two words (i.e., 8
10219 V.4 stack frames look like:
10221 SP----> +---------------------------------------+
10222 | back chain to caller | 0
10223 +---------------------------------------+
10224 | caller's saved LR | 4
10225 +---------------------------------------+
10226 | Parameter save area (P) | 8
10227 +---------------------------------------+
10228 | Alloca space (A) | 8+P
10229 +---------------------------------------+
10230 | Varargs save area (V) | 8+P+A
10231 +---------------------------------------+
10232 | Local variable space (L) | 8+P+A+V
10233 +---------------------------------------+
10234 | Float/int conversion temporary (X) | 8+P+A+V+L
10235 +---------------------------------------+
10236 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10237 +---------------------------------------+
10238 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10239 +---------------------------------------+
10240 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10241 +---------------------------------------+
10242 | SPE: area for 64-bit GP registers |
10243 +---------------------------------------+
10244 | SPE alignment padding |
10245 +---------------------------------------+
10246 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10247 +---------------------------------------+
10248 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10249 +---------------------------------------+
10250 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10251 +---------------------------------------+
10252 old SP->| back chain to caller's caller |
10253 +---------------------------------------+
10255 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10256 given. (But note below and in sysv4.h that we require only 8 and
10257 may round up the size of our stack frame anyways. The historical
10258 reason is early versions of powerpc-linux which didn't properly
10259 align the stack at program startup. A happy side-effect is that
10260 -mno-eabi libraries can be used with -meabi programs.)
10262 The EABI configuration defaults to the V.4 layout. However,
10263 the stack alignment requirements may differ. If -mno-eabi is not
10264 given, the required stack alignment is 8 bytes; if -mno-eabi is
10265 given, the required alignment is 16 bytes. (But see V.4 comment
10268 #ifndef ABI_STACK_BOUNDARY
10269 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10273 rs6000_stack_info (void)
10275 static rs6000_stack_t info, zero_info;
10276 rs6000_stack_t *info_ptr = &info;
10277 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10279 int total_raw_size;
10281 /* Zero all fields portably. */
10286 /* Cache value so we don't rescan instruction chain over and over. */
10287 if (cfun->machine->insn_chain_scanned_p == 0)
10289 cfun->machine->insn_chain_scanned_p = 1;
10290 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10294 /* Select which calling sequence. */
10295 info_ptr->abi = DEFAULT_ABI;
10297 /* Calculate which registers need to be saved & save area size. */
10298 info_ptr->first_gp_reg_save = first_reg_to_save ();
10299 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10300 even if it currently looks like we won't. */
10301 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10302 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10303 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10304 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10305 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10307 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10309 /* For the SPE, we have an additional upper 32-bits on each GPR.
10310 Ideally we should save the entire 64-bits only when the upper
10311 half is used in SIMD instructions. Since we only record
10312 registers live (not the size they are used in), this proves
10313 difficult because we'd have to traverse the instruction chain at
10314 the right time, taking reload into account. This is a real pain,
10315 so we opt to save the GPRs in 64-bits always if but one register
10316 gets used in 64-bits. Otherwise, all the registers in the frame
10317 get saved in 32-bits.
10319 So... since when we save all GPRs (except the SP) in 64-bits, the
10320 traditional GP save area will be empty. */
10321 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10322 info_ptr->gp_size = 0;
10324 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10325 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10327 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10328 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10329 - info_ptr->first_altivec_reg_save);
10331 /* Does this function call anything? */
10332 info_ptr->calls_p = (! current_function_is_leaf
10333 || cfun->machine->ra_needs_full_frame);
10335 /* Determine if we need to save the link register. */
10336 if (rs6000_ra_ever_killed ()
10337 || (DEFAULT_ABI == ABI_AIX
10338 && current_function_profile
10339 && !TARGET_PROFILE_KERNEL)
10340 #ifdef TARGET_RELOCATABLE
10341 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10343 || (info_ptr->first_fp_reg_save != 64
10344 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10345 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10346 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10347 || (DEFAULT_ABI == ABI_DARWIN
10349 && current_function_uses_pic_offset_table)
10350 || info_ptr->calls_p)
10352 info_ptr->lr_save_p = 1;
10353 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10356 /* Determine if we need to save the condition code registers. */
10357 if (regs_ever_live[CR2_REGNO]
10358 || regs_ever_live[CR3_REGNO]
10359 || regs_ever_live[CR4_REGNO])
10361 info_ptr->cr_save_p = 1;
10362 if (DEFAULT_ABI == ABI_V4)
10363 info_ptr->cr_size = reg_size;
10366 /* If the current function calls __builtin_eh_return, then we need
10367 to allocate stack space for registers that will hold data for
10368 the exception handler. */
10369 if (current_function_calls_eh_return)
10372 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10375 /* SPE saves EH registers in 64-bits. */
10376 ehrd_size = i * (TARGET_SPE_ABI
10377 && info_ptr->spe_64bit_regs_used != 0
10378 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10383 /* Determine various sizes. */
10384 info_ptr->reg_size = reg_size;
10385 info_ptr->fixed_size = RS6000_SAVE_AREA;
10386 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10387 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10388 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10391 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10392 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10394 info_ptr->spe_gp_size = 0;
10396 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10398 info_ptr->vrsave_mask = compute_vrsave_mask ();
10399 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10403 info_ptr->vrsave_mask = 0;
10404 info_ptr->vrsave_size = 0;
10407 /* Calculate the offsets. */
10408 switch (DEFAULT_ABI)
10416 info_ptr->fp_save_offset = - info_ptr->fp_size;
10417 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10419 if (TARGET_ALTIVEC_ABI)
10421 info_ptr->vrsave_save_offset
10422 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10424 /* Align stack so vector save area is on a quadword boundary. */
10425 if (info_ptr->altivec_size != 0)
10426 info_ptr->altivec_padding_size
10427 = 16 - (-info_ptr->vrsave_save_offset % 16);
10429 info_ptr->altivec_padding_size = 0;
10431 info_ptr->altivec_save_offset
10432 = info_ptr->vrsave_save_offset
10433 - info_ptr->altivec_padding_size
10434 - info_ptr->altivec_size;
10436 /* Adjust for AltiVec case. */
10437 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10440 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10441 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10442 info_ptr->lr_save_offset = 2*reg_size;
10446 info_ptr->fp_save_offset = - info_ptr->fp_size;
10447 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10448 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10450 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10452 /* Align stack so SPE GPR save area is aligned on a
10453 double-word boundary. */
10454 if (info_ptr->spe_gp_size != 0)
10455 info_ptr->spe_padding_size
10456 = 8 - (-info_ptr->cr_save_offset % 8);
10458 info_ptr->spe_padding_size = 0;
10460 info_ptr->spe_gp_save_offset
10461 = info_ptr->cr_save_offset
10462 - info_ptr->spe_padding_size
10463 - info_ptr->spe_gp_size;
10465 /* Adjust for SPE case. */
10466 info_ptr->toc_save_offset
10467 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10469 else if (TARGET_ALTIVEC_ABI)
10471 info_ptr->vrsave_save_offset
10472 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10474 /* Align stack so vector save area is on a quadword boundary. */
10475 if (info_ptr->altivec_size != 0)
10476 info_ptr->altivec_padding_size
10477 = 16 - (-info_ptr->vrsave_save_offset % 16);
10479 info_ptr->altivec_padding_size = 0;
10481 info_ptr->altivec_save_offset
10482 = info_ptr->vrsave_save_offset
10483 - info_ptr->altivec_padding_size
10484 - info_ptr->altivec_size;
10486 /* Adjust for AltiVec case. */
10487 info_ptr->toc_save_offset
10488 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10491 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10492 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10493 info_ptr->lr_save_offset = reg_size;
10497 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10498 + info_ptr->gp_size
10499 + info_ptr->altivec_size
10500 + info_ptr->altivec_padding_size
10501 + info_ptr->spe_gp_size
10502 + info_ptr->spe_padding_size
10504 + info_ptr->cr_size
10505 + info_ptr->lr_size
10506 + info_ptr->vrsave_size
10507 + info_ptr->toc_size,
10508 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10511 total_raw_size = (info_ptr->vars_size
10512 + info_ptr->parm_size
10513 + info_ptr->save_size
10514 + info_ptr->varargs_size
10515 + info_ptr->fixed_size);
10517 info_ptr->total_size =
10518 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10520 /* Determine if we need to allocate any stack frame:
10522 For AIX we need to push the stack if a frame pointer is needed
10523 (because the stack might be dynamically adjusted), if we are
10524 debugging, if we make calls, or if the sum of fp_save, gp_save,
10525 and local variables are more than the space needed to save all
10526 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10527 + 18*8 = 288 (GPR13 reserved).
10529 For V.4 we don't have the stack cushion that AIX uses, but assume
10530 that the debugger can handle stackless frames. */
10532 if (info_ptr->calls_p)
10533 info_ptr->push_p = 1;
10535 else if (DEFAULT_ABI == ABI_V4)
10536 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10538 else if (frame_pointer_needed)
10539 info_ptr->push_p = 1;
10541 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10542 info_ptr->push_p = 1;
10546 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10548 /* Zero offsets if we're not saving those registers. */
10549 if (info_ptr->fp_size == 0)
10550 info_ptr->fp_save_offset = 0;
10552 if (info_ptr->gp_size == 0)
10553 info_ptr->gp_save_offset = 0;
10555 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10556 info_ptr->altivec_save_offset = 0;
10558 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10559 info_ptr->vrsave_save_offset = 0;
10561 if (! TARGET_SPE_ABI
10562 || info_ptr->spe_64bit_regs_used == 0
10563 || info_ptr->spe_gp_size == 0)
10564 info_ptr->spe_gp_save_offset = 0;
10566 if (! info_ptr->lr_save_p)
10567 info_ptr->lr_save_offset = 0;
10569 if (! info_ptr->cr_save_p)
10570 info_ptr->cr_save_offset = 0;
10572 if (! info_ptr->toc_save_p)
10573 info_ptr->toc_save_offset = 0;
10578 /* Return true if the current function uses any GPRs in 64-bit SIMD
10582 spe_func_has_64bit_regs_p (void)
10586 /* Functions that save and restore all the call-saved registers will
10587 need to save/restore the registers in 64-bits. */
10588 if (current_function_calls_eh_return
10589 || current_function_calls_setjmp
10590 || current_function_has_nonlocal_goto)
10593 insns = get_insns ();
10595 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10601 i = PATTERN (insn);
10602 if (GET_CODE (i) == SET
10603 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10612 debug_stack_info (rs6000_stack_t *info)
10614 const char *abi_string;
10617 info = rs6000_stack_info ();
10619 fprintf (stderr, "\nStack information for function %s:\n",
10620 ((current_function_decl && DECL_NAME (current_function_decl))
10621 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10626 default: abi_string = "Unknown"; break;
10627 case ABI_NONE: abi_string = "NONE"; break;
10628 case ABI_AIX: abi_string = "AIX"; break;
10629 case ABI_DARWIN: abi_string = "Darwin"; break;
10630 case ABI_V4: abi_string = "V.4"; break;
10633 fprintf (stderr, "\tABI = %5s\n", abi_string);
10635 if (TARGET_ALTIVEC_ABI)
10636 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10638 if (TARGET_SPE_ABI)
10639 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10641 if (info->first_gp_reg_save != 32)
10642 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10644 if (info->first_fp_reg_save != 64)
10645 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10647 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10648 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10649 info->first_altivec_reg_save);
10651 if (info->lr_save_p)
10652 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10654 if (info->cr_save_p)
10655 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10657 if (info->toc_save_p)
10658 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10660 if (info->vrsave_mask)
10661 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10664 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10667 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10669 if (info->gp_save_offset)
10670 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10672 if (info->fp_save_offset)
10673 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10675 if (info->altivec_save_offset)
10676 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10677 info->altivec_save_offset);
10679 if (info->spe_gp_save_offset)
10680 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10681 info->spe_gp_save_offset);
10683 if (info->vrsave_save_offset)
10684 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10685 info->vrsave_save_offset);
10687 if (info->lr_save_offset)
10688 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10690 if (info->cr_save_offset)
10691 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10693 if (info->toc_save_offset)
10694 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10696 if (info->varargs_save_offset)
10697 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10699 if (info->total_size)
10700 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10702 if (info->varargs_size)
10703 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10705 if (info->vars_size)
10706 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10708 if (info->parm_size)
10709 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10711 if (info->fixed_size)
10712 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10715 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10717 if (info->spe_gp_size)
10718 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10721 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10723 if (info->altivec_size)
10724 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10726 if (info->vrsave_size)
10727 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10729 if (info->altivec_padding_size)
10730 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10731 info->altivec_padding_size);
10733 if (info->spe_padding_size)
10734 fprintf (stderr, "\tspe_padding_size = %5d\n",
10735 info->spe_padding_size);
10738 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10741 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10743 if (info->toc_size)
10744 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10746 if (info->save_size)
10747 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10749 if (info->reg_size != 4)
10750 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10752 fprintf (stderr, "\n");
10756 rs6000_return_addr (int count, rtx frame)
10758 /* Currently we don't optimize very well between prolog and body
10759 code and for PIC code the code can be actually quite bad, so
10760 don't try to be too clever here. */
10761 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10763 cfun->machine->ra_needs_full_frame = 1;
10770 plus_constant (copy_to_reg
10771 (gen_rtx_MEM (Pmode,
10772 memory_address (Pmode, frame))),
10773 RETURN_ADDRESS_OFFSET)));
10776 cfun->machine->ra_need_lr = 1;
10777 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10780 /* Say whether a function is a candidate for sibcall handling or not.
10781 We do not allow indirect calls to be optimized into sibling calls.
10782 Also, we can't do it if there are any vector parameters; there's
10783 nowhere to put the VRsave code so it works; note that functions with
10784 vector parameters are required to have a prototype, so the argument
10785 type info must be available here. (The tail recursion case can work
10786 with vector parameters, but there's no way to distinguish here.) */
10788 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10793 if (TARGET_ALTIVEC_VRSAVE)
10795 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10796 type; type = TREE_CHAIN (type))
10798 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10802 if (DEFAULT_ABI == ABI_DARWIN
10803 || (*targetm.binds_local_p) (decl))
10805 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10807 if (!lookup_attribute ("longcall", attr_list)
10808 || lookup_attribute ("shortcall", attr_list))
10816 rs6000_ra_ever_killed (void)
10822 /* Irritatingly, there are two kinds of thunks -- those created with
10823 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10824 through the regular part of the compiler. This is a very hacky
10825 way to tell them apart. */
10826 if (current_function_is_thunk && !no_new_pseudos)
10829 /* regs_ever_live has LR marked as used if any sibcalls are present,
10830 but this should not force saving and restoring in the
10831 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10832 clobbers LR, so that is inappropriate. */
10834 /* Also, the prologue can generate a store into LR that
10835 doesn't really count, like this:
10838 bcl to set PIC register
10842 When we're called from the epilogue, we need to avoid counting
10843 this as a store. */
10845 push_topmost_sequence ();
10846 top = get_insns ();
10847 pop_topmost_sequence ();
10848 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10850 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10854 if (FIND_REG_INC_NOTE (insn, reg))
10856 else if (GET_CODE (insn) == CALL_INSN
10857 && !SIBLING_CALL_P (insn))
10859 else if (set_of (reg, insn) != NULL_RTX
10860 && !prologue_epilogue_contains (insn))
10867 /* Add a REG_MAYBE_DEAD note to the insn. */
10869 rs6000_maybe_dead (rtx insn)
10871 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10876 /* Emit instructions needed to load the TOC register.
10877 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10878 a constant pool; or for SVR4 -fpic. */
10881 rs6000_emit_load_toc_table (int fromprolog)
10884 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10886 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10888 rtx temp = (fromprolog
10889 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10890 : gen_reg_rtx (Pmode));
10891 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10893 rs6000_maybe_dead (insn);
10894 insn = emit_move_insn (dest, temp);
10896 rs6000_maybe_dead (insn);
10898 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10901 rtx tempLR = (fromprolog
10902 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10903 : gen_reg_rtx (Pmode));
10904 rtx temp0 = (fromprolog
10905 ? gen_rtx_REG (Pmode, 0)
10906 : gen_reg_rtx (Pmode));
10909 /* possibly create the toc section */
10910 if (! toc_initialized)
10913 function_section (current_function_decl);
10920 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10921 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10923 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10924 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10926 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10928 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10929 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10936 static int reload_toc_labelno = 0;
10938 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10940 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10941 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10943 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10944 emit_move_insn (dest, tempLR);
10945 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10947 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10949 rs6000_maybe_dead (insn);
10951 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10953 /* This is for AIX code running in non-PIC ELF32. */
10956 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10957 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10959 insn = emit_insn (gen_elf_high (dest, realsym));
10961 rs6000_maybe_dead (insn);
10962 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10964 rs6000_maybe_dead (insn);
10966 else if (DEFAULT_ABI == ABI_AIX)
10969 insn = emit_insn (gen_load_toc_aix_si (dest));
10971 insn = emit_insn (gen_load_toc_aix_di (dest));
10973 rs6000_maybe_dead (insn);
10980 get_TOC_alias_set (void)
10982 static int set = -1;
10984 set = new_alias_set ();
10988 /* This returns nonzero if the current function uses the TOC. This is
10989 determined by the presence of (unspec ... UNSPEC_TOC) or
10990 use (unspec ... UNSPEC_TOC), which are generated by the various
10991 load_toc_* patterns. */
10998 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11001 rtx pat = PATTERN (insn);
11004 if (GET_CODE (pat) == PARALLEL)
11005 for (i = 0; i < XVECLEN (pat, 0); i++)
11007 rtx sub = XVECEXP (pat, 0, i);
11008 if (GET_CODE (sub) == USE)
11010 sub = XEXP (sub, 0);
11011 if (GET_CODE (sub) == UNSPEC
11012 && XINT (sub, 1) == UNSPEC_TOC)
11021 create_TOC_reference (rtx symbol)
11023 return gen_rtx_PLUS (Pmode,
11024 gen_rtx_REG (Pmode, TOC_REGISTER),
11025 gen_rtx_CONST (Pmode,
11026 gen_rtx_MINUS (Pmode, symbol,
11027 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11030 /* If _Unwind_* has been called from within the same module,
11031 toc register is not guaranteed to be saved to 40(1) on function
11032 entry. Save it there in that case. */
11035 rs6000_aix_emit_builtin_unwind_init (void)
11038 rtx stack_top = gen_reg_rtx (Pmode);
11039 rtx opcode_addr = gen_reg_rtx (Pmode);
11040 rtx opcode = gen_reg_rtx (SImode);
11041 rtx tocompare = gen_reg_rtx (SImode);
11042 rtx no_toc_save_needed = gen_label_rtx ();
11044 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11045 emit_move_insn (stack_top, mem);
11047 mem = gen_rtx_MEM (Pmode,
11048 gen_rtx_PLUS (Pmode, stack_top,
11049 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11050 emit_move_insn (opcode_addr, mem);
11051 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11052 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11053 : 0xE8410028, SImode));
11055 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11056 SImode, NULL_RTX, NULL_RTX,
11057 no_toc_save_needed);
11059 mem = gen_rtx_MEM (Pmode,
11060 gen_rtx_PLUS (Pmode, stack_top,
11061 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11062 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11063 emit_label (no_toc_save_needed);
11066 /* This ties together stack memory (MEM with an alias set of
11067 rs6000_sr_alias_set) and the change to the stack pointer. */
11070 rs6000_emit_stack_tie (void)
11072 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11074 set_mem_alias_set (mem, rs6000_sr_alias_set);
11075 emit_insn (gen_stack_tie (mem));
11078 /* Emit the correct code for allocating stack space, as insns.
11079 If COPY_R12, make sure a copy of the old frame is left in r12.
11080 The generated code may use hard register 0 as a temporary. */
11083 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11086 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11087 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11088 rtx todec = GEN_INT (-size);
11090 if (current_function_limit_stack)
11092 if (REG_P (stack_limit_rtx)
11093 && REGNO (stack_limit_rtx) > 1
11094 && REGNO (stack_limit_rtx) <= 31)
11096 emit_insn (TARGET_32BIT
11097 ? gen_addsi3 (tmp_reg,
11100 : gen_adddi3 (tmp_reg,
11104 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11107 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11109 && DEFAULT_ABI == ABI_V4)
11111 rtx toload = gen_rtx_CONST (VOIDmode,
11112 gen_rtx_PLUS (Pmode,
11116 emit_insn (gen_elf_high (tmp_reg, toload));
11117 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11118 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11122 warning ("stack limit expression is not supported");
11125 if (copy_r12 || ! TARGET_UPDATE)
11126 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11132 /* Need a note here so that try_split doesn't get confused. */
11133 if (get_last_insn() == NULL_RTX)
11134 emit_note (NOTE_INSN_DELETED);
11135 insn = emit_move_insn (tmp_reg, todec);
11136 try_split (PATTERN (insn), insn, 0);
11140 insn = emit_insn (TARGET_32BIT
11141 ? gen_movsi_update (stack_reg, stack_reg,
11143 : gen_movdi_update (stack_reg, stack_reg,
11144 todec, stack_reg));
11148 insn = emit_insn (TARGET_32BIT
11149 ? gen_addsi3 (stack_reg, stack_reg, todec)
11150 : gen_adddi3 (stack_reg, stack_reg, todec));
11151 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11152 gen_rtx_REG (Pmode, 12));
11155 RTX_FRAME_RELATED_P (insn) = 1;
11157 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11158 gen_rtx_SET (VOIDmode, stack_reg,
11159 gen_rtx_PLUS (Pmode, stack_reg,
11164 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11165 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11166 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11167 deduce these equivalences by itself so it wasn't necessary to hold
11168 its hand so much. */
11171 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11172 rtx reg2, rtx rreg)
11176 /* copy_rtx will not make unique copies of registers, so we need to
11177 ensure we don't have unwanted sharing here. */
11179 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11182 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11184 real = copy_rtx (PATTERN (insn));
11186 if (reg2 != NULL_RTX)
11187 real = replace_rtx (real, reg2, rreg);
11189 real = replace_rtx (real, reg,
11190 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11191 STACK_POINTER_REGNUM),
11194 /* We expect that 'real' is either a SET or a PARALLEL containing
11195 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11196 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11198 if (GET_CODE (real) == SET)
11202 temp = simplify_rtx (SET_SRC (set));
11204 SET_SRC (set) = temp;
11205 temp = simplify_rtx (SET_DEST (set));
11207 SET_DEST (set) = temp;
11208 if (GET_CODE (SET_DEST (set)) == MEM)
11210 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11212 XEXP (SET_DEST (set), 0) = temp;
11215 else if (GET_CODE (real) == PARALLEL)
11218 for (i = 0; i < XVECLEN (real, 0); i++)
11219 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11221 rtx set = XVECEXP (real, 0, i);
11223 temp = simplify_rtx (SET_SRC (set));
11225 SET_SRC (set) = temp;
11226 temp = simplify_rtx (SET_DEST (set));
11228 SET_DEST (set) = temp;
11229 if (GET_CODE (SET_DEST (set)) == MEM)
11231 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11233 XEXP (SET_DEST (set), 0) = temp;
11235 RTX_FRAME_RELATED_P (set) = 1;
11242 real = spe_synthesize_frame_save (real);
11244 RTX_FRAME_RELATED_P (insn) = 1;
11245 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11250 /* Given an SPE frame note, return a PARALLEL of SETs with the
11251 original note, plus a synthetic register save. */
11254 spe_synthesize_frame_save (rtx real)
11256 rtx synth, offset, reg, real2;
11258 if (GET_CODE (real) != SET
11259 || GET_MODE (SET_SRC (real)) != V2SImode)
11262 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11263 frame related note. The parallel contains a set of the register
11264 being saved, and another set to a synthetic register (n+1200).
11265 This is so we can differentiate between 64-bit and 32-bit saves.
11266 Words cannot describe this nastiness. */
11268 if (GET_CODE (SET_DEST (real)) != MEM
11269 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11270 || GET_CODE (SET_SRC (real)) != REG)
11274 (set (mem (plus (reg x) (const y)))
11277 (set (mem (plus (reg x) (const y+4)))
11281 real2 = copy_rtx (real);
11282 PUT_MODE (SET_DEST (real2), SImode);
11283 reg = SET_SRC (real2);
11284 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11285 synth = copy_rtx (real2);
11287 if (BYTES_BIG_ENDIAN)
11289 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11290 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11293 reg = SET_SRC (synth);
11295 synth = replace_rtx (synth, reg,
11296 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11298 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11299 synth = replace_rtx (synth, offset,
11300 GEN_INT (INTVAL (offset)
11301 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11303 RTX_FRAME_RELATED_P (synth) = 1;
11304 RTX_FRAME_RELATED_P (real2) = 1;
11305 if (BYTES_BIG_ENDIAN)
11306 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11308 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11313 /* Returns an insn that has a vrsave set operation with the
11314 appropriate CLOBBERs. */
11317 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11320 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11321 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11324 = gen_rtx_SET (VOIDmode,
11326 gen_rtx_UNSPEC_VOLATILE (SImode,
11327 gen_rtvec (2, reg, vrsave),
11332 /* We need to clobber the registers in the mask so the scheduler
11333 does not move sets to VRSAVE before sets of AltiVec registers.
11335 However, if the function receives nonlocal gotos, reload will set
11336 all call saved registers live. We will end up with:
11338 (set (reg 999) (mem))
11339 (parallel [ (set (reg vrsave) (unspec blah))
11340 (clobber (reg 999))])
11342 The clobber will cause the store into reg 999 to be dead, and
11343 flow will attempt to delete an epilogue insn. In this case, we
11344 need an unspec use/set of the register. */
11346 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11347 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11349 if (!epiloguep || call_used_regs [i])
11350 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11351 gen_rtx_REG (V4SImode, i));
11354 rtx reg = gen_rtx_REG (V4SImode, i);
11357 = gen_rtx_SET (VOIDmode,
11359 gen_rtx_UNSPEC (V4SImode,
11360 gen_rtvec (1, reg), 27));
11364 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11366 for (i = 0; i < nclobs; ++i)
11367 XVECEXP (insn, 0, i) = clobs[i];
11372 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11373 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11376 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11377 unsigned int regno, int offset, int total_size)
11379 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11380 rtx replacea, replaceb;
11382 int_rtx = GEN_INT (offset);
11384 /* Some cases that need register indexed addressing. */
11385 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11387 && SPE_VECTOR_MODE (mode)
11388 && !SPE_CONST_OFFSET_OK (offset)))
11390 /* Whomever calls us must make sure r11 is available in the
11391 flow path of instructions in the prologue. */
11392 offset_rtx = gen_rtx_REG (Pmode, 11);
11393 emit_move_insn (offset_rtx, int_rtx);
11395 replacea = offset_rtx;
11396 replaceb = int_rtx;
11400 offset_rtx = int_rtx;
11401 replacea = NULL_RTX;
11402 replaceb = NULL_RTX;
11405 reg = gen_rtx_REG (mode, regno);
11406 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11407 mem = gen_rtx_MEM (mode, addr);
11408 set_mem_alias_set (mem, rs6000_sr_alias_set);
11410 insn = emit_move_insn (mem, reg);
11412 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11415 /* Emit an offset memory reference suitable for a frame store, while
11416 converting to a valid addressing mode. */
11419 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11421 rtx int_rtx, offset_rtx;
11423 int_rtx = GEN_INT (offset);
11425 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11427 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11428 emit_move_insn (offset_rtx, int_rtx);
11431 offset_rtx = int_rtx;
11433 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11436 /* Emit function prologue as insns. */
11439 rs6000_emit_prologue (void)
11441 rs6000_stack_t *info = rs6000_stack_info ();
11442 enum machine_mode reg_mode = Pmode;
11443 int reg_size = UNITS_PER_WORD;
11444 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11445 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11446 rtx frame_reg_rtx = sp_reg_rtx;
11447 rtx cr_save_rtx = NULL_RTX;
11449 int saving_FPRs_inline;
11450 int using_store_multiple;
11451 HOST_WIDE_INT sp_offset = 0;
11453 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11455 reg_mode = V2SImode;
11459 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11460 && (!TARGET_SPE_ABI
11461 || info->spe_64bit_regs_used == 0)
11462 && info->first_gp_reg_save < 31);
11463 saving_FPRs_inline = (info->first_fp_reg_save == 64
11464 || FP_SAVE_INLINE (info->first_fp_reg_save)
11465 || current_function_calls_eh_return
11466 || cfun->machine->ra_need_lr);
11468 /* For V.4, update stack before we do any saving and set back pointer. */
11470 && (DEFAULT_ABI == ABI_V4
11471 || current_function_calls_eh_return))
11473 if (info->total_size < 32767)
11474 sp_offset = info->total_size;
11476 frame_reg_rtx = frame_ptr_rtx;
11477 rs6000_emit_allocate_stack (info->total_size,
11478 (frame_reg_rtx != sp_reg_rtx
11479 && (info->cr_save_p
11481 || info->first_fp_reg_save < 64
11482 || info->first_gp_reg_save < 32
11484 if (frame_reg_rtx != sp_reg_rtx)
11485 rs6000_emit_stack_tie ();
11488 /* Save AltiVec registers if needed. */
11489 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11493 /* There should be a non inline version of this, for when we
11494 are saving lots of vector registers. */
11495 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11496 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11498 rtx areg, savereg, mem;
11501 offset = info->altivec_save_offset + sp_offset
11502 + 16 * (i - info->first_altivec_reg_save);
11504 savereg = gen_rtx_REG (V4SImode, i);
11506 areg = gen_rtx_REG (Pmode, 0);
11507 emit_move_insn (areg, GEN_INT (offset));
11509 /* AltiVec addressing mode is [reg+reg]. */
11510 mem = gen_rtx_MEM (V4SImode,
11511 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11513 set_mem_alias_set (mem, rs6000_sr_alias_set);
11515 insn = emit_move_insn (mem, savereg);
11517 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11518 areg, GEN_INT (offset));
11522 /* VRSAVE is a bit vector representing which AltiVec registers
11523 are used. The OS uses this to determine which vector
11524 registers to save on a context switch. We need to save
11525 VRSAVE on the stack frame, add whatever AltiVec registers we
11526 used in this function, and do the corresponding magic in the
11529 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11531 rtx reg, mem, vrsave;
11534 /* Get VRSAVE onto a GPR. */
11535 reg = gen_rtx_REG (SImode, 12);
11536 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11538 emit_insn (gen_get_vrsave_internal (reg));
11540 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11543 offset = info->vrsave_save_offset + sp_offset;
11545 = gen_rtx_MEM (SImode,
11546 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11547 set_mem_alias_set (mem, rs6000_sr_alias_set);
11548 insn = emit_move_insn (mem, reg);
11550 /* Include the registers in the mask. */
11551 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11553 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11556 /* If we use the link register, get it into r0. */
11557 if (info->lr_save_p)
11558 emit_move_insn (gen_rtx_REG (Pmode, 0),
11559 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11561 /* If we need to save CR, put it into r12. */
11562 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11564 cr_save_rtx = gen_rtx_REG (SImode, 12);
11565 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11568 /* Do any required saving of fpr's. If only one or two to save, do
11569 it ourselves. Otherwise, call function. */
11570 if (saving_FPRs_inline)
11573 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11574 if ((regs_ever_live[info->first_fp_reg_save+i]
11575 && ! call_used_regs[info->first_fp_reg_save+i]))
11576 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11577 info->first_fp_reg_save + i,
11578 info->fp_save_offset + sp_offset + 8 * i,
11581 else if (info->first_fp_reg_save != 64)
11585 const char *alloc_rname;
11587 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11589 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11590 gen_rtx_REG (Pmode,
11591 LINK_REGISTER_REGNUM));
11592 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11593 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11594 alloc_rname = ggc_strdup (rname);
11595 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11596 gen_rtx_SYMBOL_REF (Pmode,
11598 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11600 rtx addr, reg, mem;
11601 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11602 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11603 GEN_INT (info->fp_save_offset
11604 + sp_offset + 8*i));
11605 mem = gen_rtx_MEM (DFmode, addr);
11606 set_mem_alias_set (mem, rs6000_sr_alias_set);
11608 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11610 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11611 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11612 NULL_RTX, NULL_RTX);
11615 /* Save GPRs. This is done as a PARALLEL if we are using
11616 the store-multiple instructions. */
11617 if (using_store_multiple)
11621 p = rtvec_alloc (32 - info->first_gp_reg_save);
11622 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11624 rtx addr, reg, mem;
11625 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11626 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11627 GEN_INT (info->gp_save_offset
11630 mem = gen_rtx_MEM (reg_mode, addr);
11631 set_mem_alias_set (mem, rs6000_sr_alias_set);
11633 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11635 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11636 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11637 NULL_RTX, NULL_RTX);
11642 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11643 if ((regs_ever_live[info->first_gp_reg_save+i]
11644 && ! call_used_regs[info->first_gp_reg_save+i])
11645 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11646 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11647 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11649 rtx addr, reg, mem;
11650 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11652 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11654 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11657 if (!SPE_CONST_OFFSET_OK (offset))
11659 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11660 emit_move_insn (b, GEN_INT (offset));
11663 b = GEN_INT (offset);
11665 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11666 mem = gen_rtx_MEM (V2SImode, addr);
11667 set_mem_alias_set (mem, rs6000_sr_alias_set);
11668 insn = emit_move_insn (mem, reg);
11670 if (GET_CODE (b) == CONST_INT)
11671 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11672 NULL_RTX, NULL_RTX);
11674 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11675 b, GEN_INT (offset));
11679 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11680 GEN_INT (info->gp_save_offset
11683 mem = gen_rtx_MEM (reg_mode, addr);
11684 set_mem_alias_set (mem, rs6000_sr_alias_set);
11686 insn = emit_move_insn (mem, reg);
11687 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11688 NULL_RTX, NULL_RTX);
11693 /* ??? There's no need to emit actual instructions here, but it's the
11694 easiest way to get the frame unwind information emitted. */
11695 if (current_function_calls_eh_return)
11697 unsigned int i, regno;
11699 /* In AIX ABI we need to pretend we save r2 here. */
11702 rtx addr, reg, mem;
11704 reg = gen_rtx_REG (reg_mode, 2);
11705 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11706 GEN_INT (sp_offset + 5 * reg_size));
11707 mem = gen_rtx_MEM (reg_mode, addr);
11708 set_mem_alias_set (mem, rs6000_sr_alias_set);
11710 insn = emit_move_insn (mem, reg);
11711 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11712 NULL_RTX, NULL_RTX);
11713 PATTERN (insn) = gen_blockage ();
11718 regno = EH_RETURN_DATA_REGNO (i);
11719 if (regno == INVALID_REGNUM)
11722 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11723 info->ehrd_offset + sp_offset
11724 + reg_size * (int) i,
11729 /* Save lr if we used it. */
11730 if (info->lr_save_p)
11732 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11733 GEN_INT (info->lr_save_offset + sp_offset));
11734 rtx reg = gen_rtx_REG (Pmode, 0);
11735 rtx mem = gen_rtx_MEM (Pmode, addr);
11736 /* This should not be of rs6000_sr_alias_set, because of
11737 __builtin_return_address. */
11739 insn = emit_move_insn (mem, reg);
11740 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11741 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11744 /* Save CR if we use any that must be preserved. */
11745 if (info->cr_save_p)
11747 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11748 GEN_INT (info->cr_save_offset + sp_offset));
11749 rtx mem = gen_rtx_MEM (SImode, addr);
11751 set_mem_alias_set (mem, rs6000_sr_alias_set);
11753 /* If r12 was used to hold the original sp, copy cr into r0 now
11755 if (REGNO (frame_reg_rtx) == 12)
11757 cr_save_rtx = gen_rtx_REG (SImode, 0);
11758 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11760 insn = emit_move_insn (mem, cr_save_rtx);
11762 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11763 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11764 But that's OK. All we have to do is specify that _one_ condition
11765 code register is saved in this stack slot. The thrower's epilogue
11766 will then restore all the call-saved registers.
11767 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11768 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11769 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11772 /* Update stack and set back pointer unless this is V.4,
11773 for which it was done previously. */
11775 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11776 rs6000_emit_allocate_stack (info->total_size, FALSE);
11778 /* Set frame pointer, if needed. */
11779 if (frame_pointer_needed)
11781 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11783 RTX_FRAME_RELATED_P (insn) = 1;
11786 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11787 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11788 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11789 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11791 /* If emit_load_toc_table will use the link register, we need to save
11792 it. We use R12 for this purpose because emit_load_toc_table
11793 can use register 0. This allows us to use a plain 'blr' to return
11794 from the procedure more often. */
11795 int save_LR_around_toc_setup = (TARGET_ELF
11796 && DEFAULT_ABI != ABI_AIX
11798 && ! info->lr_save_p
11799 && EXIT_BLOCK_PTR->pred != NULL);
11800 if (save_LR_around_toc_setup)
11802 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11803 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11804 rs6000_emit_load_toc_table (TRUE);
11805 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11808 rs6000_emit_load_toc_table (TRUE);
11812 if (DEFAULT_ABI == ABI_DARWIN
11813 && flag_pic && current_function_uses_pic_offset_table)
11815 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11816 const char *picbase = machopic_function_base_name ();
11817 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11819 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11821 rs6000_maybe_dead (
11822 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11823 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11828 /* Write function prologue. */
11831 rs6000_output_function_prologue (FILE *file,
11832 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11834 rs6000_stack_t *info = rs6000_stack_info ();
11836 if (TARGET_DEBUG_STACK)
11837 debug_stack_info (info);
11839 /* Write .extern for any function we will call to save and restore
11841 if (info->first_fp_reg_save < 64
11842 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11843 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11844 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11845 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11846 RESTORE_FP_SUFFIX);
11848 /* Write .extern for AIX common mode routines, if needed. */
11849 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11851 fputs ("\t.extern __mulh\n", file);
11852 fputs ("\t.extern __mull\n", file);
11853 fputs ("\t.extern __divss\n", file);
11854 fputs ("\t.extern __divus\n", file);
11855 fputs ("\t.extern __quoss\n", file);
11856 fputs ("\t.extern __quous\n", file);
11857 common_mode_defined = 1;
11860 if (! HAVE_prologue)
11864 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11865 the "toplevel" insn chain. */
11866 emit_note (NOTE_INSN_DELETED);
11867 rs6000_emit_prologue ();
11868 emit_note (NOTE_INSN_DELETED);
11870 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11874 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11876 INSN_ADDRESSES_NEW (insn, addr);
11881 if (TARGET_DEBUG_STACK)
11882 debug_rtx_list (get_insns (), 100);
11883 final (get_insns (), file, FALSE, FALSE);
11887 rs6000_pic_labelno++;
11890 /* Emit function epilogue as insns.
11892 At present, dwarf2out_frame_debug_expr doesn't understand
11893 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11894 anywhere in the epilogue. Most of the insns below would in any case
11895 need special notes to explain where r11 is in relation to the stack. */
11898 rs6000_emit_epilogue (int sibcall)
11900 rs6000_stack_t *info;
11901 int restoring_FPRs_inline;
11902 int using_load_multiple;
11903 int using_mfcr_multiple;
11904 int use_backchain_to_restore_sp;
11906 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11907 rtx frame_reg_rtx = sp_reg_rtx;
11908 enum machine_mode reg_mode = Pmode;
11909 int reg_size = UNITS_PER_WORD;
11912 info = rs6000_stack_info ();
11914 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11916 reg_mode = V2SImode;
11920 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11921 && (!TARGET_SPE_ABI
11922 || info->spe_64bit_regs_used == 0)
11923 && info->first_gp_reg_save < 31);
11924 restoring_FPRs_inline = (sibcall
11925 || current_function_calls_eh_return
11926 || info->first_fp_reg_save == 64
11927 || FP_SAVE_INLINE (info->first_fp_reg_save));
11928 use_backchain_to_restore_sp = (frame_pointer_needed
11929 || current_function_calls_alloca
11930 || info->total_size > 32767);
11931 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11932 || rs6000_cpu == PROCESSOR_PPC603
11933 || rs6000_cpu == PROCESSOR_PPC750
11936 /* If we have a frame pointer, a call to alloca, or a large stack
11937 frame, restore the old stack pointer using the backchain. Otherwise,
11938 we know what size to update it with. */
11939 if (use_backchain_to_restore_sp)
11941 /* Under V.4, don't reset the stack pointer until after we're done
11942 loading the saved registers. */
11943 if (DEFAULT_ABI == ABI_V4)
11944 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11946 emit_move_insn (frame_reg_rtx,
11947 gen_rtx_MEM (Pmode, sp_reg_rtx));
11950 else if (info->push_p)
11952 if (DEFAULT_ABI == ABI_V4
11953 || current_function_calls_eh_return)
11954 sp_offset = info->total_size;
11957 emit_insn (TARGET_32BIT
11958 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11959 GEN_INT (info->total_size))
11960 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11961 GEN_INT (info->total_size)));
11965 /* Restore AltiVec registers if needed. */
11966 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11970 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11971 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11973 rtx addr, areg, mem;
11975 areg = gen_rtx_REG (Pmode, 0);
11977 (areg, GEN_INT (info->altivec_save_offset
11979 + 16 * (i - info->first_altivec_reg_save)));
11981 /* AltiVec addressing mode is [reg+reg]. */
11982 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11983 mem = gen_rtx_MEM (V4SImode, addr);
11984 set_mem_alias_set (mem, rs6000_sr_alias_set);
11986 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11990 /* Restore VRSAVE if needed. */
11991 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11993 rtx addr, mem, reg;
11995 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11996 GEN_INT (info->vrsave_save_offset + sp_offset));
11997 mem = gen_rtx_MEM (SImode, addr);
11998 set_mem_alias_set (mem, rs6000_sr_alias_set);
11999 reg = gen_rtx_REG (SImode, 12);
12000 emit_move_insn (reg, mem);
12002 emit_insn (generate_set_vrsave (reg, info, 1));
12005 /* Get the old lr if we saved it. */
12006 if (info->lr_save_p)
12008 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12009 info->lr_save_offset + sp_offset);
12011 set_mem_alias_set (mem, rs6000_sr_alias_set);
12013 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12016 /* Get the old cr if we saved it. */
12017 if (info->cr_save_p)
12019 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12020 GEN_INT (info->cr_save_offset + sp_offset));
12021 rtx mem = gen_rtx_MEM (SImode, addr);
12023 set_mem_alias_set (mem, rs6000_sr_alias_set);
12025 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12028 /* Set LR here to try to overlap restores below. */
12029 if (info->lr_save_p)
12030 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12031 gen_rtx_REG (Pmode, 0));
12033 /* Load exception handler data registers, if needed. */
12034 if (current_function_calls_eh_return)
12036 unsigned int i, regno;
12040 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12041 GEN_INT (sp_offset + 5 * reg_size));
12042 rtx mem = gen_rtx_MEM (reg_mode, addr);
12044 set_mem_alias_set (mem, rs6000_sr_alias_set);
12046 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12053 regno = EH_RETURN_DATA_REGNO (i);
12054 if (regno == INVALID_REGNUM)
12057 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12058 info->ehrd_offset + sp_offset
12059 + reg_size * (int) i);
12060 set_mem_alias_set (mem, rs6000_sr_alias_set);
12062 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12066 /* Restore GPRs. This is done as a PARALLEL if we are using
12067 the load-multiple instructions. */
12068 if (using_load_multiple)
12071 p = rtvec_alloc (32 - info->first_gp_reg_save);
12072 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12074 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12075 GEN_INT (info->gp_save_offset
12078 rtx mem = gen_rtx_MEM (reg_mode, addr);
12080 set_mem_alias_set (mem, rs6000_sr_alias_set);
12083 gen_rtx_SET (VOIDmode,
12084 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12087 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12090 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12091 if ((regs_ever_live[info->first_gp_reg_save+i]
12092 && ! call_used_regs[info->first_gp_reg_save+i])
12093 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12094 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12095 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12097 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12098 GEN_INT (info->gp_save_offset
12101 rtx mem = gen_rtx_MEM (reg_mode, addr);
12103 /* Restore 64-bit quantities for SPE. */
12104 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12106 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12109 if (!SPE_CONST_OFFSET_OK (offset))
12111 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12112 emit_move_insn (b, GEN_INT (offset));
12115 b = GEN_INT (offset);
12117 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12118 mem = gen_rtx_MEM (V2SImode, addr);
12121 set_mem_alias_set (mem, rs6000_sr_alias_set);
12123 emit_move_insn (gen_rtx_REG (reg_mode,
12124 info->first_gp_reg_save + i), mem);
12127 /* Restore fpr's if we need to do it without calling a function. */
12128 if (restoring_FPRs_inline)
12129 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12130 if ((regs_ever_live[info->first_fp_reg_save+i]
12131 && ! call_used_regs[info->first_fp_reg_save+i]))
12134 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12135 GEN_INT (info->fp_save_offset
12138 mem = gen_rtx_MEM (DFmode, addr);
12139 set_mem_alias_set (mem, rs6000_sr_alias_set);
12141 emit_move_insn (gen_rtx_REG (DFmode,
12142 info->first_fp_reg_save + i),
12146 /* If we saved cr, restore it here. Just those that were used. */
12147 if (info->cr_save_p)
12149 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12152 if (using_mfcr_multiple)
12154 for (i = 0; i < 8; i++)
12155 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12161 if (using_mfcr_multiple && count > 1)
12166 p = rtvec_alloc (count);
12169 for (i = 0; i < 8; i++)
12170 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12172 rtvec r = rtvec_alloc (2);
12173 RTVEC_ELT (r, 0) = r12_rtx;
12174 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12175 RTVEC_ELT (p, ndx) =
12176 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12177 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12180 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12185 for (i = 0; i < 8; i++)
12186 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12188 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12194 /* If this is V.4, unwind the stack pointer after all of the loads
12195 have been done. We need to emit a block here so that sched
12196 doesn't decide to move the sp change before the register restores
12197 (which may not have any obvious dependency on the stack). This
12198 doesn't hurt performance, because there is no scheduling that can
12199 be done after this point. */
12200 if (DEFAULT_ABI == ABI_V4
12201 || current_function_calls_eh_return)
12203 if (frame_reg_rtx != sp_reg_rtx)
12204 rs6000_emit_stack_tie ();
12206 if (use_backchain_to_restore_sp)
12208 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12210 else if (sp_offset != 0)
12212 emit_insn (TARGET_32BIT
12213 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12214 GEN_INT (sp_offset))
12215 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12216 GEN_INT (sp_offset)));
12220 if (current_function_calls_eh_return)
12222 rtx sa = EH_RETURN_STACKADJ_RTX;
12223 emit_insn (TARGET_32BIT
12224 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12225 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12231 if (! restoring_FPRs_inline)
12232 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12234 p = rtvec_alloc (2);
12236 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12237 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12238 gen_rtx_REG (Pmode,
12239 LINK_REGISTER_REGNUM));
12241 /* If we have to restore more than two FP registers, branch to the
12242 restore function. It will return to our caller. */
12243 if (! restoring_FPRs_inline)
12247 const char *alloc_rname;
12249 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12250 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12251 alloc_rname = ggc_strdup (rname);
12252 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12253 gen_rtx_SYMBOL_REF (Pmode,
12256 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12259 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12260 GEN_INT (info->fp_save_offset + 8*i));
12261 mem = gen_rtx_MEM (DFmode, addr);
12262 set_mem_alias_set (mem, rs6000_sr_alias_set);
12264 RTVEC_ELT (p, i+3) =
12265 gen_rtx_SET (VOIDmode,
12266 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12271 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12275 /* Write function epilogue. */
12278 rs6000_output_function_epilogue (FILE *file,
12279 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12281 rs6000_stack_t *info = rs6000_stack_info ();
12283 if (! HAVE_epilogue)
12285 rtx insn = get_last_insn ();
12286 /* If the last insn was a BARRIER, we don't have to write anything except
12287 the trace table. */
12288 if (GET_CODE (insn) == NOTE)
12289 insn = prev_nonnote_insn (insn);
12290 if (insn == 0 || GET_CODE (insn) != BARRIER)
12292 /* This is slightly ugly, but at least we don't have two
12293 copies of the epilogue-emitting code. */
12296 /* A NOTE_INSN_DELETED is supposed to be at the start
12297 and end of the "toplevel" insn chain. */
12298 emit_note (NOTE_INSN_DELETED);
12299 rs6000_emit_epilogue (FALSE);
12300 emit_note (NOTE_INSN_DELETED);
12302 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12306 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12308 INSN_ADDRESSES_NEW (insn, addr);
12313 if (TARGET_DEBUG_STACK)
12314 debug_rtx_list (get_insns (), 100);
12315 final (get_insns (), file, FALSE, FALSE);
12320 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
12321 /* Mach-O doesn't support labels at the end of objects, so if
12322 it looks like we might want one, insert a NOP. */
12324 rtx insn = get_last_insn ();
12327 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12328 insn = PREV_INSN (insn);
12332 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12333 fputs ("\tnop\n", file);
12337 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12340 We don't output a traceback table if -finhibit-size-directive was
12341 used. The documentation for -finhibit-size-directive reads
12342 ``don't output a @code{.size} assembler directive, or anything
12343 else that would cause trouble if the function is split in the
12344 middle, and the two halves are placed at locations far apart in
12345 memory.'' The traceback table has this property, since it
12346 includes the offset from the start of the function to the
12347 traceback table itself.
12349 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12350 different traceback table. */
12351 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12352 && rs6000_traceback != traceback_none)
12354 const char *fname = NULL;
12355 const char *language_string = lang_hooks.name;
12356 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12358 int optional_tbtab;
12360 if (rs6000_traceback == traceback_full)
12361 optional_tbtab = 1;
12362 else if (rs6000_traceback == traceback_part)
12363 optional_tbtab = 0;
12365 optional_tbtab = !optimize_size && !TARGET_ELF;
12367 if (optional_tbtab)
12369 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12370 while (*fname == '.') /* V.4 encodes . in the name */
12373 /* Need label immediately before tbtab, so we can compute
12374 its offset from the function start. */
12375 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12376 ASM_OUTPUT_LABEL (file, fname);
12379 /* The .tbtab pseudo-op can only be used for the first eight
12380 expressions, since it can't handle the possibly variable
12381 length fields that follow. However, if you omit the optional
12382 fields, the assembler outputs zeros for all optional fields
12383 anyways, giving each variable length field is minimum length
12384 (as defined in sys/debug.h). Thus we can not use the .tbtab
12385 pseudo-op at all. */
12387 /* An all-zero word flags the start of the tbtab, for debuggers
12388 that have to find it by searching forward from the entry
12389 point or from the current pc. */
12390 fputs ("\t.long 0\n", file);
12392 /* Tbtab format type. Use format type 0. */
12393 fputs ("\t.byte 0,", file);
12395 /* Language type. Unfortunately, there doesn't seem to be any
12396 official way to get this info, so we use language_string. C
12397 is 0. C++ is 9. No number defined for Obj-C, so use the
12398 value for C for now. There is no official value for Java,
12399 although IBM appears to be using 13. There is no official value
12400 for Chill, so we've chosen 44 pseudo-randomly. */
12401 if (! strcmp (language_string, "GNU C")
12402 || ! strcmp (language_string, "GNU Objective-C"))
12404 else if (! strcmp (language_string, "GNU F77"))
12406 else if (! strcmp (language_string, "GNU Ada"))
12408 else if (! strcmp (language_string, "GNU Pascal"))
12410 else if (! strcmp (language_string, "GNU C++"))
12412 else if (! strcmp (language_string, "GNU Java"))
12414 else if (! strcmp (language_string, "GNU CHILL"))
12418 fprintf (file, "%d,", i);
12420 /* 8 single bit fields: global linkage (not set for C extern linkage,
12421 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12422 from start of procedure stored in tbtab, internal function, function
12423 has controlled storage, function has no toc, function uses fp,
12424 function logs/aborts fp operations. */
12425 /* Assume that fp operations are used if any fp reg must be saved. */
12426 fprintf (file, "%d,",
12427 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12429 /* 6 bitfields: function is interrupt handler, name present in
12430 proc table, function calls alloca, on condition directives
12431 (controls stack walks, 3 bits), saves condition reg, saves
12433 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12434 set up as a frame pointer, even when there is no alloca call. */
12435 fprintf (file, "%d,",
12436 ((optional_tbtab << 6)
12437 | ((optional_tbtab & frame_pointer_needed) << 5)
12438 | (info->cr_save_p << 1)
12439 | (info->lr_save_p)));
12441 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12443 fprintf (file, "%d,",
12444 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12446 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12447 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12449 if (optional_tbtab)
12451 /* Compute the parameter info from the function decl argument
12454 int next_parm_info_bit = 31;
12456 for (decl = DECL_ARGUMENTS (current_function_decl);
12457 decl; decl = TREE_CHAIN (decl))
12459 rtx parameter = DECL_INCOMING_RTL (decl);
12460 enum machine_mode mode = GET_MODE (parameter);
12462 if (GET_CODE (parameter) == REG)
12464 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12470 if (mode == SFmode)
12472 else if (mode == DFmode || mode == TFmode)
12477 /* If only one bit will fit, don't or in this entry. */
12478 if (next_parm_info_bit > 0)
12479 parm_info |= (bits << (next_parm_info_bit - 1));
12480 next_parm_info_bit -= 2;
12484 fixed_parms += ((GET_MODE_SIZE (mode)
12485 + (UNITS_PER_WORD - 1))
12487 next_parm_info_bit -= 1;
12493 /* Number of fixed point parameters. */
12494 /* This is actually the number of words of fixed point parameters; thus
12495 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12496 fprintf (file, "%d,", fixed_parms);
12498 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12500 /* This is actually the number of fp registers that hold parameters;
12501 and thus the maximum value is 13. */
12502 /* Set parameters on stack bit if parameters are not in their original
12503 registers, regardless of whether they are on the stack? Xlc
12504 seems to set the bit when not optimizing. */
12505 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12507 if (! optional_tbtab)
12510 /* Optional fields follow. Some are variable length. */
12512 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12513 11 double float. */
12514 /* There is an entry for each parameter in a register, in the order that
12515 they occur in the parameter list. Any intervening arguments on the
12516 stack are ignored. If the list overflows a long (max possible length
12517 34 bits) then completely leave off all elements that don't fit. */
12518 /* Only emit this long if there was at least one parameter. */
12519 if (fixed_parms || float_parms)
12520 fprintf (file, "\t.long %d\n", parm_info);
12522 /* Offset from start of code to tb table. */
12523 fputs ("\t.long ", file);
12524 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12526 RS6000_OUTPUT_BASENAME (file, fname);
12528 assemble_name (file, fname);
12530 fputs ("-.", file);
12532 RS6000_OUTPUT_BASENAME (file, fname);
12534 assemble_name (file, fname);
12538 /* Interrupt handler mask. */
12539 /* Omit this long, since we never set the interrupt handler bit
12542 /* Number of CTL (controlled storage) anchors. */
12543 /* Omit this long, since the has_ctl bit is never set above. */
12545 /* Displacement into stack of each CTL anchor. */
12546 /* Omit this list of longs, because there are no CTL anchors. */
12548 /* Length of function name. */
12551 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12553 /* Function name. */
12554 assemble_string (fname, strlen (fname));
12556 /* Register for alloca automatic storage; this is always reg 31.
12557 Only emit this if the alloca bit was set above. */
12558 if (frame_pointer_needed)
12559 fputs ("\t.byte 31\n", file);
12561 fputs ("\t.align 2\n", file);
12565 /* A C compound statement that outputs the assembler code for a thunk
12566 function, used to implement C++ virtual function calls with
12567 multiple inheritance. The thunk acts as a wrapper around a virtual
12568 function, adjusting the implicit object parameter before handing
12569 control off to the real function.
12571 First, emit code to add the integer DELTA to the location that
12572 contains the incoming first argument. Assume that this argument
12573 contains a pointer, and is the one used to pass the `this' pointer
12574 in C++. This is the incoming argument *before* the function
12575 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12576 values of all other incoming arguments.
12578 After the addition, emit code to jump to FUNCTION, which is a
12579 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12580 not touch the return address. Hence returning from FUNCTION will
12581 return to whoever called the current `thunk'.
12583 The effect must be as if FUNCTION had been called directly with the
12584 adjusted first argument. This macro is responsible for emitting
12585 all of the code for a thunk function; output_function_prologue()
12586 and output_function_epilogue() are not invoked.
12588 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12589 been extracted from it.) It might possibly be useful on some
12590 targets, but probably not.
12592 If you do not define this macro, the target-independent code in the
12593 C++ frontend will generate a less efficient heavyweight thunk that
12594 calls FUNCTION instead of jumping to it. The generic approach does
12595 not support varargs. */
12598 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12599 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12602 rtx this, insn, funexp;
12604 reload_completed = 1;
12605 epilogue_completed = 1;
12606 no_new_pseudos = 1;
12608 /* Mark the end of the (empty) prologue. */
12609 emit_note (NOTE_INSN_PROLOGUE_END);
12611 /* Find the "this" pointer. If the function returns a structure,
12612 the structure return pointer is in r3. */
12613 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12614 this = gen_rtx_REG (Pmode, 4);
12616 this = gen_rtx_REG (Pmode, 3);
12618 /* Apply the constant offset, if required. */
12621 rtx delta_rtx = GEN_INT (delta);
12622 emit_insn (TARGET_32BIT
12623 ? gen_addsi3 (this, this, delta_rtx)
12624 : gen_adddi3 (this, this, delta_rtx));
12627 /* Apply the offset from the vtable, if required. */
12630 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12631 rtx tmp = gen_rtx_REG (Pmode, 12);
12633 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12634 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12636 emit_insn (TARGET_32BIT
12637 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12638 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12639 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12643 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12645 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12647 emit_insn (TARGET_32BIT
12648 ? gen_addsi3 (this, this, tmp)
12649 : gen_adddi3 (this, this, tmp));
12652 /* Generate a tail call to the target function. */
12653 if (!TREE_USED (function))
12655 assemble_external (function);
12656 TREE_USED (function) = 1;
12658 funexp = XEXP (DECL_RTL (function), 0);
12659 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12662 if (MACHOPIC_INDIRECT)
12663 funexp = machopic_indirect_call_target (funexp);
12666 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12667 generate sibcall RTL explicitly to avoid constraint abort. */
12668 insn = emit_call_insn (
12669 gen_rtx_PARALLEL (VOIDmode,
12671 gen_rtx_CALL (VOIDmode,
12672 funexp, const0_rtx),
12673 gen_rtx_USE (VOIDmode, const0_rtx),
12674 gen_rtx_USE (VOIDmode,
12675 gen_rtx_REG (SImode,
12676 LINK_REGISTER_REGNUM)),
12677 gen_rtx_RETURN (VOIDmode))));
12678 SIBLING_CALL_P (insn) = 1;
12681 /* Run just enough of rest_of_compilation to get the insns emitted.
12682 There's not really enough bulk here to make other passes such as
12683 instruction scheduling worth while. Note that use_thunk calls
12684 assemble_start_function and assemble_end_function. */
12685 insn = get_insns ();
12686 insn_locators_initialize ();
12687 shorten_branches (insn);
12688 final_start_function (insn, file, 1);
12689 final (insn, file, 1, 0);
12690 final_end_function ();
12692 reload_completed = 0;
12693 epilogue_completed = 0;
12694 no_new_pseudos = 0;
12697 /* A quick summary of the various types of 'constant-pool tables'
12700 Target Flags Name One table per
12701 AIX (none) AIX TOC object file
12702 AIX -mfull-toc AIX TOC object file
12703 AIX -mminimal-toc AIX minimal TOC translation unit
12704 SVR4/EABI (none) SVR4 SDATA object file
12705 SVR4/EABI -fpic SVR4 pic object file
12706 SVR4/EABI -fPIC SVR4 PIC translation unit
12707 SVR4/EABI -mrelocatable EABI TOC function
12708 SVR4/EABI -maix AIX TOC object file
12709 SVR4/EABI -maix -mminimal-toc
12710 AIX minimal TOC translation unit
12712 Name Reg. Set by entries contains:
12713 made by addrs? fp? sum?
12715 AIX TOC 2 crt0 as Y option option
12716 AIX minimal TOC 30 prolog gcc Y Y option
12717 SVR4 SDATA 13 crt0 gcc N Y N
12718 SVR4 pic 30 prolog ld Y not yet N
12719 SVR4 PIC 30 prolog gcc Y option option
12720 EABI TOC 30 prolog gcc Y option option
12724 /* Hash functions for the hash table. */
12727 rs6000_hash_constant (rtx k)
12729 enum rtx_code code = GET_CODE (k);
12730 enum machine_mode mode = GET_MODE (k);
12731 unsigned result = (code << 3) ^ mode;
12732 const char *format;
12735 format = GET_RTX_FORMAT (code);
12736 flen = strlen (format);
12742 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12745 if (mode != VOIDmode)
12746 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12758 for (; fidx < flen; fidx++)
12759 switch (format[fidx])
12764 const char *str = XSTR (k, fidx);
12765 len = strlen (str);
12766 result = result * 613 + len;
12767 for (i = 0; i < len; i++)
12768 result = result * 613 + (unsigned) str[i];
12773 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12777 result = result * 613 + (unsigned) XINT (k, fidx);
12780 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12781 result = result * 613 + (unsigned) XWINT (k, fidx);
12785 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12786 result = result * 613 + (unsigned) (XWINT (k, fidx)
12800 toc_hash_function (const void *hash_entry)
12802 const struct toc_hash_struct *thc =
12803 (const struct toc_hash_struct *) hash_entry;
12804 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12807 /* Compare H1 and H2 for equivalence. */
12810 toc_hash_eq (const void *h1, const void *h2)
12812 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12813 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12815 if (((const struct toc_hash_struct *) h1)->key_mode
12816 != ((const struct toc_hash_struct *) h2)->key_mode)
12819 return rtx_equal_p (r1, r2);
12822 /* These are the names given by the C++ front-end to vtables, and
12823 vtable-like objects. Ideally, this logic should not be here;
12824 instead, there should be some programmatic way of inquiring as
12825 to whether or not an object is a vtable. */
12827 #define VTABLE_NAME_P(NAME) \
12828 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12829 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12830 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12831 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12834 rs6000_output_symbol_ref (FILE *file, rtx x)
12836 /* Currently C++ toc references to vtables can be emitted before it
12837 is decided whether the vtable is public or private. If this is
12838 the case, then the linker will eventually complain that there is
12839 a reference to an unknown section. Thus, for vtables only,
12840 we emit the TOC reference to reference the symbol and not the
12842 const char *name = XSTR (x, 0);
12844 if (VTABLE_NAME_P (name))
12846 RS6000_OUTPUT_BASENAME (file, name);
12849 assemble_name (file, name);
12852 /* Output a TOC entry. We derive the entry name from what is being
12856 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
12859 const char *name = buf;
12860 const char *real_name;
12867 /* When the linker won't eliminate them, don't output duplicate
12868 TOC entries (this happens on AIX if there is any kind of TOC,
12869 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12871 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12873 struct toc_hash_struct *h;
12876 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12877 time because GGC is not initialized at that point. */
12878 if (toc_hash_table == NULL)
12879 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12880 toc_hash_eq, NULL);
12882 h = ggc_alloc (sizeof (*h));
12884 h->key_mode = mode;
12885 h->labelno = labelno;
12887 found = htab_find_slot (toc_hash_table, h, 1);
12888 if (*found == NULL)
12890 else /* This is indeed a duplicate.
12891 Set this label equal to that label. */
12893 fputs ("\t.set ", file);
12894 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12895 fprintf (file, "%d,", labelno);
12896 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12897 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12903 /* If we're going to put a double constant in the TOC, make sure it's
12904 aligned properly when strict alignment is on. */
12905 if (GET_CODE (x) == CONST_DOUBLE
12906 && STRICT_ALIGNMENT
12907 && GET_MODE_BITSIZE (mode) >= 64
12908 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12909 ASM_OUTPUT_ALIGN (file, 3);
12912 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12914 /* Handle FP constants specially. Note that if we have a minimal
12915 TOC, things we put here aren't actually in the TOC, so we can allow
12917 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12919 REAL_VALUE_TYPE rv;
12922 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12923 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12927 if (TARGET_MINIMAL_TOC)
12928 fputs (DOUBLE_INT_ASM_OP, file);
12930 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12931 k[0] & 0xffffffff, k[1] & 0xffffffff,
12932 k[2] & 0xffffffff, k[3] & 0xffffffff);
12933 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12934 k[0] & 0xffffffff, k[1] & 0xffffffff,
12935 k[2] & 0xffffffff, k[3] & 0xffffffff);
12940 if (TARGET_MINIMAL_TOC)
12941 fputs ("\t.long ", file);
12943 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12944 k[0] & 0xffffffff, k[1] & 0xffffffff,
12945 k[2] & 0xffffffff, k[3] & 0xffffffff);
12946 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12947 k[0] & 0xffffffff, k[1] & 0xffffffff,
12948 k[2] & 0xffffffff, k[3] & 0xffffffff);
12952 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12954 REAL_VALUE_TYPE rv;
12957 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12958 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12962 if (TARGET_MINIMAL_TOC)
12963 fputs (DOUBLE_INT_ASM_OP, file);
12965 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12966 k[0] & 0xffffffff, k[1] & 0xffffffff);
12967 fprintf (file, "0x%lx%08lx\n",
12968 k[0] & 0xffffffff, k[1] & 0xffffffff);
12973 if (TARGET_MINIMAL_TOC)
12974 fputs ("\t.long ", file);
12976 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12977 k[0] & 0xffffffff, k[1] & 0xffffffff);
12978 fprintf (file, "0x%lx,0x%lx\n",
12979 k[0] & 0xffffffff, k[1] & 0xffffffff);
12983 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12985 REAL_VALUE_TYPE rv;
12988 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12989 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12993 if (TARGET_MINIMAL_TOC)
12994 fputs (DOUBLE_INT_ASM_OP, file);
12996 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12997 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13002 if (TARGET_MINIMAL_TOC)
13003 fputs ("\t.long ", file);
13005 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13006 fprintf (file, "0x%lx\n", l & 0xffffffff);
13010 else if (GET_MODE (x) == VOIDmode
13011 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13013 unsigned HOST_WIDE_INT low;
13014 HOST_WIDE_INT high;
13016 if (GET_CODE (x) == CONST_DOUBLE)
13018 low = CONST_DOUBLE_LOW (x);
13019 high = CONST_DOUBLE_HIGH (x);
13022 #if HOST_BITS_PER_WIDE_INT == 32
13025 high = (low & 0x80000000) ? ~0 : 0;
13029 low = INTVAL (x) & 0xffffffff;
13030 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13034 /* TOC entries are always Pmode-sized, but since this
13035 is a bigendian machine then if we're putting smaller
13036 integer constants in the TOC we have to pad them.
13037 (This is still a win over putting the constants in
13038 a separate constant pool, because then we'd have
13039 to have both a TOC entry _and_ the actual constant.)
13041 For a 32-bit target, CONST_INT values are loaded and shifted
13042 entirely within `low' and can be stored in one TOC entry. */
13044 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13045 abort ();/* It would be easy to make this work, but it doesn't now. */
13047 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13049 #if HOST_BITS_PER_WIDE_INT == 32
13050 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13051 POINTER_SIZE, &low, &high, 0);
13054 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13055 high = (HOST_WIDE_INT) low >> 32;
13062 if (TARGET_MINIMAL_TOC)
13063 fputs (DOUBLE_INT_ASM_OP, file);
13065 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13066 (long) high & 0xffffffff, (long) low & 0xffffffff);
13067 fprintf (file, "0x%lx%08lx\n",
13068 (long) high & 0xffffffff, (long) low & 0xffffffff);
13073 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13075 if (TARGET_MINIMAL_TOC)
13076 fputs ("\t.long ", file);
13078 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13079 (long) high & 0xffffffff, (long) low & 0xffffffff);
13080 fprintf (file, "0x%lx,0x%lx\n",
13081 (long) high & 0xffffffff, (long) low & 0xffffffff);
13085 if (TARGET_MINIMAL_TOC)
13086 fputs ("\t.long ", file);
13088 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13089 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13095 if (GET_CODE (x) == CONST)
13097 if (GET_CODE (XEXP (x, 0)) != PLUS)
13100 base = XEXP (XEXP (x, 0), 0);
13101 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13104 if (GET_CODE (base) == SYMBOL_REF)
13105 name = XSTR (base, 0);
13106 else if (GET_CODE (base) == LABEL_REF)
13107 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13108 else if (GET_CODE (base) == CODE_LABEL)
13109 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13113 real_name = (*targetm.strip_name_encoding) (name);
13114 if (TARGET_MINIMAL_TOC)
13115 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13118 fprintf (file, "\t.tc %s", real_name);
13121 fprintf (file, ".N%d", - offset);
13123 fprintf (file, ".P%d", offset);
13125 fputs ("[TC],", file);
13128 /* Currently C++ toc references to vtables can be emitted before it
13129 is decided whether the vtable is public or private. If this is
13130 the case, then the linker will eventually complain that there is
13131 a TOC reference to an unknown section. Thus, for vtables only,
13132 we emit the TOC reference to reference the symbol and not the
13134 if (VTABLE_NAME_P (name))
13136 RS6000_OUTPUT_BASENAME (file, name);
13138 fprintf (file, "%d", offset);
13139 else if (offset > 0)
13140 fprintf (file, "+%d", offset);
13143 output_addr_const (file, x);
13147 /* Output an assembler pseudo-op to write an ASCII string of N characters
13148 starting at P to FILE.
13150 On the RS/6000, we have to do this using the .byte operation and
13151 write out special characters outside the quoted string.
13152 Also, the assembler is broken; very long strings are truncated,
13153 so we must artificially break them up early. */
13156 output_ascii (FILE *file, const char *p, int n)
13159 int i, count_string;
13160 const char *for_string = "\t.byte \"";
13161 const char *for_decimal = "\t.byte ";
13162 const char *to_close = NULL;
13165 for (i = 0; i < n; i++)
13168 if (c >= ' ' && c < 0177)
13171 fputs (for_string, file);
13174 /* Write two quotes to get one. */
13182 for_decimal = "\"\n\t.byte ";
13186 if (count_string >= 512)
13188 fputs (to_close, file);
13190 for_string = "\t.byte \"";
13191 for_decimal = "\t.byte ";
13199 fputs (for_decimal, file);
13200 fprintf (file, "%d", c);
13202 for_string = "\n\t.byte \"";
13203 for_decimal = ", ";
13209 /* Now close the string if we have written one. Then end the line. */
13211 fputs (to_close, file);
13214 /* Generate a unique section name for FILENAME for a section type
13215 represented by SECTION_DESC. Output goes into BUF.
13217 SECTION_DESC can be any string, as long as it is different for each
13218 possible section type.
13220 We name the section in the same manner as xlc. The name begins with an
13221 underscore followed by the filename (after stripping any leading directory
13222 names) with the last period replaced by the string SECTION_DESC. If
13223 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13227 rs6000_gen_section_name (char **buf, const char *filename,
13228 const char *section_desc)
13230 const char *q, *after_last_slash, *last_period = 0;
13234 after_last_slash = filename;
13235 for (q = filename; *q; q++)
13238 after_last_slash = q + 1;
13239 else if (*q == '.')
13243 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13244 *buf = (char *) xmalloc (len);
13249 for (q = after_last_slash; *q; q++)
13251 if (q == last_period)
13253 strcpy (p, section_desc);
13254 p += strlen (section_desc);
13258 else if (ISALNUM (*q))
13262 if (last_period == 0)
13263 strcpy (p, section_desc);
13268 /* Emit profile function. */
13271 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13273 if (TARGET_PROFILE_KERNEL)
13276 if (DEFAULT_ABI == ABI_AIX)
13278 #ifndef NO_PROFILE_COUNTERS
13279 # define NO_PROFILE_COUNTERS 0
13281 if (NO_PROFILE_COUNTERS)
13282 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13286 const char *label_name;
13289 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13290 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13291 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13293 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13297 else if (DEFAULT_ABI == ABI_DARWIN)
13299 const char *mcount_name = RS6000_MCOUNT;
13300 int caller_addr_regno = LINK_REGISTER_REGNUM;
13302 /* Be conservative and always set this, at least for now. */
13303 current_function_uses_pic_offset_table = 1;
13306 /* For PIC code, set up a stub and collect the caller's address
13307 from r0, which is where the prologue puts it. */
13308 if (MACHOPIC_INDIRECT)
13310 mcount_name = machopic_stub_name (mcount_name);
13311 if (current_function_uses_pic_offset_table)
13312 caller_addr_regno = 0;
13315 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13317 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13321 /* Write function profiler code. */
13324 output_function_profiler (FILE *file, int labelno)
13329 switch (DEFAULT_ABI)
13338 warning ("no profiling of 64-bit code for this ABI");
13341 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13342 fprintf (file, "\tmflr %s\n", reg_names[0]);
13345 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13346 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13347 reg_names[0], save_lr, reg_names[1]);
13348 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13349 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13350 assemble_name (file, buf);
13351 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13353 else if (flag_pic > 1)
13355 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13356 reg_names[0], save_lr, reg_names[1]);
13357 /* Now, we need to get the address of the label. */
13358 fputs ("\tbl 1f\n\t.long ", file);
13359 assemble_name (file, buf);
13360 fputs ("-.\n1:", file);
13361 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13362 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13363 reg_names[0], reg_names[11]);
13364 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13365 reg_names[0], reg_names[0], reg_names[11]);
13369 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13370 assemble_name (file, buf);
13371 fputs ("@ha\n", file);
13372 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13373 reg_names[0], save_lr, reg_names[1]);
13374 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13375 assemble_name (file, buf);
13376 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13379 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13380 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13385 if (!TARGET_PROFILE_KERNEL)
13387 /* Don't do anything, done in output_profile_hook (). */
13394 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13395 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13397 if (current_function_needs_context)
13399 asm_fprintf (file, "\tstd %s,24(%s)\n",
13400 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13401 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13402 asm_fprintf (file, "\tld %s,24(%s)\n",
13403 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13406 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13414 rs6000_use_dfa_pipeline_interface (void)
13419 /* Power4 load update and store update instructions are cracked into a
13420 load or store and an integer insn which are executed in the same cycle.
13421 Branches have their own dispatch slot which does not count against the
13422 GCC issue rate, but it changes the program flow so there are no other
13423 instructions to issue in this cycle. */
13426 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13427 int verbose ATTRIBUTE_UNUSED,
13428 rtx insn, int more)
13430 if (GET_CODE (PATTERN (insn)) == USE
13431 || GET_CODE (PATTERN (insn)) == CLOBBER)
13434 if (rs6000_cpu == PROCESSOR_POWER4)
13436 if (is_microcoded_insn (insn))
13438 else if (is_cracked_insn (insn))
13439 return more > 2 ? more - 2 : 0;
13445 /* Adjust the cost of a scheduling dependency. Return the new cost of
13446 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13449 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13452 if (! recog_memoized (insn))
13455 if (REG_NOTE_KIND (link) != 0)
13458 if (REG_NOTE_KIND (link) == 0)
13460 /* Data dependency; DEP_INSN writes a register that INSN reads
13461 some cycles later. */
13462 switch (get_attr_type (insn))
13465 /* Tell the first scheduling pass about the latency between
13466 a mtctr and bctr (and mtlr and br/blr). The first
13467 scheduling pass will not know about this latency since
13468 the mtctr instruction, which has the latency associated
13469 to it, will be generated by reload. */
13470 return TARGET_POWER ? 5 : 4;
13472 /* Leave some extra cycles between a compare and its
13473 dependent branch, to inhibit expensive mispredicts. */
13474 if ((rs6000_cpu_attr == CPU_PPC603
13475 || rs6000_cpu_attr == CPU_PPC604
13476 || rs6000_cpu_attr == CPU_PPC604E
13477 || rs6000_cpu_attr == CPU_PPC620
13478 || rs6000_cpu_attr == CPU_PPC630
13479 || rs6000_cpu_attr == CPU_PPC750
13480 || rs6000_cpu_attr == CPU_PPC7400
13481 || rs6000_cpu_attr == CPU_PPC7450
13482 || rs6000_cpu_attr == CPU_POWER4)
13483 && recog_memoized (dep_insn)
13484 && (INSN_CODE (dep_insn) >= 0)
13485 && (get_attr_type (dep_insn) == TYPE_CMP
13486 || get_attr_type (dep_insn) == TYPE_COMPARE
13487 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13488 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13489 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13490 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13491 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13492 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13497 /* Fall out to return default cost. */
13503 /* The function returns a true if INSN is microcoded.
13504 Return false ptherwise. */
13507 is_microcoded_insn (rtx insn)
13509 if (!insn || !INSN_P (insn)
13510 || GET_CODE (PATTERN (insn)) == USE
13511 || GET_CODE (PATTERN (insn)) == CLOBBER)
13514 if (rs6000_cpu == PROCESSOR_POWER4)
13516 enum attr_type type = get_attr_type (insn);
13517 if (type == TYPE_LOAD_EXT_U
13518 || type == TYPE_LOAD_EXT_UX
13519 || type == TYPE_LOAD_UX
13520 || type == TYPE_STORE_UX
13521 || type == TYPE_MFCR)
13528 /* The function returns a non-zero value if INSN can be scheduled only
13529 as the first insn in a dispatch group ("dispatch-slot restricted").
13530 In this case, the returned value indicates how many dispatch slots
13531 the insn occupies (at the beginning of the group).
13532 Return 0 otherwise. */
13535 is_dispatch_slot_restricted (rtx insn)
13537 enum attr_type type;
13539 if (rs6000_cpu != PROCESSOR_POWER4)
13543 || insn == NULL_RTX
13544 || GET_CODE (insn) == NOTE
13545 || GET_CODE (PATTERN (insn)) == USE
13546 || GET_CODE (PATTERN (insn)) == CLOBBER)
13549 type = get_attr_type (insn);
13555 case TYPE_DELAYED_CR:
13556 case TYPE_CR_LOGICAL:
13568 /* The function returns true if INSN is cracked into 2 instructions
13569 by the processor (and therefore occupies 2 issue slots). */
13572 is_cracked_insn (rtx insn)
13574 if (!insn || !INSN_P (insn)
13575 || GET_CODE (PATTERN (insn)) == USE
13576 || GET_CODE (PATTERN (insn)) == CLOBBER)
13579 if (rs6000_cpu == PROCESSOR_POWER4)
13581 enum attr_type type = get_attr_type (insn);
13582 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13583 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13584 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13585 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13586 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13587 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13588 || type == TYPE_IDIV || type == TYPE_LDIV
13589 || type == TYPE_INSERT_WORD)
13596 /* The function returns true if INSN can be issued only from
13597 the branch slot. */
13600 is_branch_slot_insn (rtx insn)
13602 if (!insn || !INSN_P (insn)
13603 || GET_CODE (PATTERN (insn)) == USE
13604 || GET_CODE (PATTERN (insn)) == CLOBBER)
13607 if (rs6000_cpu == PROCESSOR_POWER4)
13609 enum attr_type type = get_attr_type (insn);
13610 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13618 /* A C statement (sans semicolon) to update the integer scheduling
13619 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13620 INSN earlier, reduce the priority to execute INSN later. Do not
13621 define this macro if you do not need to adjust the scheduling
13622 priorities of insns. */
13625 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13627 /* On machines (like the 750) which have asymmetric integer units,
13628 where one integer unit can do multiply and divides and the other
13629 can't, reduce the priority of multiply/divide so it is scheduled
13630 before other integer operations. */
13633 if (! INSN_P (insn))
13636 if (GET_CODE (PATTERN (insn)) == USE)
13639 switch (rs6000_cpu_attr) {
13641 switch (get_attr_type (insn))
13648 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13649 priority, priority);
13650 if (priority >= 0 && priority < 0x01000000)
13657 if (is_dispatch_slot_restricted (insn)
13658 && reload_completed
13659 && current_sched_info->sched_max_insns_priority
13660 && rs6000_sched_restricted_insns_priority)
13663 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13664 if (rs6000_sched_restricted_insns_priority == 1)
13665 /* Attach highest priority to insn. This means that in
13666 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13667 precede 'priority' (critical path) considerations. */
13668 return current_sched_info->sched_max_insns_priority;
13669 else if (rs6000_sched_restricted_insns_priority == 2)
13670 /* Increase priority of insn by a minimal amount. This means that in
13671 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13672 precede dispatch-slot restriction considerations. */
13673 return (priority + 1);
13679 /* Return how many instructions the machine can issue per cycle. */
13682 rs6000_issue_rate (void)
13684 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13685 if (!reload_completed)
13688 switch (rs6000_cpu_attr) {
13689 case CPU_RIOS1: /* ? */
13691 case CPU_PPC601: /* ? */
13713 /* Return how many instructions to look ahead for better insn
13717 rs6000_use_sched_lookahead (void)
13719 if (rs6000_cpu_attr == CPU_PPC8540)
13724 /* Determine is PAT refers to memory. */
13727 is_mem_ref (rtx pat)
13733 if (GET_CODE (pat) == MEM)
13736 /* Recursively process the pattern. */
13737 fmt = GET_RTX_FORMAT (GET_CODE (pat));
13739 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
13742 ret |= is_mem_ref (XEXP (pat, i));
13743 else if (fmt[i] == 'E')
13744 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
13745 ret |= is_mem_ref (XVECEXP (pat, i, j));
13751 /* Determine if PAT is a PATTERN of a load insn. */
13754 is_load_insn1 (rtx pat)
13756 if (!pat || pat == NULL_RTX)
13759 if (GET_CODE (pat) == SET)
13760 return is_mem_ref (SET_SRC (pat));
13762 if (GET_CODE (pat) == PARALLEL)
13766 for (i = 0; i < XVECLEN (pat, 0); i++)
13767 if (is_load_insn1 (XVECEXP (pat, 0, i)))
13774 /* Determine if INSN loads from memory. */
13777 is_load_insn (rtx insn)
13779 if (!insn || !INSN_P (insn))
13782 if (GET_CODE (insn) == CALL_INSN)
13785 return is_load_insn1 (PATTERN (insn));
13788 /* Determine if PAT is a PATTERN of a store insn. */
13791 is_store_insn1 (rtx pat)
13793 if (!pat || pat == NULL_RTX)
13796 if (GET_CODE (pat) == SET)
13797 return is_mem_ref (SET_DEST (pat));
13799 if (GET_CODE (pat) == PARALLEL)
13803 for (i = 0; i < XVECLEN (pat, 0); i++)
13804 if (is_store_insn1 (XVECEXP (pat, 0, i)))
13811 /* Determine if INSN stores to memory. */
13814 is_store_insn (rtx insn)
13816 if (!insn || !INSN_P (insn))
13819 return is_store_insn1 (PATTERN (insn));
13822 /* Returns whether the dependence between INSN and NEXT is considered
13823 costly by the given target. */
13826 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
13828 /* If the flag is not enbled - no dependence is considered costly;
13829 allow all dependent insns in the same group.
13830 This is the most aggressive option. */
13831 if (rs6000_sched_costly_dep == no_dep_costly)
13834 /* If the flag is set to 1 - a dependence is always considered costly;
13835 do not allow dependent instructions in the same group.
13836 This is the most conservative option. */
13837 if (rs6000_sched_costly_dep == all_deps_costly)
13840 if (rs6000_sched_costly_dep == store_to_load_dep_costly
13841 && is_load_insn (next)
13842 && is_store_insn (insn))
13843 /* Prevent load after store in the same group. */
13846 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
13847 && is_load_insn (next)
13848 && is_store_insn (insn)
13849 && (!link || (int) REG_NOTE_KIND (link) == 0))
13850 /* Prevent load after store in the same group if it is a true dependence. */
13853 /* The flag is set to X; dependences with latency >= X are considered costly,
13854 and will not be scheduled in the same group. */
13855 if (rs6000_sched_costly_dep <= max_dep_latency
13856 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
13862 /* Return the next insn after INSN that is found before TAIL is reached,
13863 skipping any "non-active" insns - insns that will not actually occupy
13864 an issue slot. Return NULL_RTX if such an insn is not found. */
13867 get_next_active_insn (rtx insn, rtx tail)
13871 if (!insn || insn == tail)
13874 next_insn = NEXT_INSN (insn);
13877 && next_insn != tail
13878 && (GET_CODE(next_insn) == NOTE
13879 || GET_CODE (PATTERN (next_insn)) == USE
13880 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
13882 next_insn = NEXT_INSN (next_insn);
13885 if (!next_insn || next_insn == tail)
13891 /* Return whether the presence of INSN causes a dispatch group terminatation
13892 of group WHICH_GROUP.
13894 If WHICH_GROUP == current_group, this function will return true if INSN
13895 causes the termination of the current group (i.e, the dispatch group to
13896 which INSN belongs). This means that INSN will be the last insn in the
13897 group it belongs to.
13899 If WHICH_GROUP == previous_group, this function will return true if INSN
13900 causes the termination of the previous group (i.e, the dispatch group that
13901 precedes the group to which INSN belongs). This means that INSN will be
13902 the first insn in the group it belongs to). */
13905 insn_terminates_group_p (rtx insn, enum group_termination which_group)
13907 enum attr_type type;
13912 type = get_attr_type (insn);
13914 if (is_microcoded_insn (insn))
13917 if (which_group == current_group)
13919 if (is_branch_slot_insn (insn))
13923 else if (which_group == previous_group)
13925 if (is_dispatch_slot_restricted (insn))
13933 /* Return true if it is recommended to keep NEXT_INSN "far" (in a seperate
13934 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
13937 is_costly_group (rtx *group_insns, rtx next_insn)
13942 int issue_rate = rs6000_issue_rate ();
13944 for (i = 0; i < issue_rate; i++)
13946 rtx insn = group_insns[i];
13949 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
13951 rtx next = XEXP (link, 0);
13952 if (next == next_insn)
13954 cost = insn_cost (insn, link, next_insn);
13955 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
13964 /* Utility of the function redefine_groups.
13965 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
13966 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
13967 to keep it "far" (in a separate group) from GROUP_INSNS, following
13968 one of the following schemes, depending on the value of the flag
13969 -minsert_sched_nops = X:
13970 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
13971 in order to force NEXT_INSN into a seperate group.
13972 (2) X < sched_finish_regroup_exact: insert exactly X nops.
13973 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
13974 insertion (has a group just ended, how many vacant issue slots remain in the
13975 last group, and how many dispatch groups were encountered so far). */
13978 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
13979 bool *group_end, int can_issue_more, int *group_count)
13983 int issue_rate = rs6000_issue_rate ();
13984 bool end = *group_end;
13987 if (next_insn == NULL_RTX)
13988 return can_issue_more;
13990 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
13991 return can_issue_more;
13993 force = is_costly_group (group_insns, next_insn);
13995 return can_issue_more;
13997 if (sched_verbose > 6)
13998 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
13999 *group_count ,can_issue_more);
14001 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14004 can_issue_more = 0;
14006 /* Since only a branch can be issued in the last issue_slot, it is
14007 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14008 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14009 in this case the last nop will start a new group and the branch will be
14010 forced to the new group. */
14011 if (can_issue_more && !is_branch_slot_insn (next_insn))
14014 while (can_issue_more > 0)
14017 emit_insn_before (nop, next_insn);
14025 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14027 int n_nops = rs6000_sched_insert_nops;
14029 /* Nops can't be issued from the branch slot, so the effective
14030 issue_rate for nops is 'issue_rate - 1'. */
14031 if (can_issue_more == 0)
14032 can_issue_more = issue_rate;
14034 if (can_issue_more == 0)
14036 can_issue_more = issue_rate - 1;
14039 for (i = 0; i < issue_rate; i++)
14041 group_insns[i] = 0;
14048 emit_insn_before (nop, next_insn);
14049 if (can_issue_more == issue_rate - 1) /* new group begins */
14052 if (can_issue_more == 0)
14054 can_issue_more = issue_rate - 1;
14057 for (i = 0; i < issue_rate; i++)
14059 group_insns[i] = 0;
14065 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14068 *group_end = /* Is next_insn going to start a new group? */
14070 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14071 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14072 || (can_issue_more < issue_rate &&
14073 insn_terminates_group_p (next_insn, previous_group)));
14074 if (*group_end && end)
14077 if (sched_verbose > 6)
14078 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14079 *group_count, can_issue_more);
14080 return can_issue_more;
14083 return can_issue_more;
14086 /* This function tries to synch the dispatch groups that the compiler "sees"
14087 with the dispatch groups that the processor dispatcher is expected to
14088 form in practice. It tries to achieve this synchronization by forcing the
14089 estimated processor grouping on the compiler (as opposed to the function
14090 'pad_goups' which tries to force the scheduler's grouping on the processor).
14092 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14093 examines the (estimated) dispatch groups that will be formed by the processor
14094 dispatcher. It marks these group boundaries to reflect the estimated
14095 processor grouping, overriding the grouping that the scheduler had marked.
14096 Depending on the value of the flag '-minsert-sched-nops' this function can
14097 force certain insns into separate groups or force a certain distance between
14098 them by inserting nops, for example, if there exists a "costly dependence"
14101 The function estimates the group boundaries that the processor will form as
14102 folllows: It keeps track of how many vacant issue slots are available after
14103 each insn. A subsequent insn will start a new group if one of the following
14105 - no more vacant issue slots remain in the current dispatch group.
14106 - only the last issue slot, which is the branch slot, is vacant, but the next
14107 insn is not a branch.
14108 - only the last 2 or less issue slots, including the branch slot, are vacant,
14109 which means that a cracked insn (which occupies two issue slots) can't be
14110 issued in this group.
14111 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14112 start a new group. */
14115 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14117 rtx insn, next_insn;
14119 int can_issue_more;
14122 int group_count = 0;
14126 issue_rate = rs6000_issue_rate ();
14127 group_insns = alloca (issue_rate * sizeof (rtx));
14128 for (i = 0; i < issue_rate; i++)
14130 group_insns[i] = 0;
14132 can_issue_more = issue_rate;
14134 insn = get_next_active_insn (prev_head_insn, tail);
14137 while (insn != NULL_RTX)
14139 slot = (issue_rate - can_issue_more);
14140 group_insns[slot] = insn;
14142 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14143 if (insn_terminates_group_p (insn, current_group))
14144 can_issue_more = 0;
14146 next_insn = get_next_active_insn (insn, tail);
14147 if (next_insn == NULL_RTX)
14148 return group_count + 1;
14150 group_end = /* Is next_insn going to start a new group? */
14151 (can_issue_more == 0
14152 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14153 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14154 || (can_issue_more < issue_rate &&
14155 insn_terminates_group_p (next_insn, previous_group)));
14157 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14158 next_insn, &group_end, can_issue_more, &group_count);
14163 can_issue_more = 0;
14164 for (i = 0; i < issue_rate; i++)
14166 group_insns[i] = 0;
14170 if (GET_MODE (next_insn) == TImode && can_issue_more)
14171 PUT_MODE(next_insn, VOIDmode);
14172 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14173 PUT_MODE (next_insn, TImode);
14176 if (can_issue_more == 0)
14177 can_issue_more = issue_rate;
14180 return group_count;
14183 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14184 dispatch group boundaries that the scheduler had marked. Pad with nops
14185 any dispatch groups which have vacant issue slots, in order to force the
14186 scheduler's grouping on the processor dispatcher. The function
14187 returns the number of dispatch groups found. */
14190 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14192 rtx insn, next_insn;
14195 int can_issue_more;
14197 int group_count = 0;
14199 /* Initialize issue_rate. */
14200 issue_rate = rs6000_issue_rate ();
14201 can_issue_more = issue_rate;
14203 insn = get_next_active_insn (prev_head_insn, tail);
14204 next_insn = get_next_active_insn (insn, tail);
14206 while (insn != NULL_RTX)
14209 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14211 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14213 if (next_insn == NULL_RTX)
14218 /* If the scheduler had marked group termination at this location
14219 (between insn and next_indn), and neither insn nor next_insn will
14220 force group termination, pad the group with nops to force group
14223 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14224 && !insn_terminates_group_p (insn, current_group)
14225 && !insn_terminates_group_p (next_insn, previous_group))
14227 if (!is_branch_slot_insn(next_insn))
14230 while (can_issue_more)
14233 emit_insn_before (nop, next_insn);
14238 can_issue_more = issue_rate;
14243 next_insn = get_next_active_insn (insn, tail);
14246 return group_count;
14249 /* The following function is called at the end of scheduling BB.
14250 After reload, it inserts nops at insn group bundling. */
14253 rs6000_sched_finish (FILE *dump, int sched_verbose)
14258 fprintf (dump, "=== Finishing schedule.\n");
14260 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14262 if (rs6000_sched_insert_nops == sched_finish_none)
14265 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14266 n_groups = pad_groups (dump, sched_verbose,
14267 current_sched_info->prev_head,
14268 current_sched_info->next_tail);
14270 n_groups = redefine_groups (dump, sched_verbose,
14271 current_sched_info->prev_head,
14272 current_sched_info->next_tail);
14274 if (sched_verbose >= 6)
14276 fprintf (dump, "ngroups = %d\n", n_groups);
14277 print_rtl (dump, current_sched_info->prev_head);
14278 fprintf (dump, "Done finish_sched\n");
14283 /* Length in units of the trampoline for entering a nested function. */
14286 rs6000_trampoline_size (void)
14290 switch (DEFAULT_ABI)
14296 ret = (TARGET_32BIT) ? 12 : 24;
14301 ret = (TARGET_32BIT) ? 40 : 48;
14308 /* Emit RTL insns to initialize the variable parts of a trampoline.
14309 FNADDR is an RTX for the address of the function's pure code.
14310 CXT is an RTX for the static chain value for the function. */
14313 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14315 enum machine_mode pmode = Pmode;
14316 int regsize = (TARGET_32BIT) ? 4 : 8;
14317 rtx ctx_reg = force_reg (pmode, cxt);
14319 switch (DEFAULT_ABI)
14324 /* Macros to shorten the code expansions below. */
14325 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14326 #define MEM_PLUS(addr,offset) \
14327 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14329 /* Under AIX, just build the 3 word function descriptor */
14332 rtx fn_reg = gen_reg_rtx (pmode);
14333 rtx toc_reg = gen_reg_rtx (pmode);
14334 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14335 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14336 emit_move_insn (MEM_DEREF (addr), fn_reg);
14337 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14338 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14342 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14345 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14346 FALSE, VOIDmode, 4,
14348 GEN_INT (rs6000_trampoline_size ()), SImode,
14358 /* Table of valid machine attributes. */
14360 const struct attribute_spec rs6000_attribute_table[] =
14362 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14363 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14364 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14365 { NULL, 0, 0, false, false, false, NULL }
14368 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14369 struct attribute_spec.handler. */
14372 rs6000_handle_longcall_attribute (tree *node, tree name,
14373 tree args ATTRIBUTE_UNUSED,
14374 int flags ATTRIBUTE_UNUSED,
14375 bool *no_add_attrs)
14377 if (TREE_CODE (*node) != FUNCTION_TYPE
14378 && TREE_CODE (*node) != FIELD_DECL
14379 && TREE_CODE (*node) != TYPE_DECL)
14381 warning ("`%s' attribute only applies to functions",
14382 IDENTIFIER_POINTER (name));
14383 *no_add_attrs = true;
14389 /* Set longcall attributes on all functions declared when
14390 rs6000_default_long_calls is true. */
14392 rs6000_set_default_type_attributes (tree type)
14394 if (rs6000_default_long_calls
14395 && (TREE_CODE (type) == FUNCTION_TYPE
14396 || TREE_CODE (type) == METHOD_TYPE))
14397 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14399 TYPE_ATTRIBUTES (type));
14402 /* Return a reference suitable for calling a function with the
14403 longcall attribute. */
14406 rs6000_longcall_ref (rtx call_ref)
14408 const char *call_name;
14411 if (GET_CODE (call_ref) != SYMBOL_REF)
14414 /* System V adds '.' to the internal name, so skip them. */
14415 call_name = XSTR (call_ref, 0);
14416 if (*call_name == '.')
14418 while (*call_name == '.')
14421 node = get_identifier (call_name);
14422 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14425 return force_reg (Pmode, call_ref);
14428 #ifdef USING_ELFOS_H
14430 /* A C statement or statements to switch to the appropriate section
14431 for output of RTX in mode MODE. You can assume that RTX is some
14432 kind of constant in RTL. The argument MODE is redundant except in
14433 the case of a `const_int' rtx. Select the section by calling
14434 `text_section' or one of the alternatives for other sections.
14436 Do not define this macro if you put all constants in the read-only
14440 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14441 unsigned HOST_WIDE_INT align)
14443 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14446 default_elf_select_rtx_section (mode, x, align);
14449 /* A C statement or statements to switch to the appropriate
14450 section for output of DECL. DECL is either a `VAR_DECL' node
14451 or a constant of some sort. RELOC indicates whether forming
14452 the initial value of DECL requires link-time relocations. */
14455 rs6000_elf_select_section (tree decl, int reloc,
14456 unsigned HOST_WIDE_INT align)
14458 /* Pretend that we're always building for a shared library when
14459 ABI_AIX, because otherwise we end up with dynamic relocations
14460 in read-only sections. This happens for function pointers,
14461 references to vtables in typeinfo, and probably other cases. */
14462 default_elf_select_section_1 (decl, reloc, align,
14463 flag_pic || DEFAULT_ABI == ABI_AIX);
14466 /* A C statement to build up a unique section name, expressed as a
14467 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14468 RELOC indicates whether the initial value of EXP requires
14469 link-time relocations. If you do not define this macro, GCC will use
14470 the symbol name prefixed by `.' as the section name. Note - this
14471 macro can now be called for uninitialized data items as well as
14472 initialized data and functions. */
14475 rs6000_elf_unique_section (tree decl, int reloc)
14477 /* As above, pretend that we're always building for a shared library
14478 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14479 default_unique_section_1 (decl, reloc,
14480 flag_pic || DEFAULT_ABI == ABI_AIX);
14483 /* For a SYMBOL_REF, set generic flags and then perform some
14484 target-specific processing.
14486 When the AIX ABI is requested on a non-AIX system, replace the
14487 function name with the real name (with a leading .) rather than the
14488 function descriptor name. This saves a lot of overriding code to
14489 read the prefixes. */
14492 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14494 default_encode_section_info (decl, rtl, first);
14497 && TREE_CODE (decl) == FUNCTION_DECL
14499 && DEFAULT_ABI == ABI_AIX)
14501 rtx sym_ref = XEXP (rtl, 0);
14502 size_t len = strlen (XSTR (sym_ref, 0));
14503 char *str = alloca (len + 2);
14505 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14506 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14511 rs6000_elf_in_small_data_p (tree decl)
14513 if (rs6000_sdata == SDATA_NONE)
14516 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14518 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14519 if (strcmp (section, ".sdata") == 0
14520 || strcmp (section, ".sdata2") == 0
14521 || strcmp (section, ".sbss") == 0
14522 || strcmp (section, ".sbss2") == 0
14523 || strcmp (section, ".PPC.EMB.sdata0") == 0
14524 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14529 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14532 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14533 /* If it's not public, and we're not going to reference it there,
14534 there's no need to put it in the small data section. */
14535 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14542 #endif /* USING_ELFOS_H */
14545 /* Return a REG that occurs in ADDR with coefficient 1.
14546 ADDR can be effectively incremented by incrementing REG.
14548 r0 is special and we must not select it as an address
14549 register by this routine since our caller will try to
14550 increment the returned register via an "la" instruction. */
14553 find_addr_reg (rtx addr)
14555 while (GET_CODE (addr) == PLUS)
14557 if (GET_CODE (XEXP (addr, 0)) == REG
14558 && REGNO (XEXP (addr, 0)) != 0)
14559 addr = XEXP (addr, 0);
14560 else if (GET_CODE (XEXP (addr, 1)) == REG
14561 && REGNO (XEXP (addr, 1)) != 0)
14562 addr = XEXP (addr, 1);
14563 else if (CONSTANT_P (XEXP (addr, 0)))
14564 addr = XEXP (addr, 1);
14565 else if (CONSTANT_P (XEXP (addr, 1)))
14566 addr = XEXP (addr, 0);
14570 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14576 rs6000_fatal_bad_address (rtx op)
14578 fatal_insn ("bad address", op);
14584 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14585 reference and a constant. */
14588 symbolic_operand (rtx op)
14590 switch (GET_CODE (op))
14597 return (GET_CODE (op) == SYMBOL_REF ||
14598 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
14599 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
14600 && GET_CODE (XEXP (op, 1)) == CONST_INT);
14607 #ifdef RS6000_LONG_BRANCH
14609 static tree stub_list = 0;
14611 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
14612 procedure calls to the linked list. */
14615 add_compiler_stub (tree label_name, tree function_name, int line_number)
14617 tree stub = build_tree_list (function_name, label_name);
14618 TREE_TYPE (stub) = build_int_2 (line_number, 0);
14619 TREE_CHAIN (stub) = stub_list;
14623 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
14624 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
14625 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
14627 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
14628 handling procedure calls from the linked list and initializes the
14632 output_compiler_stub (void)
14635 char label_buf[256];
14639 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14641 fprintf (asm_out_file,
14642 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
14644 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14645 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14646 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
14647 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14649 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
14651 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
14654 label_buf[0] = '_';
14655 strcpy (label_buf+1,
14656 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
14659 strcpy (tmp_buf, "lis r12,hi16(");
14660 strcat (tmp_buf, label_buf);
14661 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
14662 strcat (tmp_buf, label_buf);
14663 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
14664 output_asm_insn (tmp_buf, 0);
14666 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14667 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14668 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
14669 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14675 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14676 already there or not. */
14679 no_previous_def (tree function_name)
14682 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14683 if (function_name == STUB_FUNCTION_NAME (stub))
14688 /* GET_PREV_LABEL gets the label name from the previous definition of
14692 get_prev_label (tree function_name)
14695 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14696 if (function_name == STUB_FUNCTION_NAME (stub))
14697 return STUB_LABEL_NAME (stub);
14701 /* INSN is either a function call or a millicode call. It may have an
14702 unconditional jump in its delay slot.
14704 CALL_DEST is the routine we are calling. */
14707 output_call (rtx insn, rtx call_dest, int operand_number)
14709 static char buf[256];
14710 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
14713 tree funname = get_identifier (XSTR (call_dest, 0));
14715 if (no_previous_def (funname))
14717 int line_number = 0;
14718 rtx label_rtx = gen_label_rtx ();
14719 char *label_buf, temp_buf[256];
14720 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
14721 CODE_LABEL_NUMBER (label_rtx));
14722 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
14723 labelname = get_identifier (label_buf);
14724 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
14726 line_number = NOTE_LINE_NUMBER (insn);
14727 add_compiler_stub (labelname, funname, line_number);
14730 labelname = get_prev_label (funname);
14732 sprintf (buf, "jbsr %%z%d,%.246s",
14733 operand_number, IDENTIFIER_POINTER (labelname));
14738 sprintf (buf, "bl %%z%d", operand_number);
14743 #endif /* RS6000_LONG_BRANCH */
14745 /* Generate PIC and indirect symbol stubs. */
14748 machopic_output_stub (FILE *file, const char *symb, const char *stub)
14750 unsigned int length;
14751 char *symbol_name, *lazy_ptr_name;
14752 char *local_label_0;
14753 static int label = 0;
14755 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
14756 symb = (*targetm.strip_name_encoding) (symb);
14759 length = strlen (symb);
14760 symbol_name = alloca (length + 32);
14761 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
14763 lazy_ptr_name = alloca (length + 32);
14764 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
14767 machopic_picsymbol_stub1_section ();
14769 machopic_symbol_stub1_section ();
14770 fprintf (file, "\t.align 2\n");
14772 fprintf (file, "%s:\n", stub);
14773 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14778 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
14779 sprintf (local_label_0, "\"L%011d$spb\"", label);
14781 fprintf (file, "\tmflr r0\n");
14782 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
14783 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
14784 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
14785 lazy_ptr_name, local_label_0);
14786 fprintf (file, "\tmtlr r0\n");
14787 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
14788 lazy_ptr_name, local_label_0);
14789 fprintf (file, "\tmtctr r12\n");
14790 fprintf (file, "\tbctr\n");
14794 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
14795 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
14796 fprintf (file, "\tmtctr r12\n");
14797 fprintf (file, "\tbctr\n");
14800 machopic_lazy_symbol_ptr_section ();
14801 fprintf (file, "%s:\n", lazy_ptr_name);
14802 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14803 fprintf (file, "\t.long dyld_stub_binding_helper\n");
14806 /* Legitimize PIC addresses. If the address is already
14807 position-independent, we return ORIG. Newly generated
14808 position-independent addresses go into a reg. This is REG if non
14809 zero, otherwise we allocate register(s) as necessary. */
14811 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
14814 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
14819 if (reg == NULL && ! reload_in_progress && ! reload_completed)
14820 reg = gen_reg_rtx (Pmode);
14822 if (GET_CODE (orig) == CONST)
14824 if (GET_CODE (XEXP (orig, 0)) == PLUS
14825 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
14828 if (GET_CODE (XEXP (orig, 0)) == PLUS)
14830 /* Use a different reg for the intermediate value, as
14831 it will be marked UNCHANGING. */
14832 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
14835 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
14838 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
14844 if (GET_CODE (offset) == CONST_INT)
14846 if (SMALL_INT (offset))
14847 return plus_constant (base, INTVAL (offset));
14848 else if (! reload_in_progress && ! reload_completed)
14849 offset = force_reg (Pmode, offset);
14852 rtx mem = force_const_mem (Pmode, orig);
14853 return machopic_legitimize_pic_address (mem, Pmode, reg);
14856 return gen_rtx (PLUS, Pmode, base, offset);
14859 /* Fall back on generic machopic code. */
14860 return machopic_legitimize_pic_address (orig, mode, reg);
14863 /* This is just a placeholder to make linking work without having to
14864 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
14865 ever needed for Darwin (not too likely!) this would have to get a
14866 real definition. */
14873 #endif /* TARGET_MACHO */
14876 static unsigned int
14877 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
14880 = default_section_type_flags_1 (decl, name, reloc,
14881 flag_pic || DEFAULT_ABI == ABI_AIX);
14883 if (TARGET_RELOCATABLE)
14884 flags |= SECTION_WRITE;
14889 /* Record an element in the table of global constructors. SYMBOL is
14890 a SYMBOL_REF of the function to be called; PRIORITY is a number
14891 between 0 and MAX_INIT_PRIORITY.
14893 This differs from default_named_section_asm_out_constructor in
14894 that we have special handling for -mrelocatable. */
14897 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
14899 const char *section = ".ctors";
14902 if (priority != DEFAULT_INIT_PRIORITY)
14904 sprintf (buf, ".ctors.%.5u",
14905 /* Invert the numbering so the linker puts us in the proper
14906 order; constructors are run from right to left, and the
14907 linker sorts in increasing order. */
14908 MAX_INIT_PRIORITY - priority);
14912 named_section_flags (section, SECTION_WRITE);
14913 assemble_align (POINTER_SIZE);
14915 if (TARGET_RELOCATABLE)
14917 fputs ("\t.long (", asm_out_file);
14918 output_addr_const (asm_out_file, symbol);
14919 fputs (")@fixup\n", asm_out_file);
14922 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14926 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
14928 const char *section = ".dtors";
14931 if (priority != DEFAULT_INIT_PRIORITY)
14933 sprintf (buf, ".dtors.%.5u",
14934 /* Invert the numbering so the linker puts us in the proper
14935 order; constructors are run from right to left, and the
14936 linker sorts in increasing order. */
14937 MAX_INIT_PRIORITY - priority);
14941 named_section_flags (section, SECTION_WRITE);
14942 assemble_align (POINTER_SIZE);
14944 if (TARGET_RELOCATABLE)
14946 fputs ("\t.long (", asm_out_file);
14947 output_addr_const (asm_out_file, symbol);
14948 fputs (")@fixup\n", asm_out_file);
14951 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14955 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
14959 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
14960 ASM_OUTPUT_LABEL (file, name);
14961 fputs (DOUBLE_INT_ASM_OP, file);
14963 assemble_name (file, name);
14964 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
14965 assemble_name (file, name);
14966 fputs (",24\n\t.type\t.", file);
14967 assemble_name (file, name);
14968 fputs (",@function\n", file);
14969 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
14971 fputs ("\t.globl\t.", file);
14972 assemble_name (file, name);
14975 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14977 ASM_OUTPUT_LABEL (file, name);
14981 if (TARGET_RELOCATABLE
14982 && (get_pool_size () != 0 || current_function_profile)
14987 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
14989 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14990 fprintf (file, "\t.long ");
14991 assemble_name (file, buf);
14993 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14994 assemble_name (file, buf);
14998 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
14999 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15001 if (DEFAULT_ABI == ABI_AIX)
15003 const char *desc_name, *orig_name;
15005 orig_name = (*targetm.strip_name_encoding) (name);
15006 desc_name = orig_name;
15007 while (*desc_name == '.')
15010 if (TREE_PUBLIC (decl))
15011 fprintf (file, "\t.globl %s\n", desc_name);
15013 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15014 fprintf (file, "%s:\n", desc_name);
15015 fprintf (file, "\t.long %s\n", orig_name);
15016 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15017 if (DEFAULT_ABI == ABI_AIX)
15018 fputs ("\t.long 0\n", file);
15019 fprintf (file, "\t.previous\n");
15021 ASM_OUTPUT_LABEL (file, name);
15027 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15029 fputs (GLOBAL_ASM_OP, stream);
15030 RS6000_OUTPUT_BASENAME (stream, name);
15031 putc ('\n', stream);
15035 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15038 static const char * const suffix[3] = { "PR", "RO", "RW" };
15040 if (flags & SECTION_CODE)
15042 else if (flags & SECTION_WRITE)
15047 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15048 (flags & SECTION_CODE) ? "." : "",
15049 name, suffix[smclass], flags & SECTION_ENTSIZE);
15053 rs6000_xcoff_select_section (tree decl, int reloc,
15054 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15056 if (decl_readonly_section_1 (decl, reloc, 1))
15058 if (TREE_PUBLIC (decl))
15059 read_only_data_section ();
15061 read_only_private_data_section ();
15065 if (TREE_PUBLIC (decl))
15068 private_data_section ();
15073 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15077 /* Use select_section for private and uninitialized data. */
15078 if (!TREE_PUBLIC (decl)
15079 || DECL_COMMON (decl)
15080 || DECL_INITIAL (decl) == NULL_TREE
15081 || DECL_INITIAL (decl) == error_mark_node
15082 || (flag_zero_initialized_in_bss
15083 && initializer_zerop (DECL_INITIAL (decl))))
15086 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15087 name = (*targetm.strip_name_encoding) (name);
15088 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15091 /* Select section for constant in constant pool.
15093 On RS/6000, all constants are in the private read-only data area.
15094 However, if this is being placed in the TOC it must be output as a
15098 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15099 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15101 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15104 read_only_private_data_section ();
15107 /* Remove any trailing [DS] or the like from the symbol name. */
15109 static const char *
15110 rs6000_xcoff_strip_name_encoding (const char *name)
15115 len = strlen (name);
15116 if (name[len - 1] == ']')
15117 return ggc_alloc_string (name, len - 4);
15122 /* Section attributes. AIX is always PIC. */
15124 static unsigned int
15125 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15127 unsigned int align;
15128 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15130 /* Align to at least UNIT size. */
15131 if (flags & SECTION_CODE)
15132 align = MIN_UNITS_PER_WORD;
15134 /* Increase alignment of large objects if not already stricter. */
15135 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15136 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15137 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15139 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15142 /* Output at beginning of assembler file.
15144 Initialize the section names for the RS/6000 at this point.
15146 Specify filename, including full path, to assembler.
15148 We want to go into the TOC section so at least one .toc will be emitted.
15149 Also, in order to output proper .bs/.es pairs, we need at least one static
15150 [RW] section emitted.
15152 Finally, declare mcount when profiling to make the assembler happy. */
15155 rs6000_xcoff_file_start (void)
15157 rs6000_gen_section_name (&xcoff_bss_section_name,
15158 main_input_filename, ".bss_");
15159 rs6000_gen_section_name (&xcoff_private_data_section_name,
15160 main_input_filename, ".rw_");
15161 rs6000_gen_section_name (&xcoff_read_only_section_name,
15162 main_input_filename, ".ro_");
15164 fputs ("\t.file\t", asm_out_file);
15165 output_quoted_string (asm_out_file, main_input_filename);
15166 fputc ('\n', asm_out_file);
15168 if (write_symbols != NO_DEBUG)
15169 private_data_section ();
15172 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15173 rs6000_file_start ();
15176 /* Output at end of assembler file.
15177 On the RS/6000, referencing data should automatically pull in text. */
15180 rs6000_xcoff_file_end (void)
15183 fputs ("_section_.text:\n", asm_out_file);
15185 fputs (TARGET_32BIT
15186 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15189 #endif /* TARGET_XCOFF */
15192 /* Cross-module name binding. Darwin does not support overriding
15193 functions at dynamic-link time. */
15196 rs6000_binds_local_p (tree decl)
15198 return default_binds_local_p_1 (decl, 0);
15202 /* Compute a (partial) cost for rtx X. Return true if the complete
15203 cost has been computed, and false if subexpressions should be
15204 scanned. In either case, *TOTAL contains the cost result. */
15207 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15212 /* On the RS/6000, if it is valid in the insn, it is free.
15213 So this always returns 0. */
15224 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15225 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15226 + 0x8000) >= 0x10000)
15227 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15228 ? COSTS_N_INSNS (2)
15229 : COSTS_N_INSNS (1));
15235 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15236 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15237 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15238 ? COSTS_N_INSNS (2)
15239 : COSTS_N_INSNS (1));
15245 *total = COSTS_N_INSNS (2);
15248 switch (rs6000_cpu)
15250 case PROCESSOR_RIOS1:
15251 case PROCESSOR_PPC405:
15252 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15253 ? COSTS_N_INSNS (5)
15254 : (INTVAL (XEXP (x, 1)) >= -256
15255 && INTVAL (XEXP (x, 1)) <= 255)
15256 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15259 case PROCESSOR_PPC440:
15260 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15261 ? COSTS_N_INSNS (3)
15262 : COSTS_N_INSNS (2));
15265 case PROCESSOR_RS64A:
15266 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15267 ? GET_MODE (XEXP (x, 1)) != DImode
15268 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15269 : (INTVAL (XEXP (x, 1)) >= -256
15270 && INTVAL (XEXP (x, 1)) <= 255)
15271 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15274 case PROCESSOR_RIOS2:
15275 case PROCESSOR_MPCCORE:
15276 case PROCESSOR_PPC604e:
15277 *total = COSTS_N_INSNS (2);
15280 case PROCESSOR_PPC601:
15281 *total = COSTS_N_INSNS (5);
15284 case PROCESSOR_PPC603:
15285 case PROCESSOR_PPC7400:
15286 case PROCESSOR_PPC750:
15287 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15288 ? COSTS_N_INSNS (5)
15289 : (INTVAL (XEXP (x, 1)) >= -256
15290 && INTVAL (XEXP (x, 1)) <= 255)
15291 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15294 case PROCESSOR_PPC7450:
15295 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15296 ? COSTS_N_INSNS (4)
15297 : COSTS_N_INSNS (3));
15300 case PROCESSOR_PPC403:
15301 case PROCESSOR_PPC604:
15302 case PROCESSOR_PPC8540:
15303 *total = COSTS_N_INSNS (4);
15306 case PROCESSOR_PPC620:
15307 case PROCESSOR_PPC630:
15308 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15309 ? GET_MODE (XEXP (x, 1)) != DImode
15310 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15311 : (INTVAL (XEXP (x, 1)) >= -256
15312 && INTVAL (XEXP (x, 1)) <= 255)
15313 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15316 case PROCESSOR_POWER4:
15317 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15318 ? GET_MODE (XEXP (x, 1)) != DImode
15319 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15320 : COSTS_N_INSNS (2));
15329 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15330 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15332 *total = COSTS_N_INSNS (2);
15339 switch (rs6000_cpu)
15341 case PROCESSOR_RIOS1:
15342 *total = COSTS_N_INSNS (19);
15345 case PROCESSOR_RIOS2:
15346 *total = COSTS_N_INSNS (13);
15349 case PROCESSOR_RS64A:
15350 *total = (GET_MODE (XEXP (x, 1)) != DImode
15351 ? COSTS_N_INSNS (65)
15352 : COSTS_N_INSNS (67));
15355 case PROCESSOR_MPCCORE:
15356 *total = COSTS_N_INSNS (6);
15359 case PROCESSOR_PPC403:
15360 *total = COSTS_N_INSNS (33);
15363 case PROCESSOR_PPC405:
15364 *total = COSTS_N_INSNS (35);
15367 case PROCESSOR_PPC440:
15368 *total = COSTS_N_INSNS (34);
15371 case PROCESSOR_PPC601:
15372 *total = COSTS_N_INSNS (36);
15375 case PROCESSOR_PPC603:
15376 *total = COSTS_N_INSNS (37);
15379 case PROCESSOR_PPC604:
15380 case PROCESSOR_PPC604e:
15381 *total = COSTS_N_INSNS (20);
15384 case PROCESSOR_PPC620:
15385 case PROCESSOR_PPC630:
15386 *total = (GET_MODE (XEXP (x, 1)) != DImode
15387 ? COSTS_N_INSNS (21)
15388 : COSTS_N_INSNS (37));
15391 case PROCESSOR_PPC750:
15392 case PROCESSOR_PPC8540:
15393 case PROCESSOR_PPC7400:
15394 *total = COSTS_N_INSNS (19);
15397 case PROCESSOR_PPC7450:
15398 *total = COSTS_N_INSNS (23);
15401 case PROCESSOR_POWER4:
15402 *total = (GET_MODE (XEXP (x, 1)) != DImode
15403 ? COSTS_N_INSNS (18)
15404 : COSTS_N_INSNS (34));
15412 *total = COSTS_N_INSNS (4);
15416 /* MEM should be slightly more expensive than (plus (reg) (const)) */
15425 /* A C expression returning the cost of moving data from a register of class
15426 CLASS1 to one of CLASS2. */
15429 rs6000_register_move_cost (enum machine_mode mode,
15430 enum reg_class from, enum reg_class to)
15432 /* Moves from/to GENERAL_REGS. */
15433 if (reg_classes_intersect_p (to, GENERAL_REGS)
15434 || reg_classes_intersect_p (from, GENERAL_REGS))
15436 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15439 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15440 return (rs6000_memory_move_cost (mode, from, 0)
15441 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15443 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
15444 else if (from == CR_REGS)
15448 /* A move will cost one instruction per GPR moved. */
15449 return 2 * HARD_REGNO_NREGS (0, mode);
15452 /* Moving between two similar registers is just one instruction. */
15453 else if (reg_classes_intersect_p (to, from))
15454 return mode == TFmode ? 4 : 2;
15456 /* Everything else has to go through GENERAL_REGS. */
15458 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15459 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15462 /* A C expressions returning the cost of moving data of MODE from a register to
15466 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15467 int in ATTRIBUTE_UNUSED)
15469 if (reg_classes_intersect_p (class, GENERAL_REGS))
15470 return 4 * HARD_REGNO_NREGS (0, mode);
15471 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15472 return 4 * HARD_REGNO_NREGS (32, mode);
15473 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15474 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15476 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15479 /* Return an RTX representing where to find the function value of a
15480 function returning MODE. */
15482 rs6000_complex_function_value (enum machine_mode mode)
15484 unsigned int regno;
15486 enum machine_mode inner = GET_MODE_INNER (mode);
15488 if (FLOAT_MODE_P (mode))
15489 regno = FP_ARG_RETURN;
15492 regno = GP_ARG_RETURN;
15494 /* 32-bit is OK since it'll go in r3/r4. */
15496 && GET_MODE_BITSIZE (inner) >= 32)
15497 return gen_rtx_REG (mode, regno);
15500 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15502 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15503 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
15504 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15507 /* Define how to find the value returned by a function.
15508 VALTYPE is the data type of the value (as a tree).
15509 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15510 otherwise, FUNC is 0.
15512 On the SPE, both FPs and vectors are returned in r3.
15514 On RS/6000 an integer value is in r3 and a floating-point value is in
15515 fp1, unless -msoft-float. */
15518 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15520 enum machine_mode mode;
15521 unsigned int regno;
15523 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15525 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15526 return gen_rtx_PARALLEL (DImode,
15528 gen_rtx_EXPR_LIST (VOIDmode,
15529 gen_rtx_REG (SImode, GP_ARG_RETURN),
15531 gen_rtx_EXPR_LIST (VOIDmode,
15532 gen_rtx_REG (SImode,
15533 GP_ARG_RETURN + 1),
15537 if ((INTEGRAL_TYPE_P (valtype)
15538 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15539 || POINTER_TYPE_P (valtype))
15540 mode = TARGET_32BIT ? SImode : DImode;
15542 mode = TYPE_MODE (valtype);
15544 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15545 regno = FP_ARG_RETURN;
15546 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15547 && TARGET_HARD_FLOAT
15548 && SPLIT_COMPLEX_ARGS)
15549 return rs6000_complex_function_value (mode);
15550 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15551 regno = ALTIVEC_ARG_RETURN;
15553 regno = GP_ARG_RETURN;
15555 return gen_rtx_REG (mode, regno);
15558 /* Define how to find the value returned by a library function
15559 assuming the value has mode MODE. */
15561 rs6000_libcall_value (enum machine_mode mode)
15563 unsigned int regno;
15565 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15566 && TARGET_HARD_FLOAT && TARGET_FPRS)
15567 regno = FP_ARG_RETURN;
15568 else if (ALTIVEC_VECTOR_MODE (mode))
15569 regno = ALTIVEC_ARG_RETURN;
15570 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
15571 return rs6000_complex_function_value (mode);
15573 regno = GP_ARG_RETURN;
15575 return gen_rtx_REG (mode, regno);
15578 /* Return true if TYPE is of type __ev64_opaque__. */
15581 is_ev64_opaque_type (tree type)
15584 && (type == opaque_V2SI_type_node
15585 || type == opaque_V2SF_type_node
15586 || type == opaque_p_V2SI_type_node));
15590 rs6000_dwarf_register_span (rtx reg)
15594 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
15597 regno = REGNO (reg);
15599 /* The duality of the SPE register size wreaks all kinds of havoc.
15600 This is a way of distinguishing r0 in 32-bits from r0 in
15603 gen_rtx_PARALLEL (VOIDmode,
15606 gen_rtx_REG (SImode, regno + 1200),
15607 gen_rtx_REG (SImode, regno))
15609 gen_rtx_REG (SImode, regno),
15610 gen_rtx_REG (SImode, regno + 1200)));
15613 #include "gt-rs6000.h"