1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
75 enum processor_type rs6000_cpu;
76 struct rs6000_cpu_select rs6000_select[3] =
78 /* switch name, tune arch */
79 { (const char *)0, "--with-cpu=", 1, 1 },
80 { (const char *)0, "-mcpu=", 1, 1 },
81 { (const char *)0, "-mtune=", 1, 0 },
84 /* Support adjust_priority scheduler hook
85 and -mprioritize-restricted-insns= option. */
86 const char *rs6000_sched_restricted_insns_priority_str;
87 int rs6000_sched_restricted_insns_priority;
89 /* Support for -msched-costly-dep option. */
90 const char *rs6000_sched_costly_dep_str;
91 enum rs6000_dependence_cost rs6000_sched_costly_dep;
93 /* Support for -minsert-sched-nops option. */
94 const char *rs6000_sched_insert_nops_str;
95 enum rs6000_nop_insertion rs6000_sched_insert_nops;
97 /* Size of long double */
98 const char *rs6000_long_double_size_string;
99 int rs6000_long_double_type_size;
101 /* Whether -mabi=altivec has appeared */
102 int rs6000_altivec_abi;
104 /* Whether VRSAVE instructions should be generated. */
105 int rs6000_altivec_vrsave;
107 /* String from -mvrsave= option. */
108 const char *rs6000_altivec_vrsave_string;
110 /* Nonzero if we want SPE ABI extensions. */
113 /* Whether isel instructions should be generated. */
116 /* Whether SPE simd instructions should be generated. */
119 /* Nonzero if floating point operations are done in the GPRs. */
120 int rs6000_float_gprs = 0;
122 /* String from -mfloat-gprs=. */
123 const char *rs6000_float_gprs_string;
125 /* String from -misel=. */
126 const char *rs6000_isel_string;
128 /* String from -mspe=. */
129 const char *rs6000_spe_string;
131 /* Set to nonzero once AIX common-mode calls have been defined. */
132 static GTY(()) int common_mode_defined;
134 /* Save information from a "cmpxx" operation until the branch or scc is
136 rtx rs6000_compare_op0, rs6000_compare_op1;
137 int rs6000_compare_fp_p;
139 /* Label number of label created for -mrelocatable, to call to so we can
140 get the address of the GOT section */
141 int rs6000_pic_labelno;
144 /* Which abi to adhere to */
145 const char *rs6000_abi_name;
147 /* Semantics of the small data area */
148 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
150 /* Which small data model to use */
151 const char *rs6000_sdata_name = (char *)0;
153 /* Counter for labels which are to be placed in .fixup. */
154 int fixuplabelno = 0;
157 /* Bit size of immediate TLS offsets and string from which it is decoded. */
158 int rs6000_tls_size = 32;
159 const char *rs6000_tls_size_string;
161 /* ABI enumeration available for subtarget to use. */
162 enum rs6000_abi rs6000_current_abi;
164 /* ABI string from -mabi= option. */
165 const char *rs6000_abi_string;
168 const char *rs6000_debug_name;
169 int rs6000_debug_stack; /* debug stack applications */
170 int rs6000_debug_arg; /* debug argument handling */
173 static GTY(()) tree opaque_V2SI_type_node;
174 static GTY(()) tree opaque_V2SF_type_node;
175 static GTY(()) tree opaque_p_V2SI_type_node;
177 const char *rs6000_traceback_name;
179 traceback_default = 0,
185 /* Flag to say the TOC is initialized */
187 char toc_label_name[10];
189 /* Alias set for saves and restores from the rs6000 stack. */
190 static int rs6000_sr_alias_set;
192 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
193 The only place that looks at this is rs6000_set_default_type_attributes;
194 everywhere else should rely on the presence or absence of a longcall
195 attribute on the function declaration. */
196 int rs6000_default_long_calls;
197 const char *rs6000_longcall_switch;
199 /* Control alignment for fields within structures. */
200 /* String from -malign-XXXXX. */
201 const char *rs6000_alignment_string;
202 int rs6000_alignment_flags;
204 struct builtin_description
206 /* mask is not const because we're going to alter it below. This
207 nonsense will go away when we rewrite the -march infrastructure
208 to give us more target flag bits. */
210 const enum insn_code icode;
211 const char *const name;
212 const enum rs6000_builtins code;
215 static bool rs6000_function_ok_for_sibcall (tree, tree);
216 static int num_insns_constant_wide (HOST_WIDE_INT);
217 static void validate_condition_mode (enum rtx_code, enum machine_mode);
218 static rtx rs6000_generate_compare (enum rtx_code);
219 static void rs6000_maybe_dead (rtx);
220 static void rs6000_emit_stack_tie (void);
221 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
222 static rtx spe_synthesize_frame_save (rtx);
223 static bool spe_func_has_64bit_regs_p (void);
224 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
226 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
227 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
228 static unsigned rs6000_hash_constant (rtx);
229 static unsigned toc_hash_function (const void *);
230 static int toc_hash_eq (const void *, const void *);
231 static int constant_pool_expr_1 (rtx, int *, int *);
232 static bool constant_pool_expr_p (rtx);
233 static bool toc_relative_expr_p (rtx);
234 static bool legitimate_small_data_p (enum machine_mode, rtx);
235 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
236 static bool legitimate_indexed_address_p (rtx, int);
237 static bool legitimate_indirect_address_p (rtx, int);
238 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
239 static struct machine_function * rs6000_init_machine_status (void);
240 static bool rs6000_assemble_integer (rtx, unsigned int, int);
241 #ifdef HAVE_GAS_HIDDEN
242 static void rs6000_assemble_visibility (tree, int);
244 static int rs6000_ra_ever_killed (void);
245 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
246 extern const struct attribute_spec rs6000_attribute_table[];
247 static void rs6000_set_default_type_attributes (tree);
248 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
249 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
250 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
252 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
253 static bool rs6000_return_in_memory (tree, tree);
254 static void rs6000_file_start (void);
256 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
257 static void rs6000_elf_asm_out_constructor (rtx, int);
258 static void rs6000_elf_asm_out_destructor (rtx, int);
259 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
260 static void rs6000_elf_unique_section (tree, int);
261 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
262 unsigned HOST_WIDE_INT);
263 static void rs6000_elf_encode_section_info (tree, rtx, int)
265 static bool rs6000_elf_in_small_data_p (tree);
268 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
269 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
270 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
271 static void rs6000_xcoff_unique_section (tree, int);
272 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
273 unsigned HOST_WIDE_INT);
274 static const char * rs6000_xcoff_strip_name_encoding (const char *);
275 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
276 static void rs6000_xcoff_file_start (void);
277 static void rs6000_xcoff_file_end (void);
280 static bool rs6000_binds_local_p (tree);
282 static int rs6000_use_dfa_pipeline_interface (void);
283 static int rs6000_variable_issue (FILE *, int, rtx, int);
284 static bool rs6000_rtx_costs (rtx, int, int, int *);
285 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
286 static bool is_microcoded_insn (rtx);
287 static int is_dispatch_slot_restricted (rtx);
288 static bool is_cracked_insn (rtx);
289 static bool is_branch_slot_insn (rtx);
290 static int rs6000_adjust_priority (rtx, int);
291 static int rs6000_issue_rate (void);
292 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
293 static rtx get_next_active_insn (rtx, rtx);
294 static bool insn_terminates_group_p (rtx , enum group_termination);
295 static bool is_costly_group (rtx *, rtx);
296 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
297 static int redefine_groups (FILE *, int, rtx, rtx);
298 static int pad_groups (FILE *, int, rtx, rtx);
299 static void rs6000_sched_finish (FILE *, int);
300 static int rs6000_use_sched_lookahead (void);
302 static void rs6000_init_builtins (void);
303 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
304 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
305 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
306 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
307 static void altivec_init_builtins (void);
308 static void rs6000_common_init_builtins (void);
309 static void rs6000_init_libfuncs (void);
311 static void enable_mask_for_builtins (struct builtin_description *, int,
312 enum rs6000_builtins,
313 enum rs6000_builtins);
314 static void spe_init_builtins (void);
315 static rtx spe_expand_builtin (tree, rtx, bool *);
316 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
317 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
318 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
320 static rtx altivec_expand_builtin (tree, rtx, bool *);
321 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
322 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
323 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
324 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
325 static rtx altivec_expand_predicate_builtin (enum insn_code,
326 const char *, tree, rtx);
327 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
328 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
329 static void rs6000_parse_abi_options (void);
330 static void rs6000_parse_alignment_option (void);
331 static void rs6000_parse_tls_size_option (void);
332 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
333 static int first_altivec_reg_to_save (void);
334 static unsigned int compute_vrsave_mask (void);
335 static void is_altivec_return_reg (rtx, void *);
336 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
337 int easy_vector_constant (rtx, enum machine_mode);
338 static int easy_vector_same (rtx, enum machine_mode);
339 static bool is_ev64_opaque_type (tree);
340 static rtx rs6000_dwarf_register_span (rtx);
341 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
342 static rtx rs6000_tls_get_addr (void);
343 static rtx rs6000_got_sym (void);
344 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
345 static const char *rs6000_get_some_local_dynamic_name (void);
346 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
347 static rtx rs6000_complex_function_value (enum machine_mode);
348 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
349 enum machine_mode, tree);
350 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
351 enum machine_mode, tree, int);
352 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
353 enum machine_mode, tree,
355 static tree rs6000_build_builtin_va_list (void);
357 /* Hash table stuff for keeping track of TOC entries. */
359 struct toc_hash_struct GTY(())
361 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
362 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
364 enum machine_mode key_mode;
368 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
370 /* Default register names. */
371 char rs6000_reg_names[][8] =
373 "0", "1", "2", "3", "4", "5", "6", "7",
374 "8", "9", "10", "11", "12", "13", "14", "15",
375 "16", "17", "18", "19", "20", "21", "22", "23",
376 "24", "25", "26", "27", "28", "29", "30", "31",
377 "0", "1", "2", "3", "4", "5", "6", "7",
378 "8", "9", "10", "11", "12", "13", "14", "15",
379 "16", "17", "18", "19", "20", "21", "22", "23",
380 "24", "25", "26", "27", "28", "29", "30", "31",
381 "mq", "lr", "ctr","ap",
382 "0", "1", "2", "3", "4", "5", "6", "7",
384 /* AltiVec registers. */
385 "0", "1", "2", "3", "4", "5", "6", "7",
386 "8", "9", "10", "11", "12", "13", "14", "15",
387 "16", "17", "18", "19", "20", "21", "22", "23",
388 "24", "25", "26", "27", "28", "29", "30", "31",
394 #ifdef TARGET_REGNAMES
395 static const char alt_reg_names[][8] =
397 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
398 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
399 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
400 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
401 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
402 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
403 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
404 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
405 "mq", "lr", "ctr", "ap",
406 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
408 /* AltiVec registers. */
409 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
410 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
411 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
412 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
419 #ifndef MASK_STRICT_ALIGN
420 #define MASK_STRICT_ALIGN 0
422 #ifndef TARGET_PROFILE_KERNEL
423 #define TARGET_PROFILE_KERNEL 0
426 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
427 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
429 /* Return 1 for a symbol ref for a thread-local storage symbol. */
430 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
431 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
433 /* Initialize the GCC target structure. */
434 #undef TARGET_ATTRIBUTE_TABLE
435 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
436 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
437 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
439 #undef TARGET_ASM_ALIGNED_DI_OP
440 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
442 /* Default unaligned ops are only provided for ELF. Find the ops needed
443 for non-ELF systems. */
444 #ifndef OBJECT_FORMAT_ELF
446 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
448 #undef TARGET_ASM_UNALIGNED_HI_OP
449 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
450 #undef TARGET_ASM_UNALIGNED_SI_OP
451 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
452 #undef TARGET_ASM_UNALIGNED_DI_OP
453 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
456 #undef TARGET_ASM_UNALIGNED_HI_OP
457 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
458 #undef TARGET_ASM_UNALIGNED_SI_OP
459 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
463 /* This hook deals with fixups for relocatable code and DI-mode objects
465 #undef TARGET_ASM_INTEGER
466 #define TARGET_ASM_INTEGER rs6000_assemble_integer
468 #ifdef HAVE_GAS_HIDDEN
469 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
470 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
473 #undef TARGET_HAVE_TLS
474 #define TARGET_HAVE_TLS HAVE_AS_TLS
476 #undef TARGET_CANNOT_FORCE_CONST_MEM
477 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
479 #undef TARGET_ASM_FUNCTION_PROLOGUE
480 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
481 #undef TARGET_ASM_FUNCTION_EPILOGUE
482 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
484 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
485 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
486 #undef TARGET_SCHED_VARIABLE_ISSUE
487 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
489 #undef TARGET_SCHED_ISSUE_RATE
490 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
491 #undef TARGET_SCHED_ADJUST_COST
492 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
493 #undef TARGET_SCHED_ADJUST_PRIORITY
494 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
495 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
496 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
497 #undef TARGET_SCHED_FINISH
498 #define TARGET_SCHED_FINISH rs6000_sched_finish
500 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
501 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
503 #undef TARGET_INIT_BUILTINS
504 #define TARGET_INIT_BUILTINS rs6000_init_builtins
506 #undef TARGET_EXPAND_BUILTIN
507 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
509 #undef TARGET_INIT_LIBFUNCS
510 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
513 #undef TARGET_BINDS_LOCAL_P
514 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
517 #undef TARGET_ASM_OUTPUT_MI_THUNK
518 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
520 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
521 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
523 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
524 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
526 #undef TARGET_RTX_COSTS
527 #define TARGET_RTX_COSTS rs6000_rtx_costs
528 #undef TARGET_ADDRESS_COST
529 #define TARGET_ADDRESS_COST hook_int_rtx_0
531 #undef TARGET_VECTOR_OPAQUE_P
532 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
534 #undef TARGET_DWARF_REGISTER_SPAN
535 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
537 /* On rs6000, function arguments are promoted, as are function return
539 #undef TARGET_PROMOTE_FUNCTION_ARGS
540 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
541 #undef TARGET_PROMOTE_FUNCTION_RETURN
542 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
544 /* Structure return values are passed as an extra parameter. */
545 #undef TARGET_STRUCT_VALUE_RTX
546 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
548 #undef TARGET_RETURN_IN_MEMORY
549 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
551 #undef TARGET_SETUP_INCOMING_VARARGS
552 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
554 /* Always strict argument naming on rs6000. */
555 #undef TARGET_STRICT_ARGUMENT_NAMING
556 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
557 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
558 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
560 #undef TARGET_BUILD_BUILTIN_VA_LIST
561 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
563 struct gcc_target targetm = TARGET_INITIALIZER;
565 /* Override command line options. Mostly we process the processor
566 type and sometimes adjust other TARGET_ options. */
569 rs6000_override_options (const char *default_cpu)
572 struct rs6000_cpu_select *ptr;
574 /* Simplify the entries below by making a mask for any POWER
575 variant and any PowerPC variant. */
577 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
578 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
579 | MASK_PPC_GFXOPT | MASK_POWERPC64)
580 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
584 const char *const name; /* Canonical processor name. */
585 const enum processor_type processor; /* Processor type enum value. */
586 const int target_enable; /* Target flags to enable. */
587 const int target_disable; /* Target flags to disable. */
588 } const processor_target_table[]
589 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
590 POWER_MASKS | POWERPC_MASKS},
591 {"power", PROCESSOR_POWER,
592 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
593 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
594 {"power2", PROCESSOR_POWER,
595 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
596 POWERPC_MASKS | MASK_NEW_MNEMONICS},
597 {"power3", PROCESSOR_PPC630,
598 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
600 {"power4", PROCESSOR_POWER4,
601 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
603 {"powerpc", PROCESSOR_POWERPC,
604 MASK_POWERPC | MASK_NEW_MNEMONICS,
605 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
606 {"powerpc64", PROCESSOR_POWERPC64,
607 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
608 POWER_MASKS | POWERPC_OPT_MASKS},
609 {"rios", PROCESSOR_RIOS1,
610 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
611 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
612 {"rios1", PROCESSOR_RIOS1,
613 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
614 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
615 {"rsc", PROCESSOR_PPC601,
616 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
617 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
618 {"rsc1", PROCESSOR_PPC601,
619 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
620 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
621 {"rios2", PROCESSOR_RIOS2,
622 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
623 POWERPC_MASKS | MASK_NEW_MNEMONICS},
624 {"rs64a", PROCESSOR_RS64A,
625 MASK_POWERPC | MASK_NEW_MNEMONICS,
626 POWER_MASKS | POWERPC_OPT_MASKS},
627 {"401", PROCESSOR_PPC403,
628 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
629 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
630 {"403", PROCESSOR_PPC403,
631 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
632 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
633 {"405", PROCESSOR_PPC405,
634 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
635 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
636 {"405fp", PROCESSOR_PPC405,
637 MASK_POWERPC | MASK_NEW_MNEMONICS,
638 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
639 {"440", PROCESSOR_PPC440,
640 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
641 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
642 {"440fp", PROCESSOR_PPC440,
643 MASK_POWERPC | MASK_NEW_MNEMONICS,
644 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
645 {"505", PROCESSOR_MPCCORE,
646 MASK_POWERPC | MASK_NEW_MNEMONICS,
647 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
648 {"601", PROCESSOR_PPC601,
649 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
650 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
651 {"602", PROCESSOR_PPC603,
652 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
653 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
654 {"603", PROCESSOR_PPC603,
655 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
656 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
657 {"603e", PROCESSOR_PPC603,
658 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
659 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
660 {"ec603e", PROCESSOR_PPC603,
661 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
662 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
663 {"604", PROCESSOR_PPC604,
664 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
665 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
666 {"604e", PROCESSOR_PPC604e,
667 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
668 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
669 {"620", PROCESSOR_PPC620,
670 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
672 {"630", PROCESSOR_PPC630,
673 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
675 {"740", PROCESSOR_PPC750,
676 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
677 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
678 {"750", PROCESSOR_PPC750,
679 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
680 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
681 {"G3", PROCESSOR_PPC750,
682 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
683 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
684 {"7400", PROCESSOR_PPC7400,
685 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
686 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
687 {"7450", PROCESSOR_PPC7450,
688 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
689 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
690 {"G4", PROCESSOR_PPC7450,
691 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
692 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
693 {"8540", PROCESSOR_PPC8540,
694 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
695 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
696 {"801", PROCESSOR_MPCCORE,
697 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
698 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
699 {"821", PROCESSOR_MPCCORE,
700 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
701 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
702 {"823", PROCESSOR_MPCCORE,
703 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
704 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
705 {"860", PROCESSOR_MPCCORE,
706 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
707 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
708 {"970", PROCESSOR_POWER4,
709 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
711 {"G5", PROCESSOR_POWER4,
712 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
715 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
717 /* Save current -mmultiple/-mno-multiple status. */
718 int multiple = TARGET_MULTIPLE;
719 /* Save current -mstring/-mno-string status. */
720 int string = TARGET_STRING;
722 /* Identify the processor type. */
723 rs6000_select[0].string = default_cpu;
724 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
726 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
728 ptr = &rs6000_select[i];
729 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
731 for (j = 0; j < ptt_size; j++)
732 if (! strcmp (ptr->string, processor_target_table[j].name))
735 rs6000_cpu = processor_target_table[j].processor;
739 target_flags |= processor_target_table[j].target_enable;
740 target_flags &= ~processor_target_table[j].target_disable;
746 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
753 /* If we are optimizing big endian systems for space, use the load/store
754 multiple and string instructions. */
755 if (BYTES_BIG_ENDIAN && optimize_size)
756 target_flags |= MASK_MULTIPLE | MASK_STRING;
758 /* If -mmultiple or -mno-multiple was explicitly used, don't
759 override with the processor default */
760 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
761 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
763 /* If -mstring or -mno-string was explicitly used, don't override
764 with the processor default. */
765 if ((target_flags_explicit & MASK_STRING) != 0)
766 target_flags = (target_flags & ~MASK_STRING) | string;
768 /* Don't allow -mmultiple or -mstring on little endian systems
769 unless the cpu is a 750, because the hardware doesn't support the
770 instructions used in little endian mode, and causes an alignment
771 trap. The 750 does not cause an alignment trap (except when the
772 target is unaligned). */
774 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
778 target_flags &= ~MASK_MULTIPLE;
779 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
780 warning ("-mmultiple is not supported on little endian systems");
785 target_flags &= ~MASK_STRING;
786 if ((target_flags_explicit & MASK_STRING) != 0)
787 warning ("-mstring is not supported on little endian systems");
791 /* Set debug flags */
792 if (rs6000_debug_name)
794 if (! strcmp (rs6000_debug_name, "all"))
795 rs6000_debug_stack = rs6000_debug_arg = 1;
796 else if (! strcmp (rs6000_debug_name, "stack"))
797 rs6000_debug_stack = 1;
798 else if (! strcmp (rs6000_debug_name, "arg"))
799 rs6000_debug_arg = 1;
801 error ("unknown -mdebug-%s switch", rs6000_debug_name);
804 if (rs6000_traceback_name)
806 if (! strncmp (rs6000_traceback_name, "full", 4))
807 rs6000_traceback = traceback_full;
808 else if (! strncmp (rs6000_traceback_name, "part", 4))
809 rs6000_traceback = traceback_part;
810 else if (! strncmp (rs6000_traceback_name, "no", 2))
811 rs6000_traceback = traceback_none;
813 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
814 rs6000_traceback_name);
817 /* Set size of long double */
818 rs6000_long_double_type_size = 64;
819 if (rs6000_long_double_size_string)
822 int size = strtol (rs6000_long_double_size_string, &tail, 10);
823 if (*tail != '\0' || (size != 64 && size != 128))
824 error ("Unknown switch -mlong-double-%s",
825 rs6000_long_double_size_string);
827 rs6000_long_double_type_size = size;
830 /* Handle -mabi= options. */
831 rs6000_parse_abi_options ();
833 /* Handle -malign-XXXXX option. */
834 rs6000_parse_alignment_option ();
836 /* Handle generic -mFOO=YES/NO options. */
837 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
838 &rs6000_altivec_vrsave);
839 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
841 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
842 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
845 /* Handle -mtls-size option. */
846 rs6000_parse_tls_size_option ();
848 #ifdef SUBTARGET_OVERRIDE_OPTIONS
849 SUBTARGET_OVERRIDE_OPTIONS;
851 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
852 SUBSUBTARGET_OVERRIDE_OPTIONS;
857 /* The e500 does not have string instructions, and we set
858 MASK_STRING above when optimizing for size. */
859 if ((target_flags & MASK_STRING) != 0)
860 target_flags = target_flags & ~MASK_STRING;
862 /* No SPE means 64-bit long doubles, even if an E500. */
863 if (rs6000_spe_string != 0
864 && !strcmp (rs6000_spe_string, "no"))
865 rs6000_long_double_type_size = 64;
867 else if (rs6000_select[1].string != NULL)
869 /* For the powerpc-eabispe configuration, we set all these by
870 default, so let's unset them if we manually set another
871 CPU that is not the E500. */
872 if (rs6000_abi_string == 0)
874 if (rs6000_spe_string == 0)
876 if (rs6000_float_gprs_string == 0)
877 rs6000_float_gprs = 0;
878 if (rs6000_isel_string == 0)
880 if (rs6000_long_double_size_string == 0)
881 rs6000_long_double_type_size = 64;
884 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
885 using TARGET_OPTIONS to handle a toggle switch, but we're out of
886 bits in target_flags so TARGET_SWITCHES cannot be used.
887 Assumption here is that rs6000_longcall_switch points into the
888 text of the complete option, rather than being a copy, so we can
889 scan back for the presence or absence of the no- modifier. */
890 if (rs6000_longcall_switch)
892 const char *base = rs6000_longcall_switch;
893 while (base[-1] != 'm') base--;
895 if (*rs6000_longcall_switch != '\0')
896 error ("invalid option `%s'", base);
897 rs6000_default_long_calls = (base[0] != 'n');
900 /* Handle -mprioritize-restricted-insns option. */
901 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
902 if (rs6000_sched_restricted_insns_priority_str)
903 rs6000_sched_restricted_insns_priority =
904 atoi (rs6000_sched_restricted_insns_priority_str);
906 /* Handle -msched-costly-dep option. */
907 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
908 if (rs6000_sched_costly_dep_str)
910 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
911 rs6000_sched_costly_dep = no_dep_costly;
912 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
913 rs6000_sched_costly_dep = all_deps_costly;
914 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
915 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
916 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
917 rs6000_sched_costly_dep = store_to_load_dep_costly;
919 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
922 /* Handle -minsert-sched-nops option. */
923 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
924 if (rs6000_sched_insert_nops_str)
926 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
927 rs6000_sched_insert_nops = sched_finish_none;
928 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
929 rs6000_sched_insert_nops = sched_finish_pad_groups;
930 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
931 rs6000_sched_insert_nops = sched_finish_regroup_exact;
933 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
936 #ifdef TARGET_REGNAMES
937 /* If the user desires alternate register names, copy in the
938 alternate names now. */
940 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
943 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
944 If -maix-struct-return or -msvr4-struct-return was explicitly
945 used, don't override with the ABI default. */
946 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
948 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
949 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
951 target_flags |= MASK_AIX_STRUCT_RET;
954 if (TARGET_LONG_DOUBLE_128
955 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
956 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
958 /* Allocate an alias set for register saves & restores from stack. */
959 rs6000_sr_alias_set = new_alias_set ();
962 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
964 /* We can only guarantee the availability of DI pseudo-ops when
965 assembling for 64-bit targets. */
968 targetm.asm_out.aligned_op.di = NULL;
969 targetm.asm_out.unaligned_op.di = NULL;
972 /* Set maximum branch target alignment at two instructions, eight bytes. */
973 align_jumps_max_skip = 8;
974 align_loops_max_skip = 8;
976 /* Arrange to save and restore machine status around nested functions. */
977 init_machine_status = rs6000_init_machine_status;
980 /* Handle generic options of the form -mfoo=yes/no.
981 NAME is the option name.
982 VALUE is the option value.
983 FLAG is the pointer to the flag where to store a 1 or 0, depending on
984 whether the option value is 'yes' or 'no' respectively. */
986 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
990 else if (!strcmp (value, "yes"))
992 else if (!strcmp (value, "no"))
995 error ("unknown -m%s= option specified: '%s'", name, value);
998 /* Handle -mabi= options. */
1000 rs6000_parse_abi_options (void)
1002 if (rs6000_abi_string == 0)
1004 else if (! strcmp (rs6000_abi_string, "altivec"))
1005 rs6000_altivec_abi = 1;
1006 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1007 rs6000_altivec_abi = 0;
1008 else if (! strcmp (rs6000_abi_string, "spe"))
1011 if (!TARGET_SPE_ABI)
1012 error ("not configured for ABI: '%s'", rs6000_abi_string);
1015 else if (! strcmp (rs6000_abi_string, "no-spe"))
1018 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1021 /* Handle -malign-XXXXXX options. */
1023 rs6000_parse_alignment_option (void)
1025 if (rs6000_alignment_string == 0)
1027 else if (! strcmp (rs6000_alignment_string, "power"))
1028 rs6000_alignment_flags = MASK_ALIGN_POWER;
1029 else if (! strcmp (rs6000_alignment_string, "natural"))
1030 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1032 error ("unknown -malign-XXXXX option specified: '%s'",
1033 rs6000_alignment_string);
1036 /* Validate and record the size specified with the -mtls-size option. */
1039 rs6000_parse_tls_size_option (void)
1041 if (rs6000_tls_size_string == 0)
1043 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1044 rs6000_tls_size = 16;
1045 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1046 rs6000_tls_size = 32;
1047 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1048 rs6000_tls_size = 64;
1050 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1054 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1058 /* Do anything needed at the start of the asm file. */
1061 rs6000_file_start (void)
1065 const char *start = buffer;
1066 struct rs6000_cpu_select *ptr;
1067 const char *default_cpu = TARGET_CPU_DEFAULT;
1068 FILE *file = asm_out_file;
1070 default_file_start ();
1072 #ifdef TARGET_BI_ARCH
1073 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1077 if (flag_verbose_asm)
1079 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1080 rs6000_select[0].string = default_cpu;
1082 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1084 ptr = &rs6000_select[i];
1085 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1087 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1092 #ifdef USING_ELFOS_H
1093 switch (rs6000_sdata)
1095 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1096 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1097 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1098 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1101 if (rs6000_sdata && g_switch_value)
1103 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1114 /* Return nonzero if this function is known to have a null epilogue. */
1117 direct_return (void)
1119 if (reload_completed)
1121 rs6000_stack_t *info = rs6000_stack_info ();
1123 if (info->first_gp_reg_save == 32
1124 && info->first_fp_reg_save == 64
1125 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1126 && ! info->lr_save_p
1127 && ! info->cr_save_p
1128 && info->vrsave_mask == 0
1136 /* Returns 1 always. */
1139 any_operand (rtx op ATTRIBUTE_UNUSED,
1140 enum machine_mode mode ATTRIBUTE_UNUSED)
1145 /* Returns 1 if op is the count register. */
1147 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1149 if (GET_CODE (op) != REG)
1152 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1155 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1161 /* Returns 1 if op is an altivec register. */
1163 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1166 return (register_operand (op, mode)
1167 && (GET_CODE (op) != REG
1168 || REGNO (op) > FIRST_PSEUDO_REGISTER
1169 || ALTIVEC_REGNO_P (REGNO (op))));
1173 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1175 if (GET_CODE (op) != REG)
1178 if (XER_REGNO_P (REGNO (op)))
1184 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1185 by such constants completes more quickly. */
1188 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1190 return ( GET_CODE (op) == CONST_INT
1191 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1194 /* Return 1 if OP is a constant that can fit in a D field. */
1197 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1199 return (GET_CODE (op) == CONST_INT
1200 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1203 /* Similar for an unsigned D field. */
1206 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1208 return (GET_CODE (op) == CONST_INT
1209 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1212 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1215 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1217 return (GET_CODE (op) == CONST_INT
1218 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1221 /* Returns 1 if OP is a CONST_INT that is a positive value
1222 and an exact power of 2. */
1225 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1227 return (GET_CODE (op) == CONST_INT
1229 && exact_log2 (INTVAL (op)) >= 0);
1232 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1236 gpc_reg_operand (rtx op, enum machine_mode mode)
1238 return (register_operand (op, mode)
1239 && (GET_CODE (op) != REG
1240 || (REGNO (op) >= ARG_POINTER_REGNUM
1241 && !XER_REGNO_P (REGNO (op)))
1242 || REGNO (op) < MQ_REGNO));
1245 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1249 cc_reg_operand (rtx op, enum machine_mode mode)
1251 return (register_operand (op, mode)
1252 && (GET_CODE (op) != REG
1253 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1254 || CR_REGNO_P (REGNO (op))));
1257 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1258 CR field that isn't CR0. */
1261 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1263 return (register_operand (op, mode)
1264 && (GET_CODE (op) != REG
1265 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1266 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1269 /* Returns 1 if OP is either a constant integer valid for a D-field or
1270 a non-special register. If a register, it must be in the proper
1271 mode unless MODE is VOIDmode. */
1274 reg_or_short_operand (rtx op, enum machine_mode mode)
1276 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1279 /* Similar, except check if the negation of the constant would be
1280 valid for a D-field. */
1283 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1285 if (GET_CODE (op) == CONST_INT)
1286 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1288 return gpc_reg_operand (op, mode);
1291 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1292 a non-special register. If a register, it must be in the proper
1293 mode unless MODE is VOIDmode. */
1296 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1298 if (gpc_reg_operand (op, mode))
1300 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1307 /* Return 1 if the operand is either a register or an integer whose
1308 high-order 16 bits are zero. */
1311 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1313 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1316 /* Return 1 is the operand is either a non-special register or ANY
1317 constant integer. */
1320 reg_or_cint_operand (rtx op, enum machine_mode mode)
1322 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1325 /* Return 1 is the operand is either a non-special register or ANY
1326 32-bit signed constant integer. */
1329 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1331 return (gpc_reg_operand (op, mode)
1332 || (GET_CODE (op) == CONST_INT
1333 #if HOST_BITS_PER_WIDE_INT != 32
1334 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1335 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1340 /* Return 1 is the operand is either a non-special register or a 32-bit
1341 signed constant integer valid for 64-bit addition. */
1344 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1346 return (gpc_reg_operand (op, mode)
1347 || (GET_CODE (op) == CONST_INT
1348 #if HOST_BITS_PER_WIDE_INT == 32
1349 && INTVAL (op) < 0x7fff8000
1351 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1357 /* Return 1 is the operand is either a non-special register or a 32-bit
1358 signed constant integer valid for 64-bit subtraction. */
1361 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1363 return (gpc_reg_operand (op, mode)
1364 || (GET_CODE (op) == CONST_INT
1365 #if HOST_BITS_PER_WIDE_INT == 32
1366 && (- INTVAL (op)) < 0x7fff8000
1368 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1374 /* Return 1 is the operand is either a non-special register or ANY
1375 32-bit unsigned constant integer. */
1378 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1380 if (GET_CODE (op) == CONST_INT)
1382 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1384 if (GET_MODE_BITSIZE (mode) <= 32)
1387 if (INTVAL (op) < 0)
1391 return ((INTVAL (op) & GET_MODE_MASK (mode)
1392 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1394 else if (GET_CODE (op) == CONST_DOUBLE)
1396 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1400 return CONST_DOUBLE_HIGH (op) == 0;
1403 return gpc_reg_operand (op, mode);
1406 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1409 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1411 return (GET_CODE (op) == SYMBOL_REF
1412 || GET_CODE (op) == CONST
1413 || GET_CODE (op) == LABEL_REF);
1416 /* Return 1 if the operand is a simple references that can be loaded via
1417 the GOT (labels involving addition aren't allowed). */
1420 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1422 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1425 /* Return the number of instructions it takes to form a constant in an
1426 integer register. */
1429 num_insns_constant_wide (HOST_WIDE_INT value)
1431 /* signed constant loadable with {cal|addi} */
1432 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1435 /* constant loadable with {cau|addis} */
1436 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1439 #if HOST_BITS_PER_WIDE_INT == 64
1440 else if (TARGET_POWERPC64)
1442 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1443 HOST_WIDE_INT high = value >> 31;
1445 if (high == 0 || high == -1)
1451 return num_insns_constant_wide (high) + 1;
1453 return (num_insns_constant_wide (high)
1454 + num_insns_constant_wide (low) + 1);
1463 num_insns_constant (rtx op, enum machine_mode mode)
1465 if (GET_CODE (op) == CONST_INT)
1467 #if HOST_BITS_PER_WIDE_INT == 64
1468 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1469 && mask64_operand (op, mode))
1473 return num_insns_constant_wide (INTVAL (op));
1476 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1481 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1482 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1483 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1486 else if (GET_CODE (op) == CONST_DOUBLE)
1492 int endian = (WORDS_BIG_ENDIAN == 0);
1494 if (mode == VOIDmode || mode == DImode)
1496 high = CONST_DOUBLE_HIGH (op);
1497 low = CONST_DOUBLE_LOW (op);
1501 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1502 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1504 low = l[1 - endian];
1508 return (num_insns_constant_wide (low)
1509 + num_insns_constant_wide (high));
1513 if (high == 0 && low >= 0)
1514 return num_insns_constant_wide (low);
1516 else if (high == -1 && low < 0)
1517 return num_insns_constant_wide (low);
1519 else if (mask64_operand (op, mode))
1523 return num_insns_constant_wide (high) + 1;
1526 return (num_insns_constant_wide (high)
1527 + num_insns_constant_wide (low) + 1);
1535 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1536 register with one instruction per word. We only do this if we can
1537 safely read CONST_DOUBLE_{LOW,HIGH}. */
1540 easy_fp_constant (rtx op, enum machine_mode mode)
1542 if (GET_CODE (op) != CONST_DOUBLE
1543 || GET_MODE (op) != mode
1544 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1547 /* Consider all constants with -msoft-float to be easy. */
1548 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1552 /* If we are using V.4 style PIC, consider all constants to be hard. */
1553 if (flag_pic && DEFAULT_ABI == ABI_V4)
1556 #ifdef TARGET_RELOCATABLE
1557 /* Similarly if we are using -mrelocatable, consider all constants
1559 if (TARGET_RELOCATABLE)
1568 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1569 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1571 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1572 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1573 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1574 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1577 else if (mode == DFmode)
1582 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1583 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1585 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1586 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1589 else if (mode == SFmode)
1594 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1595 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1597 return num_insns_constant_wide (l) == 1;
1600 else if (mode == DImode)
1601 return ((TARGET_POWERPC64
1602 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1603 || (num_insns_constant (op, DImode) <= 2));
1605 else if (mode == SImode)
1611 /* Return nonzero if all elements of a vector have the same value. */
1614 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1618 units = CONST_VECTOR_NUNITS (op);
1620 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1621 for (i = 1; i < units; ++i)
1622 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1629 /* Return 1 if the operand is a CONST_INT and can be put into a
1630 register without using memory. */
1633 easy_vector_constant (rtx op, enum machine_mode mode)
1637 if (GET_CODE (op) != CONST_VECTOR
1642 if (zero_constant (op, mode)
1643 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1644 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1647 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1650 if (TARGET_SPE && mode == V1DImode)
1653 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1654 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1656 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1658 evmergelo r0, r0, r0
1661 I don't know how efficient it would be to allow bigger constants,
1662 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1663 instructions is better than a 64-bit memory load, but I don't
1664 have the e500 timing specs. */
1665 if (TARGET_SPE && mode == V2SImode
1666 && cst >= -0x7fff && cst <= 0x7fff
1667 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1670 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1673 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1679 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1682 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1686 if (!easy_vector_constant (op, mode))
1689 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1691 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1695 output_vec_const_move (rtx *operands)
1698 enum machine_mode mode;
1704 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1705 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1706 mode = GET_MODE (dest);
1710 if (zero_constant (vec, mode))
1711 return "vxor %0,%0,%0";
1712 else if (EASY_VECTOR_15 (cst, vec, mode))
1714 operands[1] = GEN_INT (cst);
1718 return "vspltisw %0,%1";
1720 return "vspltish %0,%1";
1722 return "vspltisb %0,%1";
1727 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1735 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1736 pattern of V1DI, V4HI, and V2SF.
1738 FIXME: We should probably return # and add post reload
1739 splitters for these, but this way is so easy ;-).
1741 operands[1] = GEN_INT (cst);
1742 operands[2] = GEN_INT (cst2);
1744 return "li %0,%1\n\tevmergelo %0,%0,%0";
1746 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1752 /* Return 1 if the operand is the constant 0. This works for scalars
1753 as well as vectors. */
1755 zero_constant (rtx op, enum machine_mode mode)
1757 return op == CONST0_RTX (mode);
1760 /* Return 1 if the operand is 0.0. */
1762 zero_fp_constant (rtx op, enum machine_mode mode)
1764 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1767 /* Return 1 if the operand is in volatile memory. Note that during
1768 the RTL generation phase, memory_operand does not return TRUE for
1769 volatile memory references. So this function allows us to
1770 recognize volatile references where its safe. */
1773 volatile_mem_operand (rtx op, enum machine_mode mode)
1775 if (GET_CODE (op) != MEM)
1778 if (!MEM_VOLATILE_P (op))
1781 if (mode != GET_MODE (op))
1784 if (reload_completed)
1785 return memory_operand (op, mode);
1787 if (reload_in_progress)
1788 return strict_memory_address_p (mode, XEXP (op, 0));
1790 return memory_address_p (mode, XEXP (op, 0));
1793 /* Return 1 if the operand is an offsettable memory operand. */
1796 offsettable_mem_operand (rtx op, enum machine_mode mode)
1798 return ((GET_CODE (op) == MEM)
1799 && offsettable_address_p (reload_completed || reload_in_progress,
1800 mode, XEXP (op, 0)));
1803 /* Return 1 if the operand is either an easy FP constant (see above) or
1807 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1809 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1812 /* Return 1 if the operand is either a non-special register or an item
1813 that can be used as the operand of a `mode' add insn. */
1816 add_operand (rtx op, enum machine_mode mode)
1818 if (GET_CODE (op) == CONST_INT)
1819 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1820 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1822 return gpc_reg_operand (op, mode);
1825 /* Return 1 if OP is a constant but not a valid add_operand. */
1828 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1830 return (GET_CODE (op) == CONST_INT
1831 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1832 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1835 /* Return 1 if the operand is a non-special register or a constant that
1836 can be used as the operand of an OR or XOR insn on the RS/6000. */
1839 logical_operand (rtx op, enum machine_mode mode)
1841 HOST_WIDE_INT opl, oph;
1843 if (gpc_reg_operand (op, mode))
1846 if (GET_CODE (op) == CONST_INT)
1848 opl = INTVAL (op) & GET_MODE_MASK (mode);
1850 #if HOST_BITS_PER_WIDE_INT <= 32
1851 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1855 else if (GET_CODE (op) == CONST_DOUBLE)
1857 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1860 opl = CONST_DOUBLE_LOW (op);
1861 oph = CONST_DOUBLE_HIGH (op);
1868 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1869 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1872 /* Return 1 if C is a constant that is not a logical operand (as
1873 above), but could be split into one. */
1876 non_logical_cint_operand (rtx op, enum machine_mode mode)
1878 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1879 && ! logical_operand (op, mode)
1880 && reg_or_logical_cint_operand (op, mode));
1883 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1884 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1885 Reject all ones and all zeros, since these should have been optimized
1886 away and confuse the making of MB and ME. */
1889 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1891 HOST_WIDE_INT c, lsb;
1893 if (GET_CODE (op) != CONST_INT)
1898 /* Fail in 64-bit mode if the mask wraps around because the upper
1899 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1900 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1903 /* We don't change the number of transitions by inverting,
1904 so make sure we start with the LS bit zero. */
1908 /* Reject all zeros or all ones. */
1912 /* Find the first transition. */
1915 /* Invert to look for a second transition. */
1918 /* Erase first transition. */
1921 /* Find the second transition (if any). */
1924 /* Match if all the bits above are 1's (or c is zero). */
1928 /* Return 1 for the PowerPC64 rlwinm corner case. */
1931 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1933 HOST_WIDE_INT c, lsb;
1935 if (GET_CODE (op) != CONST_INT)
1940 if ((c & 0x80000001) != 0x80000001)
1954 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1955 It is if there are no more than one 1->0 or 0->1 transitions.
1956 Reject all zeros, since zero should have been optimized away and
1957 confuses the making of MB and ME. */
1960 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1962 if (GET_CODE (op) == CONST_INT)
1964 HOST_WIDE_INT c, lsb;
1968 /* Reject all zeros. */
1972 /* We don't change the number of transitions by inverting,
1973 so make sure we start with the LS bit zero. */
1977 /* Find the transition, and check that all bits above are 1's. */
1980 /* Match if all the bits above are 1's (or c is zero). */
1986 /* Like mask64_operand, but allow up to three transitions. This
1987 predicate is used by insn patterns that generate two rldicl or
1988 rldicr machine insns. */
1991 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1993 if (GET_CODE (op) == CONST_INT)
1995 HOST_WIDE_INT c, lsb;
1999 /* Disallow all zeros. */
2003 /* We don't change the number of transitions by inverting,
2004 so make sure we start with the LS bit zero. */
2008 /* Find the first transition. */
2011 /* Invert to look for a second transition. */
2014 /* Erase first transition. */
2017 /* Find the second transition. */
2020 /* Invert to look for a third transition. */
2023 /* Erase second transition. */
2026 /* Find the third transition (if any). */
2029 /* Match if all the bits above are 1's (or c is zero). */
2035 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2036 implement ANDing by the mask IN. */
2038 build_mask64_2_operands (rtx in, rtx *out)
2040 #if HOST_BITS_PER_WIDE_INT >= 64
2041 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2044 if (GET_CODE (in) != CONST_INT)
2050 /* Assume c initially something like 0x00fff000000fffff. The idea
2051 is to rotate the word so that the middle ^^^^^^ group of zeros
2052 is at the MS end and can be cleared with an rldicl mask. We then
2053 rotate back and clear off the MS ^^ group of zeros with a
2055 c = ~c; /* c == 0xff000ffffff00000 */
2056 lsb = c & -c; /* lsb == 0x0000000000100000 */
2057 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2058 c = ~c; /* c == 0x00fff000000fffff */
2059 c &= -lsb; /* c == 0x00fff00000000000 */
2060 lsb = c & -c; /* lsb == 0x0000100000000000 */
2061 c = ~c; /* c == 0xff000fffffffffff */
2062 c &= -lsb; /* c == 0xff00000000000000 */
2064 while ((lsb >>= 1) != 0)
2065 shift++; /* shift == 44 on exit from loop */
2066 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2067 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2068 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2072 /* Assume c initially something like 0xff000f0000000000. The idea
2073 is to rotate the word so that the ^^^ middle group of zeros
2074 is at the LS end and can be cleared with an rldicr mask. We then
2075 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2077 lsb = c & -c; /* lsb == 0x0000010000000000 */
2078 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2079 c = ~c; /* c == 0x00fff0ffffffffff */
2080 c &= -lsb; /* c == 0x00fff00000000000 */
2081 lsb = c & -c; /* lsb == 0x0000100000000000 */
2082 c = ~c; /* c == 0xff000fffffffffff */
2083 c &= -lsb; /* c == 0xff00000000000000 */
2085 while ((lsb >>= 1) != 0)
2086 shift++; /* shift == 44 on exit from loop */
2087 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2088 m1 >>= shift; /* m1 == 0x0000000000000fff */
2089 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2092 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2093 masks will be all 1's. We are guaranteed more than one transition. */
2094 out[0] = GEN_INT (64 - shift);
2095 out[1] = GEN_INT (m1);
2096 out[2] = GEN_INT (shift);
2097 out[3] = GEN_INT (m2);
2105 /* Return 1 if the operand is either a non-special register or a constant
2106 that can be used as the operand of a PowerPC64 logical AND insn. */
2109 and64_operand (rtx op, enum machine_mode mode)
2111 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2112 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2114 return (logical_operand (op, mode) || mask64_operand (op, mode));
2117 /* Like the above, but also match constants that can be implemented
2118 with two rldicl or rldicr insns. */
2121 and64_2_operand (rtx op, enum machine_mode mode)
2123 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2124 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2126 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2129 /* Return 1 if the operand is either a non-special register or a
2130 constant that can be used as the operand of an RS/6000 logical AND insn. */
2133 and_operand (rtx op, enum machine_mode mode)
2135 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2136 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2138 return (logical_operand (op, mode) || mask_operand (op, mode));
2141 /* Return 1 if the operand is a general register or memory operand. */
2144 reg_or_mem_operand (rtx op, enum machine_mode mode)
2146 return (gpc_reg_operand (op, mode)
2147 || memory_operand (op, mode)
2148 || volatile_mem_operand (op, mode));
2151 /* Return 1 if the operand is a general register or memory operand without
2152 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2156 lwa_operand (rtx op, enum machine_mode mode)
2160 if (reload_completed && GET_CODE (inner) == SUBREG)
2161 inner = SUBREG_REG (inner);
2163 return gpc_reg_operand (inner, mode)
2164 || (memory_operand (inner, mode)
2165 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2166 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2167 && (GET_CODE (XEXP (inner, 0)) != PLUS
2168 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2169 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2172 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2175 symbol_ref_operand (rtx op, enum machine_mode mode)
2177 if (mode != VOIDmode && GET_MODE (op) != mode)
2180 return (GET_CODE (op) == SYMBOL_REF
2181 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2184 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2185 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2188 call_operand (rtx op, enum machine_mode mode)
2190 if (mode != VOIDmode && GET_MODE (op) != mode)
2193 return (GET_CODE (op) == SYMBOL_REF
2194 || (GET_CODE (op) == REG
2195 && (REGNO (op) == LINK_REGISTER_REGNUM
2196 || REGNO (op) == COUNT_REGISTER_REGNUM
2197 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2200 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2204 current_file_function_operand (rtx op,
2205 enum machine_mode mode ATTRIBUTE_UNUSED)
2207 return (GET_CODE (op) == SYMBOL_REF
2208 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2209 && (SYMBOL_REF_LOCAL_P (op)
2210 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2213 /* Return 1 if this operand is a valid input for a move insn. */
2216 input_operand (rtx op, enum machine_mode mode)
2218 /* Memory is always valid. */
2219 if (memory_operand (op, mode))
2222 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2223 if (GET_CODE (op) == CONSTANT_P_RTX)
2226 /* For floating-point, easy constants are valid. */
2227 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2229 && easy_fp_constant (op, mode))
2232 /* Allow any integer constant. */
2233 if (GET_MODE_CLASS (mode) == MODE_INT
2234 && (GET_CODE (op) == CONST_INT
2235 || GET_CODE (op) == CONST_DOUBLE))
2238 /* Allow easy vector constants. */
2239 if (GET_CODE (op) == CONST_VECTOR
2240 && easy_vector_constant (op, mode))
2243 /* For floating-point or multi-word mode, the only remaining valid type
2245 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2246 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2247 return register_operand (op, mode);
2249 /* The only cases left are integral modes one word or smaller (we
2250 do not get called for MODE_CC values). These can be in any
2252 if (register_operand (op, mode))
2255 /* A SYMBOL_REF referring to the TOC is valid. */
2256 if (legitimate_constant_pool_address_p (op))
2259 /* A constant pool expression (relative to the TOC) is valid */
2260 if (toc_relative_expr_p (op))
2263 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2265 if (DEFAULT_ABI == ABI_V4
2266 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2267 && small_data_operand (op, Pmode))
2273 /* Return 1 for an operand in small memory on V.4/eabi. */
2276 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2277 enum machine_mode mode ATTRIBUTE_UNUSED)
2282 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2285 if (DEFAULT_ABI != ABI_V4)
2288 if (GET_CODE (op) == SYMBOL_REF)
2291 else if (GET_CODE (op) != CONST
2292 || GET_CODE (XEXP (op, 0)) != PLUS
2293 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2294 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2299 rtx sum = XEXP (op, 0);
2300 HOST_WIDE_INT summand;
2302 /* We have to be careful here, because it is the referenced address
2303 that must be 32k from _SDA_BASE_, not just the symbol. */
2304 summand = INTVAL (XEXP (sum, 1));
2305 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2308 sym_ref = XEXP (sum, 0);
2311 return SYMBOL_REF_SMALL_P (sym_ref);
2317 /* Return true if either operand is a general purpose register. */
2320 gpr_or_gpr_p (rtx op0, rtx op1)
2322 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2323 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2327 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2330 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2332 switch (GET_CODE(op))
2335 if (RS6000_SYMBOL_REF_TLS_P (op))
2337 else if (CONSTANT_POOL_ADDRESS_P (op))
2339 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2347 else if (! strcmp (XSTR (op, 0), toc_label_name))
2356 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2357 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2359 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2368 constant_pool_expr_p (rtx op)
2372 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2376 toc_relative_expr_p (rtx op)
2380 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2383 /* SPE offset addressing is limited to 5-bits worth of double words. */
2384 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2387 legitimate_constant_pool_address_p (rtx x)
2390 && GET_CODE (x) == PLUS
2391 && GET_CODE (XEXP (x, 0)) == REG
2392 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2393 && constant_pool_expr_p (XEXP (x, 1)));
2397 legitimate_small_data_p (enum machine_mode mode, rtx x)
2399 return (DEFAULT_ABI == ABI_V4
2400 && !flag_pic && !TARGET_TOC
2401 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2402 && small_data_operand (x, mode));
2406 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2408 unsigned HOST_WIDE_INT offset, extra;
2410 if (GET_CODE (x) != PLUS)
2412 if (GET_CODE (XEXP (x, 0)) != REG)
2414 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2416 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2419 offset = INTVAL (XEXP (x, 1));
2427 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2428 which leaves the only valid constant offset of zero, which by
2429 canonicalization rules is also invalid. */
2436 /* SPE vector modes. */
2437 return SPE_CONST_OFFSET_OK (offset);
2443 else if (offset & 3)
2451 else if (offset & 3)
2461 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2465 legitimate_indexed_address_p (rtx x, int strict)
2469 if (GET_CODE (x) != PLUS)
2474 if (!REG_P (op0) || !REG_P (op1))
2477 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2478 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2479 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2480 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2484 legitimate_indirect_address_p (rtx x, int strict)
2486 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2490 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2492 if (GET_CODE (x) != LO_SUM)
2494 if (GET_CODE (XEXP (x, 0)) != REG)
2496 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2502 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2506 if (GET_MODE_NUNITS (mode) != 1)
2508 if (GET_MODE_BITSIZE (mode) > 32
2509 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2512 return CONSTANT_P (x);
2519 /* Try machine-dependent ways of modifying an illegitimate address
2520 to be legitimate. If we find one, return the new, valid address.
2521 This is used from only one place: `memory_address' in explow.c.
2523 OLDX is the address as it was before break_out_memory_refs was
2524 called. In some cases it is useful to look at this to decide what
2527 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2529 It is always safe for this function to do nothing. It exists to
2530 recognize opportunities to optimize the output.
2532 On RS/6000, first check for the sum of a register with a constant
2533 integer that is out of range. If so, generate code to add the
2534 constant with the low-order 16 bits masked to the register and force
2535 this result into another register (this can be done with `cau').
2536 Then generate an address of REG+(CONST&0xffff), allowing for the
2537 possibility of bit 16 being a one.
2539 Then check for the sum of a register and something not constant, try to
2540 load the other things into a register and return the sum. */
2543 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2544 enum machine_mode mode)
2546 if (GET_CODE (x) == SYMBOL_REF)
2548 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2550 return rs6000_legitimize_tls_address (x, model);
2553 if (GET_CODE (x) == PLUS
2554 && GET_CODE (XEXP (x, 0)) == REG
2555 && GET_CODE (XEXP (x, 1)) == CONST_INT
2556 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2558 HOST_WIDE_INT high_int, low_int;
2560 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2561 high_int = INTVAL (XEXP (x, 1)) - low_int;
2562 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2563 GEN_INT (high_int)), 0);
2564 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2566 else if (GET_CODE (x) == PLUS
2567 && GET_CODE (XEXP (x, 0)) == REG
2568 && GET_CODE (XEXP (x, 1)) != CONST_INT
2569 && GET_MODE_NUNITS (mode) == 1
2570 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2572 || (mode != DFmode && mode != TFmode))
2573 && (TARGET_POWERPC64 || mode != DImode)
2576 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2577 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2579 else if (ALTIVEC_VECTOR_MODE (mode))
2583 /* Make sure both operands are registers. */
2584 if (GET_CODE (x) == PLUS)
2585 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2586 force_reg (Pmode, XEXP (x, 1)));
2588 reg = force_reg (Pmode, x);
2591 else if (SPE_VECTOR_MODE (mode))
2593 /* We accept [reg + reg] and [reg + OFFSET]. */
2595 if (GET_CODE (x) == PLUS)
2597 rtx op1 = XEXP (x, 0);
2598 rtx op2 = XEXP (x, 1);
2600 op1 = force_reg (Pmode, op1);
2602 if (GET_CODE (op2) != REG
2603 && (GET_CODE (op2) != CONST_INT
2604 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2605 op2 = force_reg (Pmode, op2);
2607 return gen_rtx_PLUS (Pmode, op1, op2);
2610 return force_reg (Pmode, x);
2616 && GET_CODE (x) != CONST_INT
2617 && GET_CODE (x) != CONST_DOUBLE
2619 && GET_MODE_NUNITS (mode) == 1
2620 && (GET_MODE_BITSIZE (mode) <= 32
2621 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2623 rtx reg = gen_reg_rtx (Pmode);
2624 emit_insn (gen_elf_high (reg, (x)));
2625 return gen_rtx_LO_SUM (Pmode, reg, (x));
2627 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2630 && ! MACHO_DYNAMIC_NO_PIC_P
2632 && GET_CODE (x) != CONST_INT
2633 && GET_CODE (x) != CONST_DOUBLE
2635 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2639 rtx reg = gen_reg_rtx (Pmode);
2640 emit_insn (gen_macho_high (reg, (x)));
2641 return gen_rtx_LO_SUM (Pmode, reg, (x));
2644 && constant_pool_expr_p (x)
2645 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2647 return create_TOC_reference (x);
2653 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2655 static GTY(()) rtx rs6000_tls_symbol;
2657 rs6000_tls_get_addr (void)
2659 if (!rs6000_tls_symbol)
2660 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2662 return rs6000_tls_symbol;
2665 /* Construct the SYMBOL_REF for TLS GOT references. */
2667 static GTY(()) rtx rs6000_got_symbol;
2669 rs6000_got_sym (void)
2671 if (!rs6000_got_symbol)
2673 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2674 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2675 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2678 return rs6000_got_symbol;
2681 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2682 this (thread-local) address. */
2685 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2689 dest = gen_reg_rtx (Pmode);
2690 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2696 tlsreg = gen_rtx_REG (Pmode, 13);
2697 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2701 tlsreg = gen_rtx_REG (Pmode, 2);
2702 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2706 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2710 tmp = gen_reg_rtx (Pmode);
2713 tlsreg = gen_rtx_REG (Pmode, 13);
2714 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2718 tlsreg = gen_rtx_REG (Pmode, 2);
2719 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2723 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2725 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2730 rtx r3, got, tga, tmp1, tmp2, eqv;
2733 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2737 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2740 rtx gsym = rs6000_got_sym ();
2741 got = gen_reg_rtx (Pmode);
2743 rs6000_emit_move (got, gsym, Pmode);
2747 static int tls_got_labelno = 0;
2748 rtx tempLR, lab, tmp3, mem;
2751 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2752 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2753 tempLR = gen_reg_rtx (Pmode);
2754 tmp1 = gen_reg_rtx (Pmode);
2755 tmp2 = gen_reg_rtx (Pmode);
2756 tmp3 = gen_reg_rtx (Pmode);
2757 mem = gen_rtx_MEM (Pmode, tmp1);
2758 RTX_UNCHANGING_P (mem) = 1;
2760 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2762 emit_move_insn (tmp1, tempLR);
2763 emit_move_insn (tmp2, mem);
2764 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2765 last = emit_move_insn (got, tmp3);
2766 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2768 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2770 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2776 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2778 r3 = gen_rtx_REG (Pmode, 3);
2780 insn = gen_tls_gd_64 (r3, got, addr);
2782 insn = gen_tls_gd_32 (r3, got, addr);
2785 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2786 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2787 insn = emit_call_insn (insn);
2788 CONST_OR_PURE_CALL_P (insn) = 1;
2789 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2790 insn = get_insns ();
2792 emit_libcall_block (insn, dest, r3, addr);
2794 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2796 r3 = gen_rtx_REG (Pmode, 3);
2798 insn = gen_tls_ld_64 (r3, got);
2800 insn = gen_tls_ld_32 (r3, got);
2803 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2804 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2805 insn = emit_call_insn (insn);
2806 CONST_OR_PURE_CALL_P (insn) = 1;
2807 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2808 insn = get_insns ();
2810 tmp1 = gen_reg_rtx (Pmode);
2811 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2813 emit_libcall_block (insn, tmp1, r3, eqv);
2814 if (rs6000_tls_size == 16)
2817 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2819 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2821 else if (rs6000_tls_size == 32)
2823 tmp2 = gen_reg_rtx (Pmode);
2825 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2827 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2830 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2832 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2836 tmp2 = gen_reg_rtx (Pmode);
2838 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2840 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2842 insn = gen_rtx_SET (Pmode, dest,
2843 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2849 /* IE, or 64 bit offset LE. */
2850 tmp2 = gen_reg_rtx (Pmode);
2852 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2854 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2857 insn = gen_tls_tls_64 (dest, tmp2, addr);
2859 insn = gen_tls_tls_32 (dest, tmp2, addr);
2867 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2868 instruction definitions. */
2871 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2873 return RS6000_SYMBOL_REF_TLS_P (x);
2876 /* Return 1 if X contains a thread-local symbol. */
2879 rs6000_tls_referenced_p (rtx x)
2881 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2884 /* Return 1 if *X is a thread-local symbol. This is the same as
2885 rs6000_tls_symbol_ref except for the type of the unused argument. */
2888 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2890 return RS6000_SYMBOL_REF_TLS_P (*x);
2893 /* The convention appears to be to define this wherever it is used.
2894 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2895 is now used here. */
2896 #ifndef REG_MODE_OK_FOR_BASE_P
2897 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2900 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2901 replace the input X, or the original X if no replacement is called for.
2902 The output parameter *WIN is 1 if the calling macro should goto WIN,
2905 For RS/6000, we wish to handle large displacements off a base
2906 register by splitting the addend across an addiu/addis and the mem insn.
2907 This cuts number of extra insns needed from 3 to 1.
2909 On Darwin, we use this to generate code for floating point constants.
2910 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2911 The Darwin code is inside #if TARGET_MACHO because only then is
2912 machopic_function_base_name() defined. */
2914 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2915 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
2917 /* We must recognize output that we have already generated ourselves. */
2918 if (GET_CODE (x) == PLUS
2919 && GET_CODE (XEXP (x, 0)) == PLUS
2920 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2921 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2922 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2924 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2925 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2926 opnum, (enum reload_type)type);
2932 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2933 && GET_CODE (x) == LO_SUM
2934 && GET_CODE (XEXP (x, 0)) == PLUS
2935 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2936 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2937 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2938 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2939 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2940 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2941 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2943 /* Result of previous invocation of this function on Darwin
2944 floating point constant. */
2945 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2946 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2947 opnum, (enum reload_type)type);
2952 if (GET_CODE (x) == PLUS
2953 && GET_CODE (XEXP (x, 0)) == REG
2954 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2955 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2956 && GET_CODE (XEXP (x, 1)) == CONST_INT
2957 && !SPE_VECTOR_MODE (mode)
2958 && !ALTIVEC_VECTOR_MODE (mode))
2960 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2961 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2963 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2965 /* Check for 32-bit overflow. */
2966 if (high + low != val)
2972 /* Reload the high part into a base reg; leave the low part
2973 in the mem directly. */
2975 x = gen_rtx_PLUS (GET_MODE (x),
2976 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2980 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2981 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2982 opnum, (enum reload_type)type);
2987 if (GET_CODE (x) == SYMBOL_REF
2988 && DEFAULT_ABI == ABI_DARWIN
2989 && !ALTIVEC_VECTOR_MODE (mode)
2992 /* Darwin load of floating point constant. */
2993 rtx offset = gen_rtx (CONST, Pmode,
2994 gen_rtx (MINUS, Pmode, x,
2995 gen_rtx (SYMBOL_REF, Pmode,
2996 machopic_function_base_name ())));
2997 x = gen_rtx (LO_SUM, GET_MODE (x),
2998 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2999 gen_rtx (HIGH, Pmode, offset)), offset);
3000 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3001 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3002 opnum, (enum reload_type)type);
3006 if (GET_CODE (x) == SYMBOL_REF
3007 && DEFAULT_ABI == ABI_DARWIN
3008 && !ALTIVEC_VECTOR_MODE (mode)
3009 && MACHO_DYNAMIC_NO_PIC_P)
3011 /* Darwin load of floating point constant. */
3012 x = gen_rtx (LO_SUM, GET_MODE (x),
3013 gen_rtx (HIGH, Pmode, x), x);
3014 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3015 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3016 opnum, (enum reload_type)type);
3022 && constant_pool_expr_p (x)
3023 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3025 (x) = create_TOC_reference (x);
3033 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3034 that is a valid memory address for an instruction.
3035 The MODE argument is the machine mode for the MEM expression
3036 that wants to use this address.
3038 On the RS/6000, there are four valid address: a SYMBOL_REF that
3039 refers to a constant pool entry of an address (or the sum of it
3040 plus a constant), a short (16-bit signed) constant plus a register,
3041 the sum of two registers, or a register indirect, possibly with an
3042 auto-increment. For DFmode and DImode with a constant plus register,
3043 we must ensure that both words are addressable or PowerPC64 with offset
3046 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3047 32-bit DImode, TImode), indexed addressing cannot be used because
3048 adjacent memory cells are accessed by adding word-sized offsets
3049 during assembly output. */
3051 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3053 if (RS6000_SYMBOL_REF_TLS_P (x))
3055 if (legitimate_indirect_address_p (x, reg_ok_strict))
3057 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3058 && !ALTIVEC_VECTOR_MODE (mode)
3059 && !SPE_VECTOR_MODE (mode)
3061 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3063 if (legitimate_small_data_p (mode, x))
3065 if (legitimate_constant_pool_address_p (x))
3067 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3069 && GET_CODE (x) == PLUS
3070 && GET_CODE (XEXP (x, 0)) == REG
3071 && XEXP (x, 0) == virtual_stack_vars_rtx
3072 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3074 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3077 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3079 || (mode != DFmode && mode != TFmode))
3080 && (TARGET_POWERPC64 || mode != DImode)
3081 && legitimate_indexed_address_p (x, reg_ok_strict))
3083 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3088 /* Go to LABEL if ADDR (a legitimate address expression)
3089 has an effect that depends on the machine mode it is used for.
3091 On the RS/6000 this is true of all integral offsets (since AltiVec
3092 modes don't allow them) or is a pre-increment or decrement.
3094 ??? Except that due to conceptual problems in offsettable_address_p
3095 we can't really report the problems of integral offsets. So leave
3096 this assuming that the adjustable offset must be valid for the
3097 sub-words of a TFmode operand, which is what we had before. */
3100 rs6000_mode_dependent_address (rtx addr)
3102 switch (GET_CODE (addr))
3105 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3107 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3108 return val + 12 + 0x8000 >= 0x10000;
3117 return TARGET_UPDATE;
3126 /* Try to output insns to set TARGET equal to the constant C if it can
3127 be done in less than N insns. Do all computations in MODE.
3128 Returns the place where the output has been placed if it can be
3129 done and the insns have been emitted. If it would take more than N
3130 insns, zero is returned and no insns and emitted. */
3133 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3134 rtx source, int n ATTRIBUTE_UNUSED)
3136 rtx result, insn, set;
3137 HOST_WIDE_INT c0, c1;
3139 if (mode == QImode || mode == HImode)
3142 dest = gen_reg_rtx (mode);
3143 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3146 else if (mode == SImode)
3148 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3150 emit_insn (gen_rtx_SET (VOIDmode, result,
3151 GEN_INT (INTVAL (source)
3152 & (~ (HOST_WIDE_INT) 0xffff))));
3153 emit_insn (gen_rtx_SET (VOIDmode, dest,
3154 gen_rtx_IOR (SImode, result,
3155 GEN_INT (INTVAL (source) & 0xffff))));
3158 else if (mode == DImode)
3160 if (GET_CODE (source) == CONST_INT)
3162 c0 = INTVAL (source);
3165 else if (GET_CODE (source) == CONST_DOUBLE)
3167 #if HOST_BITS_PER_WIDE_INT >= 64
3168 c0 = CONST_DOUBLE_LOW (source);
3171 c0 = CONST_DOUBLE_LOW (source);
3172 c1 = CONST_DOUBLE_HIGH (source);
3178 result = rs6000_emit_set_long_const (dest, c0, c1);
3183 insn = get_last_insn ();
3184 set = single_set (insn);
3185 if (! CONSTANT_P (SET_SRC (set)))
3186 set_unique_reg_note (insn, REG_EQUAL, source);
3191 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3192 fall back to a straight forward decomposition. We do this to avoid
3193 exponential run times encountered when looking for longer sequences
3194 with rs6000_emit_set_const. */
3196 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3198 if (!TARGET_POWERPC64)
3200 rtx operand1, operand2;
3202 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3204 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3206 emit_move_insn (operand1, GEN_INT (c1));
3207 emit_move_insn (operand2, GEN_INT (c2));
3211 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3214 ud2 = (c1 & 0xffff0000) >> 16;
3215 #if HOST_BITS_PER_WIDE_INT >= 64
3219 ud4 = (c2 & 0xffff0000) >> 16;
3221 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3222 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3225 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3227 emit_move_insn (dest, GEN_INT (ud1));
3230 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3231 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3234 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3237 emit_move_insn (dest, GEN_INT (ud2 << 16));
3239 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3241 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3242 || (ud4 == 0 && ! (ud3 & 0x8000)))
3245 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3248 emit_move_insn (dest, GEN_INT (ud3 << 16));
3251 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3252 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3254 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3259 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3262 emit_move_insn (dest, GEN_INT (ud4 << 16));
3265 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3267 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3269 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3270 GEN_INT (ud2 << 16)));
3272 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3278 /* Emit a move from SOURCE to DEST in mode MODE. */
3280 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3284 operands[1] = source;
3286 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3287 if (GET_CODE (operands[1]) == CONST_DOUBLE
3288 && ! FLOAT_MODE_P (mode)
3289 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3291 /* FIXME. This should never happen. */
3292 /* Since it seems that it does, do the safe thing and convert
3294 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3296 if (GET_CODE (operands[1]) == CONST_DOUBLE
3297 && ! FLOAT_MODE_P (mode)
3298 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3299 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3300 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3301 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3304 /* Check if GCC is setting up a block move that will end up using FP
3305 registers as temporaries. We must make sure this is acceptable. */
3306 if (GET_CODE (operands[0]) == MEM
3307 && GET_CODE (operands[1]) == MEM
3309 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3310 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3311 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3312 ? 32 : MEM_ALIGN (operands[0])))
3313 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3315 : MEM_ALIGN (operands[1]))))
3316 && ! MEM_VOLATILE_P (operands [0])
3317 && ! MEM_VOLATILE_P (operands [1]))
3319 emit_move_insn (adjust_address (operands[0], SImode, 0),
3320 adjust_address (operands[1], SImode, 0));
3321 emit_move_insn (adjust_address (operands[0], SImode, 4),
3322 adjust_address (operands[1], SImode, 4));
3326 if (!no_new_pseudos)
3328 if (GET_CODE (operands[1]) == MEM && optimize > 0
3329 && (mode == QImode || mode == HImode || mode == SImode)
3330 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3332 rtx reg = gen_reg_rtx (word_mode);
3334 emit_insn (gen_rtx_SET (word_mode, reg,
3335 gen_rtx_ZERO_EXTEND (word_mode,
3337 operands[1] = gen_lowpart (mode, reg);
3339 if (GET_CODE (operands[0]) != REG)
3340 operands[1] = force_reg (mode, operands[1]);
3343 if (mode == SFmode && ! TARGET_POWERPC
3344 && TARGET_HARD_FLOAT && TARGET_FPRS
3345 && GET_CODE (operands[0]) == MEM)
3349 if (reload_in_progress || reload_completed)
3350 regnum = true_regnum (operands[1]);
3351 else if (GET_CODE (operands[1]) == REG)
3352 regnum = REGNO (operands[1]);
3356 /* If operands[1] is a register, on POWER it may have
3357 double-precision data in it, so truncate it to single
3359 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3362 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3363 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3364 operands[1] = newreg;
3368 /* Recognize the case where operand[1] is a reference to thread-local
3369 data and load its address to a register. */
3370 if (GET_CODE (operands[1]) == SYMBOL_REF)
3372 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3374 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3377 /* Handle the case where reload calls us with an invalid address. */
3378 if (reload_in_progress && mode == Pmode
3379 && (! general_operand (operands[1], mode)
3380 || ! nonimmediate_operand (operands[0], mode)))
3383 /* Handle the case of CONSTANT_P_RTX. */
3384 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3387 /* FIXME: In the long term, this switch statement should go away
3388 and be replaced by a sequence of tests based on things like
3394 if (CONSTANT_P (operands[1])
3395 && GET_CODE (operands[1]) != CONST_INT)
3396 operands[1] = force_const_mem (mode, operands[1]);
3402 if (CONSTANT_P (operands[1])
3403 && ! easy_fp_constant (operands[1], mode))
3404 operands[1] = force_const_mem (mode, operands[1]);
3415 if (CONSTANT_P (operands[1])
3416 && !easy_vector_constant (operands[1], mode))
3417 operands[1] = force_const_mem (mode, operands[1]);
3422 /* Use default pattern for address of ELF small data */
3425 && DEFAULT_ABI == ABI_V4
3426 && (GET_CODE (operands[1]) == SYMBOL_REF
3427 || GET_CODE (operands[1]) == CONST)
3428 && small_data_operand (operands[1], mode))
3430 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3434 if (DEFAULT_ABI == ABI_V4
3435 && mode == Pmode && mode == SImode
3436 && flag_pic == 1 && got_operand (operands[1], mode))
3438 emit_insn (gen_movsi_got (operands[0], operands[1]));
3442 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3446 && CONSTANT_P (operands[1])
3447 && GET_CODE (operands[1]) != HIGH
3448 && GET_CODE (operands[1]) != CONST_INT)
3450 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3452 /* If this is a function address on -mcall-aixdesc,
3453 convert it to the address of the descriptor. */
3454 if (DEFAULT_ABI == ABI_AIX
3455 && GET_CODE (operands[1]) == SYMBOL_REF
3456 && XSTR (operands[1], 0)[0] == '.')
3458 const char *name = XSTR (operands[1], 0);
3460 while (*name == '.')
3462 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3463 CONSTANT_POOL_ADDRESS_P (new_ref)
3464 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3465 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3466 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3467 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3468 operands[1] = new_ref;
3471 if (DEFAULT_ABI == ABI_DARWIN)
3474 if (MACHO_DYNAMIC_NO_PIC_P)
3476 /* Take care of any required data indirection. */
3477 operands[1] = rs6000_machopic_legitimize_pic_address (
3478 operands[1], mode, operands[0]);
3479 if (operands[0] != operands[1])
3480 emit_insn (gen_rtx_SET (VOIDmode,
3481 operands[0], operands[1]));
3485 emit_insn (gen_macho_high (target, operands[1]));
3486 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3490 emit_insn (gen_elf_high (target, operands[1]));
3491 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3495 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3496 and we have put it in the TOC, we just need to make a TOC-relative
3499 && GET_CODE (operands[1]) == SYMBOL_REF
3500 && constant_pool_expr_p (operands[1])
3501 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3502 get_pool_mode (operands[1])))
3504 operands[1] = create_TOC_reference (operands[1]);
3506 else if (mode == Pmode
3507 && CONSTANT_P (operands[1])
3508 && ((GET_CODE (operands[1]) != CONST_INT
3509 && ! easy_fp_constant (operands[1], mode))
3510 || (GET_CODE (operands[1]) == CONST_INT
3511 && num_insns_constant (operands[1], mode) > 2)
3512 || (GET_CODE (operands[0]) == REG
3513 && FP_REGNO_P (REGNO (operands[0]))))
3514 && GET_CODE (operands[1]) != HIGH
3515 && ! legitimate_constant_pool_address_p (operands[1])
3516 && ! toc_relative_expr_p (operands[1]))
3518 /* Emit a USE operation so that the constant isn't deleted if
3519 expensive optimizations are turned on because nobody
3520 references it. This should only be done for operands that
3521 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3522 This should not be done for operands that contain LABEL_REFs.
3523 For now, we just handle the obvious case. */
3524 if (GET_CODE (operands[1]) != LABEL_REF)
3525 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3528 /* Darwin uses a special PIC legitimizer. */
3529 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3532 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3534 if (operands[0] != operands[1])
3535 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3540 /* If we are to limit the number of things we put in the TOC and
3541 this is a symbol plus a constant we can add in one insn,
3542 just put the symbol in the TOC and add the constant. Don't do
3543 this if reload is in progress. */
3544 if (GET_CODE (operands[1]) == CONST
3545 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3546 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3547 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3548 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3549 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3550 && ! side_effects_p (operands[0]))
3553 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3554 rtx other = XEXP (XEXP (operands[1], 0), 1);
3556 sym = force_reg (mode, sym);
3558 emit_insn (gen_addsi3 (operands[0], sym, other));
3560 emit_insn (gen_adddi3 (operands[0], sym, other));
3564 operands[1] = force_const_mem (mode, operands[1]);
3567 && constant_pool_expr_p (XEXP (operands[1], 0))
3568 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3569 get_pool_constant (XEXP (operands[1], 0)),
3570 get_pool_mode (XEXP (operands[1], 0))))
3573 = gen_rtx_MEM (mode,
3574 create_TOC_reference (XEXP (operands[1], 0)));
3575 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3576 RTX_UNCHANGING_P (operands[1]) = 1;
3582 if (GET_CODE (operands[0]) == MEM
3583 && GET_CODE (XEXP (operands[0], 0)) != REG
3584 && ! reload_in_progress)
3586 = replace_equiv_address (operands[0],
3587 copy_addr_to_reg (XEXP (operands[0], 0)));
3589 if (GET_CODE (operands[1]) == MEM
3590 && GET_CODE (XEXP (operands[1], 0)) != REG
3591 && ! reload_in_progress)
3593 = replace_equiv_address (operands[1],
3594 copy_addr_to_reg (XEXP (operands[1], 0)));
3597 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3599 gen_rtx_SET (VOIDmode,
3600 operands[0], operands[1]),
3601 gen_rtx_CLOBBER (VOIDmode,
3602 gen_rtx_SCRATCH (SImode)))));
3611 /* Above, we may have called force_const_mem which may have returned
3612 an invalid address. If we can, fix this up; otherwise, reload will
3613 have to deal with it. */
3614 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3615 operands[1] = validize_mem (operands[1]);
3618 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3621 /* Nonzero if we can use a floating-point register to pass this arg. */
3622 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3623 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3624 && (CUM)->fregno <= FP_ARG_MAX_REG \
3625 && TARGET_HARD_FLOAT && TARGET_FPRS)
3627 /* Nonzero if we can use an AltiVec register to pass this arg. */
3628 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3629 (ALTIVEC_VECTOR_MODE (MODE) \
3630 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3631 && TARGET_ALTIVEC_ABI \
3632 && (DEFAULT_ABI == ABI_V4 || (NAMED)))
3634 /* Return a nonzero value to say to return the function value in
3635 memory, just as large structures are always returned. TYPE will be
3636 the data type of the value, and FNTYPE will be the type of the
3637 function doing the returning, or @code{NULL} for libcalls.
3639 The AIX ABI for the RS/6000 specifies that all structures are
3640 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3641 specifies that structures <= 8 bytes are returned in r3/r4, but a
3642 draft put them in memory, and GCC used to implement the draft
3643 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3644 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3645 compatibility can change DRAFT_V4_STRUCT_RET to override the
3646 default, and -m switches get the final word. See
3647 rs6000_override_options for more details.
3649 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3650 long double support is enabled. These values are returned in memory.
3652 int_size_in_bytes returns -1 for variable size objects, which go in
3653 memory always. The cast to unsigned makes -1 > 8. */
3656 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3658 if (AGGREGATE_TYPE_P (type)
3659 && (TARGET_AIX_STRUCT_RET
3660 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3662 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3667 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3668 for a call to a function whose data type is FNTYPE.
3669 For a library call, FNTYPE is 0.
3671 For incoming args we set the number of arguments in the prototype large
3672 so we never return a PARALLEL. */
3675 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3676 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3678 static CUMULATIVE_ARGS zero_cumulative;
3680 *cum = zero_cumulative;
3682 cum->fregno = FP_ARG_MIN_REG;
3683 cum->vregno = ALTIVEC_ARG_MIN_REG;
3684 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3685 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3686 ? CALL_LIBCALL : CALL_NORMAL);
3687 cum->sysv_gregno = GP_ARG_MIN_REG;
3688 cum->stdarg = fntype
3689 && (TYPE_ARG_TYPES (fntype) != 0
3690 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3691 != void_type_node));
3694 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3696 else if (cum->prototype)
3697 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3698 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3699 || rs6000_return_in_memory (TREE_TYPE (fntype),
3703 cum->nargs_prototype = 0;
3705 /* Check for a longcall attribute. */
3707 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3708 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3709 cum->call_cookie = CALL_LONG;
3711 if (TARGET_DEBUG_ARG)
3713 fprintf (stderr, "\ninit_cumulative_args:");
3716 tree ret_type = TREE_TYPE (fntype);
3717 fprintf (stderr, " ret code = %s,",
3718 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3721 if (cum->call_cookie & CALL_LONG)
3722 fprintf (stderr, " longcall,");
3724 fprintf (stderr, " proto = %d, nargs = %d\n",
3725 cum->prototype, cum->nargs_prototype);
3729 /* If defined, a C expression which determines whether, and in which
3730 direction, to pad out an argument with extra space. The value
3731 should be of type `enum direction': either `upward' to pad above
3732 the argument, `downward' to pad below, or `none' to inhibit
3735 For the AIX ABI structs are always stored left shifted in their
3739 function_arg_padding (enum machine_mode mode, tree type)
3741 #ifndef AGGREGATE_PADDING_FIXED
3742 #define AGGREGATE_PADDING_FIXED 0
3744 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3745 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3748 if (!AGGREGATE_PADDING_FIXED)
3750 /* GCC used to pass structures of the same size as integer types as
3751 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3752 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3753 passed padded downward, except that -mstrict-align further
3754 muddied the water in that multi-component structures of 2 and 4
3755 bytes in size were passed padded upward.
3757 The following arranges for best compatibility with previous
3758 versions of gcc, but removes the -mstrict-align dependency. */
3759 if (BYTES_BIG_ENDIAN)
3761 HOST_WIDE_INT size = 0;
3763 if (mode == BLKmode)
3765 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3766 size = int_size_in_bytes (type);
3769 size = GET_MODE_SIZE (mode);
3771 if (size == 1 || size == 2 || size == 4)
3777 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3779 if (type != 0 && AGGREGATE_TYPE_P (type))
3783 /* Fall back to the default. */
3784 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3787 /* If defined, a C expression that gives the alignment boundary, in bits,
3788 of an argument with the specified mode and type. If it is not defined,
3789 PARM_BOUNDARY is used for all arguments.
3791 V.4 wants long longs to be double word aligned. */
3794 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3796 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3798 else if (SPE_VECTOR_MODE (mode))
3800 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3803 return PARM_BOUNDARY;
3806 /* Update the data in CUM to advance over an argument
3807 of mode MODE and data type TYPE.
3808 (TYPE is null for libcalls where that information may not be available.) */
3811 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3812 tree type, int named)
3814 cum->nargs_prototype--;
3816 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3818 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3821 /* In variable-argument functions, vector arguments get GPRs allocated
3822 even if they are going to be passed in a vector register. */
3823 if (cum->stdarg && DEFAULT_ABI != ABI_V4)
3827 /* Vector parameters must be 16-byte aligned. This places
3828 them at 2 mod 4 in terms of words in 32-bit mode, since
3829 the parameter save area starts at offset 24 from the
3830 stack. In 64-bit mode, they just have to start on an
3831 even word, since the parameter save area is 16-byte
3832 aligned. Space for GPRs is reserved even if the argument
3833 will be passed in memory. */
3835 align = ((6 - (cum->words & 3)) & 3);
3837 align = cum->words & 1;
3838 cum->words += align + RS6000_ARG_SIZE (mode, type);
3840 if (TARGET_DEBUG_ARG)
3842 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
3844 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
3845 cum->nargs_prototype, cum->prototype,
3846 GET_MODE_NAME (mode));
3850 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3852 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3854 else if (DEFAULT_ABI == ABI_V4)
3856 if (TARGET_HARD_FLOAT && TARGET_FPRS
3857 && (mode == SFmode || mode == DFmode))
3859 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3864 cum->words += cum->words & 1;
3865 cum->words += RS6000_ARG_SIZE (mode, type);
3871 int gregno = cum->sysv_gregno;
3873 /* Aggregates and IEEE quad get passed by reference. */
3874 if ((type && AGGREGATE_TYPE_P (type))
3878 n_words = RS6000_ARG_SIZE (mode, type);
3880 /* Long long and SPE vectors are put in odd registers. */
3881 if (n_words == 2 && (gregno & 1) == 0)
3884 /* Long long and SPE vectors are not split between registers
3886 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3888 /* Long long is aligned on the stack. */
3890 cum->words += cum->words & 1;
3891 cum->words += n_words;
3894 /* Note: continuing to accumulate gregno past when we've started
3895 spilling to the stack indicates the fact that we've started
3896 spilling to the stack to expand_builtin_saveregs. */
3897 cum->sysv_gregno = gregno + n_words;
3900 if (TARGET_DEBUG_ARG)
3902 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3903 cum->words, cum->fregno);
3904 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3905 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3906 fprintf (stderr, "mode = %4s, named = %d\n",
3907 GET_MODE_NAME (mode), named);
3912 int align = (TARGET_32BIT && (cum->words & 1) != 0
3913 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3915 cum->words += align + RS6000_ARG_SIZE (mode, type);
3917 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3918 && TARGET_HARD_FLOAT && TARGET_FPRS)
3919 cum->fregno += (mode == TFmode ? 2 : 1);
3921 if (TARGET_DEBUG_ARG)
3923 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3924 cum->words, cum->fregno);
3925 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3926 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3927 fprintf (stderr, "named = %d, align = %d\n", named, align);
3932 /* Determine where to put a SIMD argument on the SPE. */
3935 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3940 int gregno = cum->sysv_gregno;
3941 int n_words = RS6000_ARG_SIZE (mode, type);
3943 /* SPE vectors are put in odd registers. */
3944 if (n_words == 2 && (gregno & 1) == 0)
3947 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3950 enum machine_mode m = SImode;
3952 r1 = gen_rtx_REG (m, gregno);
3953 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3954 r2 = gen_rtx_REG (m, gregno + 1);
3955 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3956 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3963 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3964 return gen_rtx_REG (mode, cum->sysv_gregno);
3970 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
3973 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3974 tree type, int align_words)
3978 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
3979 in vararg list into zero, one or two GPRs */
3980 if (align_words >= GP_ARG_NUM_REG)
3981 return gen_rtx_PARALLEL (DFmode,
3983 gen_rtx_EXPR_LIST (VOIDmode,
3984 NULL_RTX, const0_rtx),
3985 gen_rtx_EXPR_LIST (VOIDmode,
3989 else if (align_words + RS6000_ARG_SIZE (mode, type)
3991 /* If this is partially on the stack, then we only
3992 include the portion actually in registers here. */
3993 return gen_rtx_PARALLEL (DFmode,
3995 gen_rtx_EXPR_LIST (VOIDmode,
3996 gen_rtx_REG (SImode,
4000 gen_rtx_EXPR_LIST (VOIDmode,
4005 /* split a DFmode arg into two GPRs */
4006 return gen_rtx_PARALLEL (DFmode,
4008 gen_rtx_EXPR_LIST (VOIDmode,
4009 gen_rtx_REG (SImode,
4013 gen_rtx_EXPR_LIST (VOIDmode,
4014 gen_rtx_REG (SImode,
4018 gen_rtx_EXPR_LIST (VOIDmode,
4019 gen_rtx_REG (mode, cum->fregno),
4022 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4024 else if (mode == DImode)
4026 if (align_words < GP_ARG_NUM_REG - 1)
4027 return gen_rtx_PARALLEL (DImode,
4029 gen_rtx_EXPR_LIST (VOIDmode,
4030 gen_rtx_REG (SImode,
4034 gen_rtx_EXPR_LIST (VOIDmode,
4035 gen_rtx_REG (SImode,
4039 else if (align_words == GP_ARG_NUM_REG - 1)
4040 return gen_rtx_PARALLEL (DImode,
4042 gen_rtx_EXPR_LIST (VOIDmode,
4043 NULL_RTX, const0_rtx),
4044 gen_rtx_EXPR_LIST (VOIDmode,
4045 gen_rtx_REG (SImode,
4050 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4053 int size = int_size_in_bytes (type);
4054 int no_units = ((size - 1) / 4) + 1;
4055 int max_no_words = GP_ARG_NUM_REG - align_words;
4056 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4057 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4059 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4061 for (k=0; k < rtlvec_len; k++)
4062 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4063 gen_rtx_REG (SImode,
4066 k == 0 ? const0_rtx : GEN_INT (k*4));
4068 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4074 /* Determine where to put an argument to a function.
4075 Value is zero to push the argument on the stack,
4076 or a hard register in which to store the argument.
4078 MODE is the argument's machine mode.
4079 TYPE is the data type of the argument (as a tree).
4080 This is null for libcalls where that information may
4082 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4083 the preceding args and about the function being called.
4084 NAMED is nonzero if this argument is a named parameter
4085 (otherwise it is an extra parameter matching an ellipsis).
4087 On RS/6000 the first eight words of non-FP are normally in registers
4088 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4089 Under V.4, the first 8 FP args are in registers.
4091 If this is floating-point and no prototype is specified, we use
4092 both an FP and integer register (or possibly FP reg and stack). Library
4093 functions (when CALL_LIBCALL is set) always have the proper types for args,
4094 so we can pass the FP value just in one register. emit_library_function
4095 doesn't support PARALLEL anyway. */
4098 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4099 tree type, int named)
4101 enum rs6000_abi abi = DEFAULT_ABI;
4103 /* Return a marker to indicate whether CR1 needs to set or clear the
4104 bit that V.4 uses to say fp args were passed in registers.
4105 Assume that we don't need the marker for software floating point,
4106 or compiler generated library calls. */
4107 if (mode == VOIDmode)
4110 && cum->nargs_prototype < 0
4111 && (cum->call_cookie & CALL_LIBCALL) == 0
4112 && (cum->prototype || TARGET_NO_PROTOTYPE))
4114 /* For the SPE, we need to crxor CR6 always. */
4116 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4117 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4118 return GEN_INT (cum->call_cookie
4119 | ((cum->fregno == FP_ARG_MIN_REG)
4120 ? CALL_V4_SET_FP_ARGS
4121 : CALL_V4_CLEAR_FP_ARGS));
4124 return GEN_INT (cum->call_cookie);
4127 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4128 return gen_rtx_REG (mode, cum->vregno);
4129 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4131 if (named || abi == ABI_V4)
4135 /* Vector parameters to varargs functions under AIX or Darwin
4136 get passed in memory and possibly also in GPRs. */
4137 int align, align_words;
4138 enum machine_mode part_mode = mode;
4140 /* Vector parameters must be 16-byte aligned. This places them at
4141 2 mod 4 in terms of words in 32-bit mode, since the parameter
4142 save area starts at offset 24 from the stack. In 64-bit mode,
4143 they just have to start on an even word, since the parameter
4144 save area is 16-byte aligned. */
4146 align = ((6 - (cum->words & 3)) & 3);
4148 align = cum->words & 1;
4149 align_words = cum->words + align;
4151 /* Out of registers? Memory, then. */
4152 if (align_words >= GP_ARG_NUM_REG)
4155 /* The vector value goes in GPRs. Only the part of the
4156 value in GPRs is reported here. */
4157 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4159 /* Fortunately, there are only two possibilites, the value
4160 is either wholly in GPRs or half in GPRs and half not. */
4163 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4166 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4167 return rs6000_spe_function_arg (cum, mode, type);
4168 else if (abi == ABI_V4)
4170 if (TARGET_HARD_FLOAT && TARGET_FPRS
4171 && (mode == SFmode || mode == DFmode))
4173 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4174 return gen_rtx_REG (mode, cum->fregno);
4181 int gregno = cum->sysv_gregno;
4183 /* Aggregates and IEEE quad get passed by reference. */
4184 if ((type && AGGREGATE_TYPE_P (type))
4188 n_words = RS6000_ARG_SIZE (mode, type);
4190 /* Long long and SPE vectors are put in odd registers. */
4191 if (n_words == 2 && (gregno & 1) == 0)
4194 /* Long long do not split between registers and stack. */
4195 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4196 return gen_rtx_REG (mode, gregno);
4203 int align = (TARGET_32BIT && (cum->words & 1) != 0
4204 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4205 int align_words = cum->words + align;
4207 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4210 if (TARGET_32BIT && TARGET_POWERPC64
4211 && (mode == DFmode || mode == DImode || mode == BLKmode))
4212 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4214 if (USE_FP_FOR_ARG_P (cum, mode, type))
4217 || ((cum->nargs_prototype > 0)
4218 /* IBM AIX extended its linkage convention definition always
4219 to require FP args after register save area hole on the
4221 && (DEFAULT_ABI != ABI_AIX
4223 || (align_words < GP_ARG_NUM_REG))))
4224 return gen_rtx_REG (mode, cum->fregno);
4226 return gen_rtx_PARALLEL (mode,
4228 gen_rtx_EXPR_LIST (VOIDmode,
4229 ((align_words >= GP_ARG_NUM_REG)
4232 + RS6000_ARG_SIZE (mode, type)
4234 /* If this is partially on the stack, then
4235 we only include the portion actually
4236 in registers here. */
4237 ? gen_rtx_REG (SImode,
4238 GP_ARG_MIN_REG + align_words)
4239 : gen_rtx_REG (mode,
4240 GP_ARG_MIN_REG + align_words))),
4242 gen_rtx_EXPR_LIST (VOIDmode,
4243 gen_rtx_REG (mode, cum->fregno),
4246 else if (align_words < GP_ARG_NUM_REG)
4247 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4253 /* For an arg passed partly in registers and partly in memory,
4254 this is the number of registers used.
4255 For args passed entirely in registers or entirely in memory, zero. */
4258 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4259 tree type, int named)
4261 if (DEFAULT_ABI == ABI_V4)
4264 if (USE_FP_FOR_ARG_P (cum, mode, type)
4265 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4267 if (cum->nargs_prototype >= 0)
4271 if (cum->words < GP_ARG_NUM_REG
4272 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4274 int ret = GP_ARG_NUM_REG - cum->words;
4275 if (ret && TARGET_DEBUG_ARG)
4276 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4284 /* A C expression that indicates when an argument must be passed by
4285 reference. If nonzero for an argument, a copy of that argument is
4286 made in memory and a pointer to the argument is passed instead of
4287 the argument itself. The pointer is passed in whatever way is
4288 appropriate for passing a pointer to that type.
4290 Under V.4, structures and unions are passed by reference.
4292 As an extension to all ABIs, variable sized types are passed by
4296 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4297 enum machine_mode mode ATTRIBUTE_UNUSED,
4298 tree type, int named ATTRIBUTE_UNUSED)
4300 if (DEFAULT_ABI == ABI_V4
4301 && ((type && AGGREGATE_TYPE_P (type))
4304 if (TARGET_DEBUG_ARG)
4305 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4309 return type && int_size_in_bytes (type) < 0;
4312 /* Perform any needed actions needed for a function that is receiving a
4313 variable number of arguments.
4317 MODE and TYPE are the mode and type of the current parameter.
4319 PRETEND_SIZE is a variable that should be set to the amount of stack
4320 that must be pushed by the prolog to pretend that our caller pushed
4323 Normally, this macro will push all remaining incoming registers on the
4324 stack and set PRETEND_SIZE to the length of the registers pushed. */
4327 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4328 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4330 CUMULATIVE_ARGS next_cum;
4331 int reg_size = TARGET_32BIT ? 4 : 8;
4332 rtx save_area = NULL_RTX, mem;
4333 int first_reg_offset, set;
4335 /* Skip the last named argument. */
4337 function_arg_advance (&next_cum, mode, type, 1);
4339 if (DEFAULT_ABI == ABI_V4)
4341 /* Indicate to allocate space on the stack for varargs save area. */
4342 cfun->machine->sysv_varargs_p = 1;
4344 save_area = plus_constant (virtual_stack_vars_rtx,
4345 - RS6000_VARARGS_SIZE);
4347 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4351 first_reg_offset = next_cum.words;
4352 save_area = virtual_incoming_args_rtx;
4353 cfun->machine->sysv_varargs_p = 0;
4355 if (MUST_PASS_IN_STACK (mode, type))
4356 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4359 set = get_varargs_alias_set ();
4360 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4362 mem = gen_rtx_MEM (BLKmode,
4363 plus_constant (save_area,
4364 first_reg_offset * reg_size)),
4365 set_mem_alias_set (mem, set);
4366 set_mem_align (mem, BITS_PER_WORD);
4368 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4369 GP_ARG_NUM_REG - first_reg_offset);
4372 /* Save FP registers if needed. */
4373 if (DEFAULT_ABI == ABI_V4
4374 && TARGET_HARD_FLOAT && TARGET_FPRS
4376 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4378 int fregno = next_cum.fregno;
4379 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4380 rtx lab = gen_label_rtx ();
4381 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4383 emit_jump_insn (gen_rtx_SET (VOIDmode,
4385 gen_rtx_IF_THEN_ELSE (VOIDmode,
4386 gen_rtx_NE (VOIDmode, cr1,
4388 gen_rtx_LABEL_REF (VOIDmode, lab),
4391 while (fregno <= FP_ARG_V4_MAX_REG)
4393 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4394 set_mem_alias_set (mem, set);
4395 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4404 /* Create the va_list data type. */
4407 rs6000_build_builtin_va_list (void)
4409 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4411 /* For AIX, prefer 'char *' because that's what the system
4412 header files like. */
4413 if (DEFAULT_ABI != ABI_V4)
4414 return build_pointer_type (char_type_node);
4416 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4417 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4419 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4420 unsigned_char_type_node);
4421 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4422 unsigned_char_type_node);
4423 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4425 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4426 short_unsigned_type_node);
4427 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4429 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4432 DECL_FIELD_CONTEXT (f_gpr) = record;
4433 DECL_FIELD_CONTEXT (f_fpr) = record;
4434 DECL_FIELD_CONTEXT (f_res) = record;
4435 DECL_FIELD_CONTEXT (f_ovf) = record;
4436 DECL_FIELD_CONTEXT (f_sav) = record;
4438 TREE_CHAIN (record) = type_decl;
4439 TYPE_NAME (record) = type_decl;
4440 TYPE_FIELDS (record) = f_gpr;
4441 TREE_CHAIN (f_gpr) = f_fpr;
4442 TREE_CHAIN (f_fpr) = f_res;
4443 TREE_CHAIN (f_res) = f_ovf;
4444 TREE_CHAIN (f_ovf) = f_sav;
4446 layout_type (record);
4448 /* The correct type is an array type of one element. */
4449 return build_array_type (record, build_index_type (size_zero_node));
4452 /* Implement va_start. */
4455 rs6000_va_start (tree valist, rtx nextarg)
4457 HOST_WIDE_INT words, n_gpr, n_fpr;
4458 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4459 tree gpr, fpr, ovf, sav, t;
4461 /* Only SVR4 needs something special. */
4462 if (DEFAULT_ABI != ABI_V4)
4464 std_expand_builtin_va_start (valist, nextarg);
4468 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4469 f_fpr = TREE_CHAIN (f_gpr);
4470 f_res = TREE_CHAIN (f_fpr);
4471 f_ovf = TREE_CHAIN (f_res);
4472 f_sav = TREE_CHAIN (f_ovf);
4474 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4475 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4476 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4477 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4478 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4480 /* Count number of gp and fp argument registers used. */
4481 words = current_function_args_info.words;
4482 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4483 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4485 if (TARGET_DEBUG_ARG)
4486 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4487 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4488 words, n_gpr, n_fpr);
4490 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4491 TREE_SIDE_EFFECTS (t) = 1;
4492 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4494 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4495 TREE_SIDE_EFFECTS (t) = 1;
4496 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4498 /* Find the overflow area. */
4499 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4501 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4502 build_int_2 (words * UNITS_PER_WORD, 0));
4503 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4504 TREE_SIDE_EFFECTS (t) = 1;
4505 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4507 /* Find the register save area. */
4508 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4509 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4510 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4511 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4512 TREE_SIDE_EFFECTS (t) = 1;
4513 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4516 /* Implement va_arg. */
4519 rs6000_va_arg (tree valist, tree type)
4521 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4522 tree gpr, fpr, ovf, sav, reg, t, u;
4523 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4524 rtx lab_false, lab_over, addr_rtx, r;
4526 if (DEFAULT_ABI != ABI_V4)
4528 /* Variable sized types are passed by reference. */
4529 if (int_size_in_bytes (type) < 0)
4531 u = build_pointer_type (type);
4533 /* Args grow upward. */
4534 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4535 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4536 TREE_SIDE_EFFECTS (t) = 1;
4538 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4539 TREE_SIDE_EFFECTS (t) = 1;
4541 t = build1 (INDIRECT_REF, u, t);
4542 TREE_SIDE_EFFECTS (t) = 1;
4544 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4548 /* Altivec arguments must be aligned to a 128-bit boundary. */
4549 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4551 tree vtype = TREE_TYPE (valist);
4552 tree new_valist, modify;
4554 /* Round address up to multiple of 16. Computes
4556 new_valist = fold (build (BIT_AND_EXPR, vtype,
4557 fold (build (PLUS_EXPR, vtype, valist,
4558 build_int_2 (15, 0))),
4559 build_int_2 (~15, -1)));
4561 /* Update valist. */
4562 modify = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
4564 TREE_SIDE_EFFECTS (modify) = 1;
4565 expand_expr (modify, const0_rtx, VOIDmode, EXPAND_NORMAL);
4568 return std_expand_builtin_va_arg (valist, type);
4572 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4573 f_fpr = TREE_CHAIN (f_gpr);
4574 f_res = TREE_CHAIN (f_fpr);
4575 f_ovf = TREE_CHAIN (f_res);
4576 f_sav = TREE_CHAIN (f_ovf);
4578 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4579 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4580 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4581 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4582 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4584 size = int_size_in_bytes (type);
4585 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4587 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4589 /* Aggregates and long doubles are passed by reference. */
4595 size = UNITS_PER_WORD;
4598 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4600 /* FP args go in FP registers, if present. */
4609 /* Otherwise into GP registers. */
4617 /* Pull the value out of the saved registers ... */
4619 lab_false = gen_label_rtx ();
4620 lab_over = gen_label_rtx ();
4621 addr_rtx = gen_reg_rtx (Pmode);
4623 /* AltiVec vectors never go in registers. */
4624 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4626 TREE_THIS_VOLATILE (reg) = 1;
4627 emit_cmp_and_jump_insns
4628 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4629 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4632 /* Long long is aligned in the registers. */
4635 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4636 build_int_2 (n_reg - 1, 0));
4637 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4638 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4639 TREE_SIDE_EFFECTS (u) = 1;
4640 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4644 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4648 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4649 build_int_2 (n_reg, 0));
4650 TREE_SIDE_EFFECTS (u) = 1;
4652 u = build1 (CONVERT_EXPR, integer_type_node, u);
4653 TREE_SIDE_EFFECTS (u) = 1;
4655 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4656 TREE_SIDE_EFFECTS (u) = 1;
4658 t = build (PLUS_EXPR, ptr_type_node, t, u);
4659 TREE_SIDE_EFFECTS (t) = 1;
4661 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4663 emit_move_insn (addr_rtx, r);
4665 emit_jump_insn (gen_jump (lab_over));
4669 emit_label (lab_false);
4671 /* ... otherwise out of the overflow area. */
4673 /* Make sure we don't find reg 7 for the next int arg.
4675 All AltiVec vectors go in the overflow area. So in the AltiVec
4676 case we need to get the vectors from the overflow area, but
4677 remember where the GPRs and FPRs are. */
4678 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4679 || !TARGET_ALTIVEC))
4681 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4682 TREE_SIDE_EFFECTS (t) = 1;
4683 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4686 /* Care for on-stack alignment if needed. */
4693 /* AltiVec vectors are 16 byte aligned. */
4694 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4699 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4700 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4704 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4706 emit_move_insn (addr_rtx, r);
4708 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4709 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4710 TREE_SIDE_EFFECTS (t) = 1;
4711 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4713 emit_label (lab_over);
4717 r = gen_rtx_MEM (Pmode, addr_rtx);
4718 set_mem_alias_set (r, get_varargs_alias_set ());
4719 emit_move_insn (addr_rtx, r);
4727 #define def_builtin(MASK, NAME, TYPE, CODE) \
4729 if ((MASK) & target_flags) \
4730 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4734 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4736 static const struct builtin_description bdesc_3arg[] =
4738 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4739 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4740 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4741 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4742 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4743 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4744 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4745 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4746 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4747 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4748 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4749 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4750 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4751 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4752 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4753 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4754 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4755 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4756 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4757 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4758 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4759 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4760 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4763 /* DST operations: void foo (void *, const int, const char). */
4765 static const struct builtin_description bdesc_dst[] =
4767 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4768 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4769 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4770 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4773 /* Simple binary operations: VECc = foo (VECa, VECb). */
4775 static struct builtin_description bdesc_2arg[] =
4777 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4778 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4779 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4780 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4781 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4782 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4783 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4784 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4785 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4786 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4787 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4788 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4789 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4790 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4791 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4792 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4793 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4794 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4795 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4796 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4797 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4798 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4799 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4800 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4801 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4802 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4803 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4804 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4805 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4806 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4807 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4808 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4809 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4810 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4811 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4812 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4813 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4814 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4815 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4816 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4817 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4818 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4819 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4820 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4821 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4822 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4823 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4824 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4825 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4826 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4827 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4828 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4829 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4830 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4831 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4832 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4833 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4834 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4835 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4836 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4837 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4838 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4839 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4840 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4841 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4842 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4843 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4844 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4845 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4846 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4847 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4848 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4849 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4850 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4851 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4852 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4853 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4854 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4856 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4857 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4858 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4859 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4860 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4861 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4862 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4863 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4864 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4869 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4870 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4872 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4873 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4874 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4875 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4876 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4877 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4878 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4879 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4880 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4881 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4882 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4883 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4884 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4885 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4886 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4887 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4888 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4889 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4891 /* Place holder, leave as first spe builtin. */
4892 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4893 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4894 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4895 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4896 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4897 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4898 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4899 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4900 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4901 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4902 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4903 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4904 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4905 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4906 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4907 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4908 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4909 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4910 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4911 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4912 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4913 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4914 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4915 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4916 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4917 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4918 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4919 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4920 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4921 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4922 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4923 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4924 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4925 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4926 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4927 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4928 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4929 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4930 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4931 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4932 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4933 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4934 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4935 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4936 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4937 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4938 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4939 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4940 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4941 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4942 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4943 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4944 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4945 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4946 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4947 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4948 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4949 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4950 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4951 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4952 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4953 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4954 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4955 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4956 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4957 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4958 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4959 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4960 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4961 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4962 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4963 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4964 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4965 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4966 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4967 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4968 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4969 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4970 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4971 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4972 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4973 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4974 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4975 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4976 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4977 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4978 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4979 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4980 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4981 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4982 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4983 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4984 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4985 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4986 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4987 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4988 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4989 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4990 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4991 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4992 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4993 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4994 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4995 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4996 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4997 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4998 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4999 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5000 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5002 /* SPE binary operations expecting a 5-bit unsigned literal. */
5003 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5005 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5006 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5007 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5008 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5009 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5010 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5011 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5012 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5013 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5014 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5015 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5016 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5017 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5018 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5019 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5020 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5021 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5022 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5023 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5024 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5025 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5026 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5027 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5028 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5029 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5030 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5032 /* Place-holder. Leave as last binary SPE builtin. */
5033 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5036 /* AltiVec predicates. */
5038 struct builtin_description_predicates
5040 const unsigned int mask;
5041 const enum insn_code icode;
5043 const char *const name;
5044 const enum rs6000_builtins code;
5047 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5049 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5050 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5051 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5052 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5053 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5054 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5055 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5056 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5057 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5058 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5059 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5060 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5061 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5064 /* SPE predicates. */
5065 static struct builtin_description bdesc_spe_predicates[] =
5067 /* Place-holder. Leave as first. */
5068 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5069 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5070 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5071 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5072 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5073 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5074 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5075 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5076 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5077 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5078 /* Place-holder. Leave as last. */
5079 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5082 /* SPE evsel predicates. */
5083 static struct builtin_description bdesc_spe_evsel[] =
5085 /* Place-holder. Leave as first. */
5086 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5087 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5088 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5089 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5090 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5091 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5092 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5093 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5094 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5095 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5096 /* Place-holder. Leave as last. */
5097 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5100 /* ABS* operations. */
5102 static const struct builtin_description bdesc_abs[] =
5104 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5105 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5106 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5107 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5108 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5109 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5110 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5113 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5116 static struct builtin_description bdesc_1arg[] =
5118 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5119 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5120 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5121 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5122 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5123 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5124 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5125 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5126 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5127 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5128 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5129 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5130 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5131 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5132 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5133 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5134 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5136 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5137 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5138 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5139 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5140 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5141 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5142 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5143 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5144 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5145 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5146 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5147 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5148 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5149 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5150 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5151 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5152 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5153 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5154 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5155 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5156 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5157 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5158 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5159 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5160 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5161 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5162 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5163 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5164 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5165 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5167 /* Place-holder. Leave as last unary SPE builtin. */
5168 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5172 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5175 tree arg0 = TREE_VALUE (arglist);
5176 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5177 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5178 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5180 if (icode == CODE_FOR_nothing)
5181 /* Builtin not supported on this processor. */
5184 /* If we got invalid arguments bail out before generating bad rtl. */
5185 if (arg0 == error_mark_node)
5188 if (icode == CODE_FOR_altivec_vspltisb
5189 || icode == CODE_FOR_altivec_vspltish
5190 || icode == CODE_FOR_altivec_vspltisw
5191 || icode == CODE_FOR_spe_evsplatfi
5192 || icode == CODE_FOR_spe_evsplati)
5194 /* Only allow 5-bit *signed* literals. */
5195 if (GET_CODE (op0) != CONST_INT
5196 || INTVAL (op0) > 0x1f
5197 || INTVAL (op0) < -0x1f)
5199 error ("argument 1 must be a 5-bit signed literal");
5205 || GET_MODE (target) != tmode
5206 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5207 target = gen_reg_rtx (tmode);
5209 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5210 op0 = copy_to_mode_reg (mode0, op0);
5212 pat = GEN_FCN (icode) (target, op0);
5221 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5223 rtx pat, scratch1, scratch2;
5224 tree arg0 = TREE_VALUE (arglist);
5225 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5226 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5227 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5229 /* If we have invalid arguments, bail out before generating bad rtl. */
5230 if (arg0 == error_mark_node)
5234 || GET_MODE (target) != tmode
5235 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5236 target = gen_reg_rtx (tmode);
5238 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5239 op0 = copy_to_mode_reg (mode0, op0);
5241 scratch1 = gen_reg_rtx (mode0);
5242 scratch2 = gen_reg_rtx (mode0);
5244 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5253 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5256 tree arg0 = TREE_VALUE (arglist);
5257 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5258 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5259 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5260 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5261 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5262 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5264 if (icode == CODE_FOR_nothing)
5265 /* Builtin not supported on this processor. */
5268 /* If we got invalid arguments bail out before generating bad rtl. */
5269 if (arg0 == error_mark_node || arg1 == error_mark_node)
5272 if (icode == CODE_FOR_altivec_vcfux
5273 || icode == CODE_FOR_altivec_vcfsx
5274 || icode == CODE_FOR_altivec_vctsxs
5275 || icode == CODE_FOR_altivec_vctuxs
5276 || icode == CODE_FOR_altivec_vspltb
5277 || icode == CODE_FOR_altivec_vsplth
5278 || icode == CODE_FOR_altivec_vspltw
5279 || icode == CODE_FOR_spe_evaddiw
5280 || icode == CODE_FOR_spe_evldd
5281 || icode == CODE_FOR_spe_evldh
5282 || icode == CODE_FOR_spe_evldw
5283 || icode == CODE_FOR_spe_evlhhesplat
5284 || icode == CODE_FOR_spe_evlhhossplat
5285 || icode == CODE_FOR_spe_evlhhousplat
5286 || icode == CODE_FOR_spe_evlwhe
5287 || icode == CODE_FOR_spe_evlwhos
5288 || icode == CODE_FOR_spe_evlwhou
5289 || icode == CODE_FOR_spe_evlwhsplat
5290 || icode == CODE_FOR_spe_evlwwsplat
5291 || icode == CODE_FOR_spe_evrlwi
5292 || icode == CODE_FOR_spe_evslwi
5293 || icode == CODE_FOR_spe_evsrwis
5294 || icode == CODE_FOR_spe_evsubifw
5295 || icode == CODE_FOR_spe_evsrwiu)
5297 /* Only allow 5-bit unsigned literals. */
5298 if (TREE_CODE (arg1) != INTEGER_CST
5299 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5301 error ("argument 2 must be a 5-bit unsigned literal");
5307 || GET_MODE (target) != tmode
5308 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5309 target = gen_reg_rtx (tmode);
5311 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5312 op0 = copy_to_mode_reg (mode0, op0);
5313 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5314 op1 = copy_to_mode_reg (mode1, op1);
5316 pat = GEN_FCN (icode) (target, op0, op1);
5325 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5326 tree arglist, rtx target)
5329 tree cr6_form = TREE_VALUE (arglist);
5330 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5331 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5332 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5333 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5334 enum machine_mode tmode = SImode;
5335 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5336 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5339 if (TREE_CODE (cr6_form) != INTEGER_CST)
5341 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5345 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5350 /* If we have invalid arguments, bail out before generating bad rtl. */
5351 if (arg0 == error_mark_node || arg1 == error_mark_node)
5355 || GET_MODE (target) != tmode
5356 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5357 target = gen_reg_rtx (tmode);
5359 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5360 op0 = copy_to_mode_reg (mode0, op0);
5361 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5362 op1 = copy_to_mode_reg (mode1, op1);
5364 scratch = gen_reg_rtx (mode0);
5366 pat = GEN_FCN (icode) (scratch, op0, op1,
5367 gen_rtx (SYMBOL_REF, Pmode, opcode));
5372 /* The vec_any* and vec_all* predicates use the same opcodes for two
5373 different operations, but the bits in CR6 will be different
5374 depending on what information we want. So we have to play tricks
5375 with CR6 to get the right bits out.
5377 If you think this is disgusting, look at the specs for the
5378 AltiVec predicates. */
5380 switch (cr6_form_int)
5383 emit_insn (gen_cr6_test_for_zero (target));
5386 emit_insn (gen_cr6_test_for_zero_reverse (target));
5389 emit_insn (gen_cr6_test_for_lt (target));
5392 emit_insn (gen_cr6_test_for_lt_reverse (target));
5395 error ("argument 1 of __builtin_altivec_predicate is out of range");
5403 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5406 tree arg0 = TREE_VALUE (arglist);
5407 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5408 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5409 enum machine_mode mode0 = Pmode;
5410 enum machine_mode mode1 = Pmode;
5411 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5412 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5414 if (icode == CODE_FOR_nothing)
5415 /* Builtin not supported on this processor. */
5418 /* If we got invalid arguments bail out before generating bad rtl. */
5419 if (arg0 == error_mark_node || arg1 == error_mark_node)
5423 || GET_MODE (target) != tmode
5424 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5425 target = gen_reg_rtx (tmode);
5427 op1 = copy_to_mode_reg (mode1, op1);
5429 if (op0 == const0_rtx)
5431 addr = gen_rtx_MEM (tmode, op1);
5435 op0 = copy_to_mode_reg (mode0, op0);
5436 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5439 pat = GEN_FCN (icode) (target, addr);
5449 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5451 tree arg0 = TREE_VALUE (arglist);
5452 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5453 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5454 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5455 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5456 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5458 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5459 enum machine_mode mode1 = Pmode;
5460 enum machine_mode mode2 = Pmode;
5462 /* Invalid arguments. Bail before doing anything stoopid! */
5463 if (arg0 == error_mark_node
5464 || arg1 == error_mark_node
5465 || arg2 == error_mark_node)
5468 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5469 op0 = copy_to_mode_reg (tmode, op0);
5471 op2 = copy_to_mode_reg (mode2, op2);
5473 if (op1 == const0_rtx)
5475 addr = gen_rtx_MEM (tmode, op2);
5479 op1 = copy_to_mode_reg (mode1, op1);
5480 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5483 pat = GEN_FCN (icode) (addr, op0);
5490 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5493 tree arg0 = TREE_VALUE (arglist);
5494 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5495 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5496 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5497 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5498 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5499 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5500 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5501 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5502 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5504 if (icode == CODE_FOR_nothing)
5505 /* Builtin not supported on this processor. */
5508 /* If we got invalid arguments bail out before generating bad rtl. */
5509 if (arg0 == error_mark_node
5510 || arg1 == error_mark_node
5511 || arg2 == error_mark_node)
5514 if (icode == CODE_FOR_altivec_vsldoi_4sf
5515 || icode == CODE_FOR_altivec_vsldoi_4si
5516 || icode == CODE_FOR_altivec_vsldoi_8hi
5517 || icode == CODE_FOR_altivec_vsldoi_16qi)
5519 /* Only allow 4-bit unsigned literals. */
5520 if (TREE_CODE (arg2) != INTEGER_CST
5521 || TREE_INT_CST_LOW (arg2) & ~0xf)
5523 error ("argument 3 must be a 4-bit unsigned literal");
5529 || GET_MODE (target) != tmode
5530 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5531 target = gen_reg_rtx (tmode);
5533 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5534 op0 = copy_to_mode_reg (mode0, op0);
5535 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5536 op1 = copy_to_mode_reg (mode1, op1);
5537 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5538 op2 = copy_to_mode_reg (mode2, op2);
5540 pat = GEN_FCN (icode) (target, op0, op1, op2);
5548 /* Expand the lvx builtins. */
5550 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5552 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5553 tree arglist = TREE_OPERAND (exp, 1);
5554 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5556 enum machine_mode tmode, mode0;
5558 enum insn_code icode;
5562 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5563 icode = CODE_FOR_altivec_lvx_16qi;
5565 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5566 icode = CODE_FOR_altivec_lvx_8hi;
5568 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5569 icode = CODE_FOR_altivec_lvx_4si;
5571 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5572 icode = CODE_FOR_altivec_lvx_4sf;
5581 arg0 = TREE_VALUE (arglist);
5582 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5583 tmode = insn_data[icode].operand[0].mode;
5584 mode0 = insn_data[icode].operand[1].mode;
5587 || GET_MODE (target) != tmode
5588 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5589 target = gen_reg_rtx (tmode);
5591 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5592 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5594 pat = GEN_FCN (icode) (target, op0);
5601 /* Expand the stvx builtins. */
5603 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5606 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5607 tree arglist = TREE_OPERAND (exp, 1);
5608 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5610 enum machine_mode mode0, mode1;
5612 enum insn_code icode;
5616 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5617 icode = CODE_FOR_altivec_stvx_16qi;
5619 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5620 icode = CODE_FOR_altivec_stvx_8hi;
5622 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5623 icode = CODE_FOR_altivec_stvx_4si;
5625 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5626 icode = CODE_FOR_altivec_stvx_4sf;
5633 arg0 = TREE_VALUE (arglist);
5634 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5635 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5636 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5637 mode0 = insn_data[icode].operand[0].mode;
5638 mode1 = insn_data[icode].operand[1].mode;
5640 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5641 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5642 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5643 op1 = copy_to_mode_reg (mode1, op1);
5645 pat = GEN_FCN (icode) (op0, op1);
5653 /* Expand the dst builtins. */
5655 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5658 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5659 tree arglist = TREE_OPERAND (exp, 1);
5660 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5661 tree arg0, arg1, arg2;
5662 enum machine_mode mode0, mode1, mode2;
5663 rtx pat, op0, op1, op2;
5664 struct builtin_description *d;
5669 /* Handle DST variants. */
5670 d = (struct builtin_description *) bdesc_dst;
5671 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5672 if (d->code == fcode)
5674 arg0 = TREE_VALUE (arglist);
5675 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5676 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5677 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5678 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5679 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5680 mode0 = insn_data[d->icode].operand[0].mode;
5681 mode1 = insn_data[d->icode].operand[1].mode;
5682 mode2 = insn_data[d->icode].operand[2].mode;
5684 /* Invalid arguments, bail out before generating bad rtl. */
5685 if (arg0 == error_mark_node
5686 || arg1 == error_mark_node
5687 || arg2 == error_mark_node)
5690 if (TREE_CODE (arg2) != INTEGER_CST
5691 || TREE_INT_CST_LOW (arg2) & ~0x3)
5693 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5697 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5698 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5699 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5700 op1 = copy_to_mode_reg (mode1, op1);
5702 pat = GEN_FCN (d->icode) (op0, op1, op2);
5713 /* Expand the builtin in EXP and store the result in TARGET. Store
5714 true in *EXPANDEDP if we found a builtin to expand. */
5716 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5718 struct builtin_description *d;
5719 struct builtin_description_predicates *dp;
5721 enum insn_code icode;
5722 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5723 tree arglist = TREE_OPERAND (exp, 1);
5726 enum machine_mode tmode, mode0;
5727 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5729 target = altivec_expand_ld_builtin (exp, target, expandedp);
5733 target = altivec_expand_st_builtin (exp, target, expandedp);
5737 target = altivec_expand_dst_builtin (exp, target, expandedp);
5745 case ALTIVEC_BUILTIN_STVX:
5746 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5747 case ALTIVEC_BUILTIN_STVEBX:
5748 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5749 case ALTIVEC_BUILTIN_STVEHX:
5750 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5751 case ALTIVEC_BUILTIN_STVEWX:
5752 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5753 case ALTIVEC_BUILTIN_STVXL:
5754 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5756 case ALTIVEC_BUILTIN_MFVSCR:
5757 icode = CODE_FOR_altivec_mfvscr;
5758 tmode = insn_data[icode].operand[0].mode;
5761 || GET_MODE (target) != tmode
5762 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5763 target = gen_reg_rtx (tmode);
5765 pat = GEN_FCN (icode) (target);
5771 case ALTIVEC_BUILTIN_MTVSCR:
5772 icode = CODE_FOR_altivec_mtvscr;
5773 arg0 = TREE_VALUE (arglist);
5774 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5775 mode0 = insn_data[icode].operand[0].mode;
5777 /* If we got invalid arguments bail out before generating bad rtl. */
5778 if (arg0 == error_mark_node)
5781 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5782 op0 = copy_to_mode_reg (mode0, op0);
5784 pat = GEN_FCN (icode) (op0);
5789 case ALTIVEC_BUILTIN_DSSALL:
5790 emit_insn (gen_altivec_dssall ());
5793 case ALTIVEC_BUILTIN_DSS:
5794 icode = CODE_FOR_altivec_dss;
5795 arg0 = TREE_VALUE (arglist);
5796 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5797 mode0 = insn_data[icode].operand[0].mode;
5799 /* If we got invalid arguments bail out before generating bad rtl. */
5800 if (arg0 == error_mark_node)
5803 if (TREE_CODE (arg0) != INTEGER_CST
5804 || TREE_INT_CST_LOW (arg0) & ~0x3)
5806 error ("argument to dss must be a 2-bit unsigned literal");
5810 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5811 op0 = copy_to_mode_reg (mode0, op0);
5813 emit_insn (gen_altivec_dss (op0));
5817 /* Expand abs* operations. */
5818 d = (struct builtin_description *) bdesc_abs;
5819 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5820 if (d->code == fcode)
5821 return altivec_expand_abs_builtin (d->icode, arglist, target);
5823 /* Expand the AltiVec predicates. */
5824 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5825 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5826 if (dp->code == fcode)
5827 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5829 /* LV* are funky. We initialized them differently. */
5832 case ALTIVEC_BUILTIN_LVSL:
5833 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5835 case ALTIVEC_BUILTIN_LVSR:
5836 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5838 case ALTIVEC_BUILTIN_LVEBX:
5839 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5841 case ALTIVEC_BUILTIN_LVEHX:
5842 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5844 case ALTIVEC_BUILTIN_LVEWX:
5845 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5847 case ALTIVEC_BUILTIN_LVXL:
5848 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5850 case ALTIVEC_BUILTIN_LVX:
5851 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5862 /* Binops that need to be initialized manually, but can be expanded
5863 automagically by rs6000_expand_binop_builtin. */
5864 static struct builtin_description bdesc_2arg_spe[] =
5866 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5867 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5868 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5869 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5870 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5871 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5872 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5873 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5874 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5875 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5876 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5877 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5878 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5879 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5880 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5881 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5882 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5883 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5884 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5885 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5886 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5887 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5890 /* Expand the builtin in EXP and store the result in TARGET. Store
5891 true in *EXPANDEDP if we found a builtin to expand.
5893 This expands the SPE builtins that are not simple unary and binary
5896 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
5898 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5899 tree arglist = TREE_OPERAND (exp, 1);
5901 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5902 enum insn_code icode;
5903 enum machine_mode tmode, mode0;
5905 struct builtin_description *d;
5910 /* Syntax check for a 5-bit unsigned immediate. */
5913 case SPE_BUILTIN_EVSTDD:
5914 case SPE_BUILTIN_EVSTDH:
5915 case SPE_BUILTIN_EVSTDW:
5916 case SPE_BUILTIN_EVSTWHE:
5917 case SPE_BUILTIN_EVSTWHO:
5918 case SPE_BUILTIN_EVSTWWE:
5919 case SPE_BUILTIN_EVSTWWO:
5920 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5921 if (TREE_CODE (arg1) != INTEGER_CST
5922 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5924 error ("argument 2 must be a 5-bit unsigned literal");
5932 /* The evsplat*i instructions are not quite generic. */
5935 case SPE_BUILTIN_EVSPLATFI:
5936 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5938 case SPE_BUILTIN_EVSPLATI:
5939 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5945 d = (struct builtin_description *) bdesc_2arg_spe;
5946 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5947 if (d->code == fcode)
5948 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5950 d = (struct builtin_description *) bdesc_spe_predicates;
5951 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5952 if (d->code == fcode)
5953 return spe_expand_predicate_builtin (d->icode, arglist, target);
5955 d = (struct builtin_description *) bdesc_spe_evsel;
5956 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5957 if (d->code == fcode)
5958 return spe_expand_evsel_builtin (d->icode, arglist, target);
5962 case SPE_BUILTIN_EVSTDDX:
5963 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5964 case SPE_BUILTIN_EVSTDHX:
5965 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5966 case SPE_BUILTIN_EVSTDWX:
5967 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5968 case SPE_BUILTIN_EVSTWHEX:
5969 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5970 case SPE_BUILTIN_EVSTWHOX:
5971 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5972 case SPE_BUILTIN_EVSTWWEX:
5973 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5974 case SPE_BUILTIN_EVSTWWOX:
5975 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5976 case SPE_BUILTIN_EVSTDD:
5977 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5978 case SPE_BUILTIN_EVSTDH:
5979 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5980 case SPE_BUILTIN_EVSTDW:
5981 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5982 case SPE_BUILTIN_EVSTWHE:
5983 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5984 case SPE_BUILTIN_EVSTWHO:
5985 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5986 case SPE_BUILTIN_EVSTWWE:
5987 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5988 case SPE_BUILTIN_EVSTWWO:
5989 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5990 case SPE_BUILTIN_MFSPEFSCR:
5991 icode = CODE_FOR_spe_mfspefscr;
5992 tmode = insn_data[icode].operand[0].mode;
5995 || GET_MODE (target) != tmode
5996 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5997 target = gen_reg_rtx (tmode);
5999 pat = GEN_FCN (icode) (target);
6004 case SPE_BUILTIN_MTSPEFSCR:
6005 icode = CODE_FOR_spe_mtspefscr;
6006 arg0 = TREE_VALUE (arglist);
6007 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6008 mode0 = insn_data[icode].operand[0].mode;
6010 if (arg0 == error_mark_node)
6013 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6014 op0 = copy_to_mode_reg (mode0, op0);
6016 pat = GEN_FCN (icode) (op0);
6029 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6031 rtx pat, scratch, tmp;
6032 tree form = TREE_VALUE (arglist);
6033 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6034 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6035 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6036 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6037 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6038 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6042 if (TREE_CODE (form) != INTEGER_CST)
6044 error ("argument 1 of __builtin_spe_predicate must be a constant");
6048 form_int = TREE_INT_CST_LOW (form);
6053 if (arg0 == error_mark_node || arg1 == error_mark_node)
6057 || GET_MODE (target) != SImode
6058 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6059 target = gen_reg_rtx (SImode);
6061 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6062 op0 = copy_to_mode_reg (mode0, op0);
6063 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6064 op1 = copy_to_mode_reg (mode1, op1);
6066 scratch = gen_reg_rtx (CCmode);
6068 pat = GEN_FCN (icode) (scratch, op0, op1);
6073 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6074 _lower_. We use one compare, but look in different bits of the
6075 CR for each variant.
6077 There are 2 elements in each SPE simd type (upper/lower). The CR
6078 bits are set as follows:
6080 BIT0 | BIT 1 | BIT 2 | BIT 3
6081 U | L | (U | L) | (U & L)
6083 So, for an "all" relationship, BIT 3 would be set.
6084 For an "any" relationship, BIT 2 would be set. Etc.
6086 Following traditional nomenclature, these bits map to:
6088 BIT0 | BIT 1 | BIT 2 | BIT 3
6091 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6096 /* All variant. OV bit. */
6098 /* We need to get to the OV bit, which is the ORDERED bit. We
6099 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6100 that's ugly and will trigger a validate_condition_mode abort.
6101 So let's just use another pattern. */
6102 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6104 /* Any variant. EQ bit. */
6108 /* Upper variant. LT bit. */
6112 /* Lower variant. GT bit. */
6117 error ("argument 1 of __builtin_spe_predicate is out of range");
6121 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6122 emit_move_insn (target, tmp);
6127 /* The evsel builtins look like this:
6129 e = __builtin_spe_evsel_OP (a, b, c, d);
6133 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6134 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6138 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6141 tree arg0 = TREE_VALUE (arglist);
6142 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6143 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6144 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6145 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6146 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6147 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6148 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6149 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6150 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6155 if (arg0 == error_mark_node || arg1 == error_mark_node
6156 || arg2 == error_mark_node || arg3 == error_mark_node)
6160 || GET_MODE (target) != mode0
6161 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6162 target = gen_reg_rtx (mode0);
6164 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6165 op0 = copy_to_mode_reg (mode0, op0);
6166 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6167 op1 = copy_to_mode_reg (mode0, op1);
6168 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6169 op2 = copy_to_mode_reg (mode0, op2);
6170 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6171 op3 = copy_to_mode_reg (mode0, op3);
6173 /* Generate the compare. */
6174 scratch = gen_reg_rtx (CCmode);
6175 pat = GEN_FCN (icode) (scratch, op0, op1);
6180 if (mode0 == V2SImode)
6181 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6183 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6188 /* Expand an expression EXP that calls a built-in function,
6189 with result going to TARGET if that's convenient
6190 (and in mode MODE if that's convenient).
6191 SUBTARGET may be used as the target for computing one of EXP's operands.
6192 IGNORE is nonzero if the value is to be ignored. */
6195 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6196 enum machine_mode mode ATTRIBUTE_UNUSED,
6197 int ignore ATTRIBUTE_UNUSED)
6199 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6200 tree arglist = TREE_OPERAND (exp, 1);
6201 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6202 struct builtin_description *d;
6209 ret = altivec_expand_builtin (exp, target, &success);
6216 ret = spe_expand_builtin (exp, target, &success);
6222 if (TARGET_ALTIVEC || TARGET_SPE)
6224 /* Handle simple unary operations. */
6225 d = (struct builtin_description *) bdesc_1arg;
6226 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6227 if (d->code == fcode)
6228 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6230 /* Handle simple binary operations. */
6231 d = (struct builtin_description *) bdesc_2arg;
6232 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6233 if (d->code == fcode)
6234 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6236 /* Handle simple ternary operations. */
6237 d = (struct builtin_description *) bdesc_3arg;
6238 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6239 if (d->code == fcode)
6240 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6248 rs6000_init_builtins (void)
6250 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6251 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6252 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6255 spe_init_builtins ();
6257 altivec_init_builtins ();
6258 if (TARGET_ALTIVEC || TARGET_SPE)
6259 rs6000_common_init_builtins ();
6262 /* Search through a set of builtins and enable the mask bits.
6263 DESC is an array of builtins.
6264 SIZE is the total number of builtins.
6265 START is the builtin enum at which to start.
6266 END is the builtin enum at which to end. */
6268 enable_mask_for_builtins (struct builtin_description *desc, int size,
6269 enum rs6000_builtins start,
6270 enum rs6000_builtins end)
6274 for (i = 0; i < size; ++i)
6275 if (desc[i].code == start)
6281 for (; i < size; ++i)
6283 /* Flip all the bits on. */
6284 desc[i].mask = target_flags;
6285 if (desc[i].code == end)
6291 spe_init_builtins (void)
6293 tree endlink = void_list_node;
6294 tree puint_type_node = build_pointer_type (unsigned_type_node);
6295 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6296 struct builtin_description *d;
6299 tree v2si_ftype_4_v2si
6300 = build_function_type
6301 (opaque_V2SI_type_node,
6302 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6303 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6304 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6305 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6308 tree v2sf_ftype_4_v2sf
6309 = build_function_type
6310 (opaque_V2SF_type_node,
6311 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6312 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6313 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6314 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6317 tree int_ftype_int_v2si_v2si
6318 = build_function_type
6320 tree_cons (NULL_TREE, integer_type_node,
6321 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6322 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6325 tree int_ftype_int_v2sf_v2sf
6326 = build_function_type
6328 tree_cons (NULL_TREE, integer_type_node,
6329 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6330 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6333 tree void_ftype_v2si_puint_int
6334 = build_function_type (void_type_node,
6335 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6336 tree_cons (NULL_TREE, puint_type_node,
6337 tree_cons (NULL_TREE,
6341 tree void_ftype_v2si_puint_char
6342 = build_function_type (void_type_node,
6343 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6344 tree_cons (NULL_TREE, puint_type_node,
6345 tree_cons (NULL_TREE,
6349 tree void_ftype_v2si_pv2si_int
6350 = build_function_type (void_type_node,
6351 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6352 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6353 tree_cons (NULL_TREE,
6357 tree void_ftype_v2si_pv2si_char
6358 = build_function_type (void_type_node,
6359 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6360 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6361 tree_cons (NULL_TREE,
6366 = build_function_type (void_type_node,
6367 tree_cons (NULL_TREE, integer_type_node, endlink));
6370 = build_function_type (integer_type_node, endlink);
6372 tree v2si_ftype_pv2si_int
6373 = build_function_type (opaque_V2SI_type_node,
6374 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6375 tree_cons (NULL_TREE, integer_type_node,
6378 tree v2si_ftype_puint_int
6379 = build_function_type (opaque_V2SI_type_node,
6380 tree_cons (NULL_TREE, puint_type_node,
6381 tree_cons (NULL_TREE, integer_type_node,
6384 tree v2si_ftype_pushort_int
6385 = build_function_type (opaque_V2SI_type_node,
6386 tree_cons (NULL_TREE, pushort_type_node,
6387 tree_cons (NULL_TREE, integer_type_node,
6390 tree v2si_ftype_signed_char
6391 = build_function_type (opaque_V2SI_type_node,
6392 tree_cons (NULL_TREE, signed_char_type_node,
6395 /* The initialization of the simple binary and unary builtins is
6396 done in rs6000_common_init_builtins, but we have to enable the
6397 mask bits here manually because we have run out of `target_flags'
6398 bits. We really need to redesign this mask business. */
6400 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6401 ARRAY_SIZE (bdesc_2arg),
6404 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6405 ARRAY_SIZE (bdesc_1arg),
6407 SPE_BUILTIN_EVSUBFUSIAAW);
6408 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6409 ARRAY_SIZE (bdesc_spe_predicates),
6410 SPE_BUILTIN_EVCMPEQ,
6411 SPE_BUILTIN_EVFSTSTLT);
6412 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6413 ARRAY_SIZE (bdesc_spe_evsel),
6414 SPE_BUILTIN_EVSEL_CMPGTS,
6415 SPE_BUILTIN_EVSEL_FSTSTEQ);
6417 (*lang_hooks.decls.pushdecl)
6418 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6419 opaque_V2SI_type_node));
6421 /* Initialize irregular SPE builtins. */
6423 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6424 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6425 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6426 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6427 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6428 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6429 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6430 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6431 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6432 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6433 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6434 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6435 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6436 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6437 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6438 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6439 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6440 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6443 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6444 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6445 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6446 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6447 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6448 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6449 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6450 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6451 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6452 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6453 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6454 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6455 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6456 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6457 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6458 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6459 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6460 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6461 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6462 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6463 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6464 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6467 d = (struct builtin_description *) bdesc_spe_predicates;
6468 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6472 switch (insn_data[d->icode].operand[1].mode)
6475 type = int_ftype_int_v2si_v2si;
6478 type = int_ftype_int_v2sf_v2sf;
6484 def_builtin (d->mask, d->name, type, d->code);
6487 /* Evsel predicates. */
6488 d = (struct builtin_description *) bdesc_spe_evsel;
6489 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6493 switch (insn_data[d->icode].operand[1].mode)
6496 type = v2si_ftype_4_v2si;
6499 type = v2sf_ftype_4_v2sf;
6505 def_builtin (d->mask, d->name, type, d->code);
6510 altivec_init_builtins (void)
6512 struct builtin_description *d;
6513 struct builtin_description_predicates *dp;
6515 tree pfloat_type_node = build_pointer_type (float_type_node);
6516 tree pint_type_node = build_pointer_type (integer_type_node);
6517 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6518 tree pchar_type_node = build_pointer_type (char_type_node);
6520 tree pvoid_type_node = build_pointer_type (void_type_node);
6522 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6523 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6524 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6525 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6527 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6529 tree int_ftype_int_v4si_v4si
6530 = build_function_type_list (integer_type_node,
6531 integer_type_node, V4SI_type_node,
6532 V4SI_type_node, NULL_TREE);
6533 tree v4sf_ftype_pcfloat
6534 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6535 tree void_ftype_pfloat_v4sf
6536 = build_function_type_list (void_type_node,
6537 pfloat_type_node, V4SF_type_node, NULL_TREE);
6538 tree v4si_ftype_pcint
6539 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6540 tree void_ftype_pint_v4si
6541 = build_function_type_list (void_type_node,
6542 pint_type_node, V4SI_type_node, NULL_TREE);
6543 tree v8hi_ftype_pcshort
6544 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6545 tree void_ftype_pshort_v8hi
6546 = build_function_type_list (void_type_node,
6547 pshort_type_node, V8HI_type_node, NULL_TREE);
6548 tree v16qi_ftype_pcchar
6549 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6550 tree void_ftype_pchar_v16qi
6551 = build_function_type_list (void_type_node,
6552 pchar_type_node, V16QI_type_node, NULL_TREE);
6553 tree void_ftype_v4si
6554 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6555 tree v8hi_ftype_void
6556 = build_function_type (V8HI_type_node, void_list_node);
6557 tree void_ftype_void
6558 = build_function_type (void_type_node, void_list_node);
6560 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6562 tree v16qi_ftype_long_pcvoid
6563 = build_function_type_list (V16QI_type_node,
6564 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6565 tree v8hi_ftype_long_pcvoid
6566 = build_function_type_list (V8HI_type_node,
6567 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6568 tree v4si_ftype_long_pcvoid
6569 = build_function_type_list (V4SI_type_node,
6570 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6572 tree void_ftype_v4si_long_pvoid
6573 = build_function_type_list (void_type_node,
6574 V4SI_type_node, long_integer_type_node,
6575 pvoid_type_node, NULL_TREE);
6576 tree void_ftype_v16qi_long_pvoid
6577 = build_function_type_list (void_type_node,
6578 V16QI_type_node, long_integer_type_node,
6579 pvoid_type_node, NULL_TREE);
6580 tree void_ftype_v8hi_long_pvoid
6581 = build_function_type_list (void_type_node,
6582 V8HI_type_node, long_integer_type_node,
6583 pvoid_type_node, NULL_TREE);
6584 tree int_ftype_int_v8hi_v8hi
6585 = build_function_type_list (integer_type_node,
6586 integer_type_node, V8HI_type_node,
6587 V8HI_type_node, NULL_TREE);
6588 tree int_ftype_int_v16qi_v16qi
6589 = build_function_type_list (integer_type_node,
6590 integer_type_node, V16QI_type_node,
6591 V16QI_type_node, NULL_TREE);
6592 tree int_ftype_int_v4sf_v4sf
6593 = build_function_type_list (integer_type_node,
6594 integer_type_node, V4SF_type_node,
6595 V4SF_type_node, NULL_TREE);
6596 tree v4si_ftype_v4si
6597 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6598 tree v8hi_ftype_v8hi
6599 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6600 tree v16qi_ftype_v16qi
6601 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6602 tree v4sf_ftype_v4sf
6603 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6604 tree void_ftype_pcvoid_int_char
6605 = build_function_type_list (void_type_node,
6606 pcvoid_type_node, integer_type_node,
6607 char_type_node, NULL_TREE);
6609 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6610 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6611 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6612 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6613 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6614 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6615 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6616 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6617 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6618 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6619 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6620 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6621 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6622 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6623 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6624 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6625 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6626 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6627 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6628 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6629 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6630 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6631 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6632 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6633 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6634 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6635 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6636 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6637 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6638 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6639 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6640 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6642 /* Add the DST variants. */
6643 d = (struct builtin_description *) bdesc_dst;
6644 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6645 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6647 /* Initialize the predicates. */
6648 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6649 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6651 enum machine_mode mode1;
6654 mode1 = insn_data[dp->icode].operand[1].mode;
6659 type = int_ftype_int_v4si_v4si;
6662 type = int_ftype_int_v8hi_v8hi;
6665 type = int_ftype_int_v16qi_v16qi;
6668 type = int_ftype_int_v4sf_v4sf;
6674 def_builtin (dp->mask, dp->name, type, dp->code);
6677 /* Initialize the abs* operators. */
6678 d = (struct builtin_description *) bdesc_abs;
6679 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6681 enum machine_mode mode0;
6684 mode0 = insn_data[d->icode].operand[0].mode;
6689 type = v4si_ftype_v4si;
6692 type = v8hi_ftype_v8hi;
6695 type = v16qi_ftype_v16qi;
6698 type = v4sf_ftype_v4sf;
6704 def_builtin (d->mask, d->name, type, d->code);
6709 rs6000_common_init_builtins (void)
6711 struct builtin_description *d;
6714 tree v4sf_ftype_v4sf_v4sf_v16qi
6715 = build_function_type_list (V4SF_type_node,
6716 V4SF_type_node, V4SF_type_node,
6717 V16QI_type_node, NULL_TREE);
6718 tree v4si_ftype_v4si_v4si_v16qi
6719 = build_function_type_list (V4SI_type_node,
6720 V4SI_type_node, V4SI_type_node,
6721 V16QI_type_node, NULL_TREE);
6722 tree v8hi_ftype_v8hi_v8hi_v16qi
6723 = build_function_type_list (V8HI_type_node,
6724 V8HI_type_node, V8HI_type_node,
6725 V16QI_type_node, NULL_TREE);
6726 tree v16qi_ftype_v16qi_v16qi_v16qi
6727 = build_function_type_list (V16QI_type_node,
6728 V16QI_type_node, V16QI_type_node,
6729 V16QI_type_node, NULL_TREE);
6730 tree v4si_ftype_char
6731 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6732 tree v8hi_ftype_char
6733 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6734 tree v16qi_ftype_char
6735 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6736 tree v8hi_ftype_v16qi
6737 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6738 tree v4sf_ftype_v4sf
6739 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6741 tree v2si_ftype_v2si_v2si
6742 = build_function_type_list (opaque_V2SI_type_node,
6743 opaque_V2SI_type_node,
6744 opaque_V2SI_type_node, NULL_TREE);
6746 tree v2sf_ftype_v2sf_v2sf
6747 = build_function_type_list (opaque_V2SF_type_node,
6748 opaque_V2SF_type_node,
6749 opaque_V2SF_type_node, NULL_TREE);
6751 tree v2si_ftype_int_int
6752 = build_function_type_list (opaque_V2SI_type_node,
6753 integer_type_node, integer_type_node,
6756 tree v2si_ftype_v2si
6757 = build_function_type_list (opaque_V2SI_type_node,
6758 opaque_V2SI_type_node, NULL_TREE);
6760 tree v2sf_ftype_v2sf
6761 = build_function_type_list (opaque_V2SF_type_node,
6762 opaque_V2SF_type_node, NULL_TREE);
6764 tree v2sf_ftype_v2si
6765 = build_function_type_list (opaque_V2SF_type_node,
6766 opaque_V2SI_type_node, NULL_TREE);
6768 tree v2si_ftype_v2sf
6769 = build_function_type_list (opaque_V2SI_type_node,
6770 opaque_V2SF_type_node, NULL_TREE);
6772 tree v2si_ftype_v2si_char
6773 = build_function_type_list (opaque_V2SI_type_node,
6774 opaque_V2SI_type_node,
6775 char_type_node, NULL_TREE);
6777 tree v2si_ftype_int_char
6778 = build_function_type_list (opaque_V2SI_type_node,
6779 integer_type_node, char_type_node, NULL_TREE);
6781 tree v2si_ftype_char
6782 = build_function_type_list (opaque_V2SI_type_node,
6783 char_type_node, NULL_TREE);
6785 tree int_ftype_int_int
6786 = build_function_type_list (integer_type_node,
6787 integer_type_node, integer_type_node,
6790 tree v4si_ftype_v4si_v4si
6791 = build_function_type_list (V4SI_type_node,
6792 V4SI_type_node, V4SI_type_node, NULL_TREE);
6793 tree v4sf_ftype_v4si_char
6794 = build_function_type_list (V4SF_type_node,
6795 V4SI_type_node, char_type_node, NULL_TREE);
6796 tree v4si_ftype_v4sf_char
6797 = build_function_type_list (V4SI_type_node,
6798 V4SF_type_node, char_type_node, NULL_TREE);
6799 tree v4si_ftype_v4si_char
6800 = build_function_type_list (V4SI_type_node,
6801 V4SI_type_node, char_type_node, NULL_TREE);
6802 tree v8hi_ftype_v8hi_char
6803 = build_function_type_list (V8HI_type_node,
6804 V8HI_type_node, char_type_node, NULL_TREE);
6805 tree v16qi_ftype_v16qi_char
6806 = build_function_type_list (V16QI_type_node,
6807 V16QI_type_node, char_type_node, NULL_TREE);
6808 tree v16qi_ftype_v16qi_v16qi_char
6809 = build_function_type_list (V16QI_type_node,
6810 V16QI_type_node, V16QI_type_node,
6811 char_type_node, NULL_TREE);
6812 tree v8hi_ftype_v8hi_v8hi_char
6813 = build_function_type_list (V8HI_type_node,
6814 V8HI_type_node, V8HI_type_node,
6815 char_type_node, NULL_TREE);
6816 tree v4si_ftype_v4si_v4si_char
6817 = build_function_type_list (V4SI_type_node,
6818 V4SI_type_node, V4SI_type_node,
6819 char_type_node, NULL_TREE);
6820 tree v4sf_ftype_v4sf_v4sf_char
6821 = build_function_type_list (V4SF_type_node,
6822 V4SF_type_node, V4SF_type_node,
6823 char_type_node, NULL_TREE);
6824 tree v4sf_ftype_v4sf_v4sf
6825 = build_function_type_list (V4SF_type_node,
6826 V4SF_type_node, V4SF_type_node, NULL_TREE);
6827 tree v4sf_ftype_v4sf_v4sf_v4si
6828 = build_function_type_list (V4SF_type_node,
6829 V4SF_type_node, V4SF_type_node,
6830 V4SI_type_node, NULL_TREE);
6831 tree v4sf_ftype_v4sf_v4sf_v4sf
6832 = build_function_type_list (V4SF_type_node,
6833 V4SF_type_node, V4SF_type_node,
6834 V4SF_type_node, NULL_TREE);
6835 tree v4si_ftype_v4si_v4si_v4si
6836 = build_function_type_list (V4SI_type_node,
6837 V4SI_type_node, V4SI_type_node,
6838 V4SI_type_node, NULL_TREE);
6839 tree v8hi_ftype_v8hi_v8hi
6840 = build_function_type_list (V8HI_type_node,
6841 V8HI_type_node, V8HI_type_node, NULL_TREE);
6842 tree v8hi_ftype_v8hi_v8hi_v8hi
6843 = build_function_type_list (V8HI_type_node,
6844 V8HI_type_node, V8HI_type_node,
6845 V8HI_type_node, NULL_TREE);
6846 tree v4si_ftype_v8hi_v8hi_v4si
6847 = build_function_type_list (V4SI_type_node,
6848 V8HI_type_node, V8HI_type_node,
6849 V4SI_type_node, NULL_TREE);
6850 tree v4si_ftype_v16qi_v16qi_v4si
6851 = build_function_type_list (V4SI_type_node,
6852 V16QI_type_node, V16QI_type_node,
6853 V4SI_type_node, NULL_TREE);
6854 tree v16qi_ftype_v16qi_v16qi
6855 = build_function_type_list (V16QI_type_node,
6856 V16QI_type_node, V16QI_type_node, NULL_TREE);
6857 tree v4si_ftype_v4sf_v4sf
6858 = build_function_type_list (V4SI_type_node,
6859 V4SF_type_node, V4SF_type_node, NULL_TREE);
6860 tree v8hi_ftype_v16qi_v16qi
6861 = build_function_type_list (V8HI_type_node,
6862 V16QI_type_node, V16QI_type_node, NULL_TREE);
6863 tree v4si_ftype_v8hi_v8hi
6864 = build_function_type_list (V4SI_type_node,
6865 V8HI_type_node, V8HI_type_node, NULL_TREE);
6866 tree v8hi_ftype_v4si_v4si
6867 = build_function_type_list (V8HI_type_node,
6868 V4SI_type_node, V4SI_type_node, NULL_TREE);
6869 tree v16qi_ftype_v8hi_v8hi
6870 = build_function_type_list (V16QI_type_node,
6871 V8HI_type_node, V8HI_type_node, NULL_TREE);
6872 tree v4si_ftype_v16qi_v4si
6873 = build_function_type_list (V4SI_type_node,
6874 V16QI_type_node, V4SI_type_node, NULL_TREE);
6875 tree v4si_ftype_v16qi_v16qi
6876 = build_function_type_list (V4SI_type_node,
6877 V16QI_type_node, V16QI_type_node, NULL_TREE);
6878 tree v4si_ftype_v8hi_v4si
6879 = build_function_type_list (V4SI_type_node,
6880 V8HI_type_node, V4SI_type_node, NULL_TREE);
6881 tree v4si_ftype_v8hi
6882 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6883 tree int_ftype_v4si_v4si
6884 = build_function_type_list (integer_type_node,
6885 V4SI_type_node, V4SI_type_node, NULL_TREE);
6886 tree int_ftype_v4sf_v4sf
6887 = build_function_type_list (integer_type_node,
6888 V4SF_type_node, V4SF_type_node, NULL_TREE);
6889 tree int_ftype_v16qi_v16qi
6890 = build_function_type_list (integer_type_node,
6891 V16QI_type_node, V16QI_type_node, NULL_TREE);
6892 tree int_ftype_v8hi_v8hi
6893 = build_function_type_list (integer_type_node,
6894 V8HI_type_node, V8HI_type_node, NULL_TREE);
6896 /* Add the simple ternary operators. */
6897 d = (struct builtin_description *) bdesc_3arg;
6898 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6901 enum machine_mode mode0, mode1, mode2, mode3;
6904 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6907 mode0 = insn_data[d->icode].operand[0].mode;
6908 mode1 = insn_data[d->icode].operand[1].mode;
6909 mode2 = insn_data[d->icode].operand[2].mode;
6910 mode3 = insn_data[d->icode].operand[3].mode;
6912 /* When all four are of the same mode. */
6913 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6918 type = v4si_ftype_v4si_v4si_v4si;
6921 type = v4sf_ftype_v4sf_v4sf_v4sf;
6924 type = v8hi_ftype_v8hi_v8hi_v8hi;
6927 type = v16qi_ftype_v16qi_v16qi_v16qi;
6933 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6938 type = v4si_ftype_v4si_v4si_v16qi;
6941 type = v4sf_ftype_v4sf_v4sf_v16qi;
6944 type = v8hi_ftype_v8hi_v8hi_v16qi;
6947 type = v16qi_ftype_v16qi_v16qi_v16qi;
6953 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6954 && mode3 == V4SImode)
6955 type = v4si_ftype_v16qi_v16qi_v4si;
6956 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6957 && mode3 == V4SImode)
6958 type = v4si_ftype_v8hi_v8hi_v4si;
6959 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6960 && mode3 == V4SImode)
6961 type = v4sf_ftype_v4sf_v4sf_v4si;
6963 /* vchar, vchar, vchar, 4 bit literal. */
6964 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6966 type = v16qi_ftype_v16qi_v16qi_char;
6968 /* vshort, vshort, vshort, 4 bit literal. */
6969 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6971 type = v8hi_ftype_v8hi_v8hi_char;
6973 /* vint, vint, vint, 4 bit literal. */
6974 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6976 type = v4si_ftype_v4si_v4si_char;
6978 /* vfloat, vfloat, vfloat, 4 bit literal. */
6979 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6981 type = v4sf_ftype_v4sf_v4sf_char;
6986 def_builtin (d->mask, d->name, type, d->code);
6989 /* Add the simple binary operators. */
6990 d = (struct builtin_description *) bdesc_2arg;
6991 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6993 enum machine_mode mode0, mode1, mode2;
6996 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6999 mode0 = insn_data[d->icode].operand[0].mode;
7000 mode1 = insn_data[d->icode].operand[1].mode;
7001 mode2 = insn_data[d->icode].operand[2].mode;
7003 /* When all three operands are of the same mode. */
7004 if (mode0 == mode1 && mode1 == mode2)
7009 type = v4sf_ftype_v4sf_v4sf;
7012 type = v4si_ftype_v4si_v4si;
7015 type = v16qi_ftype_v16qi_v16qi;
7018 type = v8hi_ftype_v8hi_v8hi;
7021 type = v2si_ftype_v2si_v2si;
7024 type = v2sf_ftype_v2sf_v2sf;
7027 type = int_ftype_int_int;
7034 /* A few other combos we really don't want to do manually. */
7036 /* vint, vfloat, vfloat. */
7037 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7038 type = v4si_ftype_v4sf_v4sf;
7040 /* vshort, vchar, vchar. */
7041 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7042 type = v8hi_ftype_v16qi_v16qi;
7044 /* vint, vshort, vshort. */
7045 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7046 type = v4si_ftype_v8hi_v8hi;
7048 /* vshort, vint, vint. */
7049 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7050 type = v8hi_ftype_v4si_v4si;
7052 /* vchar, vshort, vshort. */
7053 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7054 type = v16qi_ftype_v8hi_v8hi;
7056 /* vint, vchar, vint. */
7057 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7058 type = v4si_ftype_v16qi_v4si;
7060 /* vint, vchar, vchar. */
7061 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7062 type = v4si_ftype_v16qi_v16qi;
7064 /* vint, vshort, vint. */
7065 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7066 type = v4si_ftype_v8hi_v4si;
7068 /* vint, vint, 5 bit literal. */
7069 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7070 type = v4si_ftype_v4si_char;
7072 /* vshort, vshort, 5 bit literal. */
7073 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7074 type = v8hi_ftype_v8hi_char;
7076 /* vchar, vchar, 5 bit literal. */
7077 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7078 type = v16qi_ftype_v16qi_char;
7080 /* vfloat, vint, 5 bit literal. */
7081 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7082 type = v4sf_ftype_v4si_char;
7084 /* vint, vfloat, 5 bit literal. */
7085 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7086 type = v4si_ftype_v4sf_char;
7088 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7089 type = v2si_ftype_int_int;
7091 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7092 type = v2si_ftype_v2si_char;
7094 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7095 type = v2si_ftype_int_char;
7098 else if (mode0 == SImode)
7103 type = int_ftype_v4si_v4si;
7106 type = int_ftype_v4sf_v4sf;
7109 type = int_ftype_v16qi_v16qi;
7112 type = int_ftype_v8hi_v8hi;
7122 def_builtin (d->mask, d->name, type, d->code);
7125 /* Add the simple unary operators. */
7126 d = (struct builtin_description *) bdesc_1arg;
7127 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7129 enum machine_mode mode0, mode1;
7132 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7135 mode0 = insn_data[d->icode].operand[0].mode;
7136 mode1 = insn_data[d->icode].operand[1].mode;
7138 if (mode0 == V4SImode && mode1 == QImode)
7139 type = v4si_ftype_char;
7140 else if (mode0 == V8HImode && mode1 == QImode)
7141 type = v8hi_ftype_char;
7142 else if (mode0 == V16QImode && mode1 == QImode)
7143 type = v16qi_ftype_char;
7144 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7145 type = v4sf_ftype_v4sf;
7146 else if (mode0 == V8HImode && mode1 == V16QImode)
7147 type = v8hi_ftype_v16qi;
7148 else if (mode0 == V4SImode && mode1 == V8HImode)
7149 type = v4si_ftype_v8hi;
7150 else if (mode0 == V2SImode && mode1 == V2SImode)
7151 type = v2si_ftype_v2si;
7152 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7153 type = v2sf_ftype_v2sf;
7154 else if (mode0 == V2SFmode && mode1 == V2SImode)
7155 type = v2sf_ftype_v2si;
7156 else if (mode0 == V2SImode && mode1 == V2SFmode)
7157 type = v2si_ftype_v2sf;
7158 else if (mode0 == V2SImode && mode1 == QImode)
7159 type = v2si_ftype_char;
7163 def_builtin (d->mask, d->name, type, d->code);
7168 rs6000_init_libfuncs (void)
7170 if (!TARGET_HARD_FLOAT)
7173 if (DEFAULT_ABI != ABI_V4)
7175 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7177 /* AIX library routines for float->int conversion. */
7178 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7179 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7182 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7183 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7184 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7185 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7186 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7190 /* 32-bit SVR4 quad floating point routines. */
7192 set_optab_libfunc (add_optab, TFmode, "_q_add");
7193 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7194 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7195 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7196 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7197 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7198 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7200 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7201 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7202 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7203 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7204 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7205 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7207 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7208 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7209 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7210 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7211 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7212 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7213 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7217 /* Expand a block move operation, and return 1 if successful. Return 0
7218 if we should let the compiler generate normal code.
7220 operands[0] is the destination
7221 operands[1] is the source
7222 operands[2] is the length
7223 operands[3] is the alignment */
7225 #define MAX_MOVE_REG 4
7228 expand_block_move (rtx operands[])
7230 rtx orig_dest = operands[0];
7231 rtx orig_src = operands[1];
7232 rtx bytes_rtx = operands[2];
7233 rtx align_rtx = operands[3];
7234 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7239 rtx stores[MAX_MOVE_REG];
7242 /* If this is not a fixed size move, just call memcpy */
7246 /* If this is not a fixed size alignment, abort */
7247 if (GET_CODE (align_rtx) != CONST_INT)
7249 align = INTVAL (align_rtx);
7251 /* Anything to move? */
7252 bytes = INTVAL (bytes_rtx);
7256 /* store_one_arg depends on expand_block_move to handle at least the size of
7257 reg_parm_stack_space. */
7258 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7261 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7264 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7265 rtx (*mov) (rtx, rtx);
7267 enum machine_mode mode = BLKmode;
7271 && bytes > 24 /* move up to 32 bytes at a time */
7279 && ! fixed_regs[12])
7281 move_bytes = (bytes > 32) ? 32 : bytes;
7282 gen_func.movstrsi = gen_movstrsi_8reg;
7284 else if (TARGET_STRING
7285 && bytes > 16 /* move up to 24 bytes at a time */
7291 && ! fixed_regs[10])
7293 move_bytes = (bytes > 24) ? 24 : bytes;
7294 gen_func.movstrsi = gen_movstrsi_6reg;
7296 else if (TARGET_STRING
7297 && bytes > 8 /* move up to 16 bytes at a time */
7303 move_bytes = (bytes > 16) ? 16 : bytes;
7304 gen_func.movstrsi = gen_movstrsi_4reg;
7306 else if (bytes >= 8 && TARGET_POWERPC64
7307 /* 64-bit loads and stores require word-aligned
7309 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7313 gen_func.mov = gen_movdi;
7315 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7316 { /* move up to 8 bytes at a time */
7317 move_bytes = (bytes > 8) ? 8 : bytes;
7318 gen_func.movstrsi = gen_movstrsi_2reg;
7320 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7321 { /* move 4 bytes */
7324 gen_func.mov = gen_movsi;
7326 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7327 { /* move 2 bytes */
7330 gen_func.mov = gen_movhi;
7332 else if (TARGET_STRING && bytes > 1)
7333 { /* move up to 4 bytes at a time */
7334 move_bytes = (bytes > 4) ? 4 : bytes;
7335 gen_func.movstrsi = gen_movstrsi_1reg;
7337 else /* move 1 byte at a time */
7341 gen_func.mov = gen_movqi;
7344 src = adjust_address (orig_src, mode, offset);
7345 dest = adjust_address (orig_dest, mode, offset);
7347 if (mode != BLKmode)
7349 rtx tmp_reg = gen_reg_rtx (mode);
7351 emit_insn ((*gen_func.mov) (tmp_reg, src));
7352 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7355 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7358 for (i = 0; i < num_reg; i++)
7359 emit_insn (stores[i]);
7363 if (mode == BLKmode)
7365 /* Move the address into scratch registers. The movstrsi
7366 patterns require zero offset. */
7367 if (!REG_P (XEXP (src, 0)))
7369 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7370 src = replace_equiv_address (src, src_reg);
7372 set_mem_size (src, GEN_INT (move_bytes));
7374 if (!REG_P (XEXP (dest, 0)))
7376 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7377 dest = replace_equiv_address (dest, dest_reg);
7379 set_mem_size (dest, GEN_INT (move_bytes));
7381 emit_insn ((*gen_func.movstrsi) (dest, src,
7382 GEN_INT (move_bytes & 31),
7391 /* Return 1 if OP is a load multiple operation. It is known to be a
7392 PARALLEL and the first section will be tested. */
7395 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7397 int count = XVECLEN (op, 0);
7398 unsigned int dest_regno;
7402 /* Perform a quick check so we don't blow up below. */
7404 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7405 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7406 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7409 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7410 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7412 for (i = 1; i < count; i++)
7414 rtx elt = XVECEXP (op, 0, i);
7416 if (GET_CODE (elt) != SET
7417 || GET_CODE (SET_DEST (elt)) != REG
7418 || GET_MODE (SET_DEST (elt)) != SImode
7419 || REGNO (SET_DEST (elt)) != dest_regno + i
7420 || GET_CODE (SET_SRC (elt)) != MEM
7421 || GET_MODE (SET_SRC (elt)) != SImode
7422 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7423 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7424 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7425 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7432 /* Similar, but tests for store multiple. Here, the second vector element
7433 is a CLOBBER. It will be tested later. */
7436 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7438 int count = XVECLEN (op, 0) - 1;
7439 unsigned int src_regno;
7443 /* Perform a quick check so we don't blow up below. */
7445 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7446 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7447 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7450 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7451 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7453 for (i = 1; i < count; i++)
7455 rtx elt = XVECEXP (op, 0, i + 1);
7457 if (GET_CODE (elt) != SET
7458 || GET_CODE (SET_SRC (elt)) != REG
7459 || GET_MODE (SET_SRC (elt)) != SImode
7460 || REGNO (SET_SRC (elt)) != src_regno + i
7461 || GET_CODE (SET_DEST (elt)) != MEM
7462 || GET_MODE (SET_DEST (elt)) != SImode
7463 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7464 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7465 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7466 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7473 /* Return a string to perform a load_multiple operation.
7474 operands[0] is the vector.
7475 operands[1] is the source address.
7476 operands[2] is the first destination register. */
7479 rs6000_output_load_multiple (rtx operands[3])
7481 /* We have to handle the case where the pseudo used to contain the address
7482 is assigned to one of the output registers. */
7484 int words = XVECLEN (operands[0], 0);
7487 if (XVECLEN (operands[0], 0) == 1)
7488 return "{l|lwz} %2,0(%1)";
7490 for (i = 0; i < words; i++)
7491 if (refers_to_regno_p (REGNO (operands[2]) + i,
7492 REGNO (operands[2]) + i + 1, operands[1], 0))
7496 xop[0] = GEN_INT (4 * (words-1));
7497 xop[1] = operands[1];
7498 xop[2] = operands[2];
7499 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7504 xop[0] = GEN_INT (4 * (words-1));
7505 xop[1] = operands[1];
7506 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7507 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7512 for (j = 0; j < words; j++)
7515 xop[0] = GEN_INT (j * 4);
7516 xop[1] = operands[1];
7517 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7518 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7520 xop[0] = GEN_INT (i * 4);
7521 xop[1] = operands[1];
7522 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7527 return "{lsi|lswi} %2,%1,%N0";
7530 /* Return 1 for a parallel vrsave operation. */
7533 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7535 int count = XVECLEN (op, 0);
7536 unsigned int dest_regno, src_regno;
7540 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7541 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7542 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7545 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7546 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7548 if (dest_regno != VRSAVE_REGNO
7549 && src_regno != VRSAVE_REGNO)
7552 for (i = 1; i < count; i++)
7554 rtx elt = XVECEXP (op, 0, i);
7556 if (GET_CODE (elt) != CLOBBER
7557 && GET_CODE (elt) != SET)
7564 /* Return 1 for an PARALLEL suitable for mfcr. */
7567 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7569 int count = XVECLEN (op, 0);
7572 /* Perform a quick check so we don't blow up below. */
7574 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7575 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7576 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7579 for (i = 0; i < count; i++)
7581 rtx exp = XVECEXP (op, 0, i);
7586 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7588 if (GET_CODE (src_reg) != REG
7589 || GET_MODE (src_reg) != CCmode
7590 || ! CR_REGNO_P (REGNO (src_reg)))
7593 if (GET_CODE (exp) != SET
7594 || GET_CODE (SET_DEST (exp)) != REG
7595 || GET_MODE (SET_DEST (exp)) != SImode
7596 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7598 unspec = SET_SRC (exp);
7599 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7601 if (GET_CODE (unspec) != UNSPEC
7602 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7603 || XVECLEN (unspec, 0) != 2
7604 || XVECEXP (unspec, 0, 0) != src_reg
7605 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7606 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7612 /* Return 1 for an PARALLEL suitable for mtcrf. */
7615 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7617 int count = XVECLEN (op, 0);
7621 /* Perform a quick check so we don't blow up below. */
7623 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7624 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7625 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7627 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7629 if (GET_CODE (src_reg) != REG
7630 || GET_MODE (src_reg) != SImode
7631 || ! INT_REGNO_P (REGNO (src_reg)))
7634 for (i = 0; i < count; i++)
7636 rtx exp = XVECEXP (op, 0, i);
7640 if (GET_CODE (exp) != SET
7641 || GET_CODE (SET_DEST (exp)) != REG
7642 || GET_MODE (SET_DEST (exp)) != CCmode
7643 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7645 unspec = SET_SRC (exp);
7646 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7648 if (GET_CODE (unspec) != UNSPEC
7649 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7650 || XVECLEN (unspec, 0) != 2
7651 || XVECEXP (unspec, 0, 0) != src_reg
7652 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7653 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7659 /* Return 1 for an PARALLEL suitable for lmw. */
7662 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7664 int count = XVECLEN (op, 0);
7665 unsigned int dest_regno;
7667 unsigned int base_regno;
7668 HOST_WIDE_INT offset;
7671 /* Perform a quick check so we don't blow up below. */
7673 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7674 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7675 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7678 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7679 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7682 || count != 32 - (int) dest_regno)
7685 if (legitimate_indirect_address_p (src_addr, 0))
7688 base_regno = REGNO (src_addr);
7689 if (base_regno == 0)
7692 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7694 offset = INTVAL (XEXP (src_addr, 1));
7695 base_regno = REGNO (XEXP (src_addr, 0));
7700 for (i = 0; i < count; i++)
7702 rtx elt = XVECEXP (op, 0, i);
7705 HOST_WIDE_INT newoffset;
7707 if (GET_CODE (elt) != SET
7708 || GET_CODE (SET_DEST (elt)) != REG
7709 || GET_MODE (SET_DEST (elt)) != SImode
7710 || REGNO (SET_DEST (elt)) != dest_regno + i
7711 || GET_CODE (SET_SRC (elt)) != MEM
7712 || GET_MODE (SET_SRC (elt)) != SImode)
7714 newaddr = XEXP (SET_SRC (elt), 0);
7715 if (legitimate_indirect_address_p (newaddr, 0))
7720 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7722 addr_reg = XEXP (newaddr, 0);
7723 newoffset = INTVAL (XEXP (newaddr, 1));
7727 if (REGNO (addr_reg) != base_regno
7728 || newoffset != offset + 4 * i)
7735 /* Return 1 for an PARALLEL suitable for stmw. */
7738 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7740 int count = XVECLEN (op, 0);
7741 unsigned int src_regno;
7743 unsigned int base_regno;
7744 HOST_WIDE_INT offset;
7747 /* Perform a quick check so we don't blow up below. */
7749 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7750 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7751 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7754 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7755 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7758 || count != 32 - (int) src_regno)
7761 if (legitimate_indirect_address_p (dest_addr, 0))
7764 base_regno = REGNO (dest_addr);
7765 if (base_regno == 0)
7768 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7770 offset = INTVAL (XEXP (dest_addr, 1));
7771 base_regno = REGNO (XEXP (dest_addr, 0));
7776 for (i = 0; i < count; i++)
7778 rtx elt = XVECEXP (op, 0, i);
7781 HOST_WIDE_INT newoffset;
7783 if (GET_CODE (elt) != SET
7784 || GET_CODE (SET_SRC (elt)) != REG
7785 || GET_MODE (SET_SRC (elt)) != SImode
7786 || REGNO (SET_SRC (elt)) != src_regno + i
7787 || GET_CODE (SET_DEST (elt)) != MEM
7788 || GET_MODE (SET_DEST (elt)) != SImode)
7790 newaddr = XEXP (SET_DEST (elt), 0);
7791 if (legitimate_indirect_address_p (newaddr, 0))
7796 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7798 addr_reg = XEXP (newaddr, 0);
7799 newoffset = INTVAL (XEXP (newaddr, 1));
7803 if (REGNO (addr_reg) != base_regno
7804 || newoffset != offset + 4 * i)
7811 /* A validation routine: say whether CODE, a condition code, and MODE
7812 match. The other alternatives either don't make sense or should
7813 never be generated. */
7816 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
7818 if (GET_RTX_CLASS (code) != '<'
7819 || GET_MODE_CLASS (mode) != MODE_CC)
7822 /* These don't make sense. */
7823 if ((code == GT || code == LT || code == GE || code == LE)
7824 && mode == CCUNSmode)
7827 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7828 && mode != CCUNSmode)
7831 if (mode != CCFPmode
7832 && (code == ORDERED || code == UNORDERED
7833 || code == UNEQ || code == LTGT
7834 || code == UNGT || code == UNLT
7835 || code == UNGE || code == UNLE))
7838 /* These should never be generated except for
7839 flag_finite_math_only. */
7840 if (mode == CCFPmode
7841 && ! flag_finite_math_only
7842 && (code == LE || code == GE
7843 || code == UNEQ || code == LTGT
7844 || code == UNGT || code == UNLT))
7847 /* These are invalid; the information is not there. */
7848 if (mode == CCEQmode
7849 && code != EQ && code != NE)
7853 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7854 We only check the opcode against the mode of the CC value here. */
7857 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7859 enum rtx_code code = GET_CODE (op);
7860 enum machine_mode cc_mode;
7862 if (GET_RTX_CLASS (code) != '<')
7865 cc_mode = GET_MODE (XEXP (op, 0));
7866 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7869 validate_condition_mode (code, cc_mode);
7874 /* Return 1 if OP is a comparison operation that is valid for a branch
7875 insn and which is true if the corresponding bit in the CC register
7879 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
7883 if (! branch_comparison_operator (op, mode))
7886 code = GET_CODE (op);
7887 return (code == EQ || code == LT || code == GT
7888 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7889 || code == LTU || code == GTU
7890 || code == UNORDERED);
7893 /* Return 1 if OP is a comparison operation that is valid for an scc
7894 insn: it must be a positive comparison. */
7897 scc_comparison_operator (rtx op, enum machine_mode mode)
7899 return branch_positive_comparison_operator (op, mode);
7903 trap_comparison_operator (rtx op, enum machine_mode mode)
7905 if (mode != VOIDmode && mode != GET_MODE (op))
7907 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7911 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7913 enum rtx_code code = GET_CODE (op);
7914 return (code == AND || code == IOR || code == XOR);
7918 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7920 enum rtx_code code = GET_CODE (op);
7921 return (code == IOR || code == XOR);
7925 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7927 enum rtx_code code = GET_CODE (op);
7928 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7931 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7932 mask required to convert the result of a rotate insn into a shift
7933 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7936 includes_lshift_p (rtx shiftop, rtx andop)
7938 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7940 shift_mask <<= INTVAL (shiftop);
7942 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7945 /* Similar, but for right shift. */
7948 includes_rshift_p (rtx shiftop, rtx andop)
7950 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7952 shift_mask >>= INTVAL (shiftop);
7954 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7957 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7958 to perform a left shift. It must have exactly SHIFTOP least
7959 significant 0's, then one or more 1's, then zero or more 0's. */
7962 includes_rldic_lshift_p (rtx shiftop, rtx andop)
7964 if (GET_CODE (andop) == CONST_INT)
7966 HOST_WIDE_INT c, lsb, shift_mask;
7969 if (c == 0 || c == ~0)
7973 shift_mask <<= INTVAL (shiftop);
7975 /* Find the least significant one bit. */
7978 /* It must coincide with the LSB of the shift mask. */
7979 if (-lsb != shift_mask)
7982 /* Invert to look for the next transition (if any). */
7985 /* Remove the low group of ones (originally low group of zeros). */
7988 /* Again find the lsb, and check we have all 1's above. */
7992 else if (GET_CODE (andop) == CONST_DOUBLE
7993 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7995 HOST_WIDE_INT low, high, lsb;
7996 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7998 low = CONST_DOUBLE_LOW (andop);
7999 if (HOST_BITS_PER_WIDE_INT < 64)
8000 high = CONST_DOUBLE_HIGH (andop);
8002 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8003 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8006 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8008 shift_mask_high = ~0;
8009 if (INTVAL (shiftop) > 32)
8010 shift_mask_high <<= INTVAL (shiftop) - 32;
8014 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8021 return high == -lsb;
8024 shift_mask_low = ~0;
8025 shift_mask_low <<= INTVAL (shiftop);
8029 if (-lsb != shift_mask_low)
8032 if (HOST_BITS_PER_WIDE_INT < 64)
8037 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8040 return high == -lsb;
8044 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8050 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8051 to perform a left shift. It must have SHIFTOP or more least
8052 significant 0's, with the remainder of the word 1's. */
8055 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8057 if (GET_CODE (andop) == CONST_INT)
8059 HOST_WIDE_INT c, lsb, shift_mask;
8062 shift_mask <<= INTVAL (shiftop);
8065 /* Find the least significant one bit. */
8068 /* It must be covered by the shift mask.
8069 This test also rejects c == 0. */
8070 if ((lsb & shift_mask) == 0)
8073 /* Check we have all 1's above the transition, and reject all 1's. */
8074 return c == -lsb && lsb != 1;
8076 else if (GET_CODE (andop) == CONST_DOUBLE
8077 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8079 HOST_WIDE_INT low, lsb, shift_mask_low;
8081 low = CONST_DOUBLE_LOW (andop);
8083 if (HOST_BITS_PER_WIDE_INT < 64)
8085 HOST_WIDE_INT high, shift_mask_high;
8087 high = CONST_DOUBLE_HIGH (andop);
8091 shift_mask_high = ~0;
8092 if (INTVAL (shiftop) > 32)
8093 shift_mask_high <<= INTVAL (shiftop) - 32;
8097 if ((lsb & shift_mask_high) == 0)
8100 return high == -lsb;
8106 shift_mask_low = ~0;
8107 shift_mask_low <<= INTVAL (shiftop);
8111 if ((lsb & shift_mask_low) == 0)
8114 return low == -lsb && lsb != 1;
8120 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8121 for lfq and stfq insns.
8123 Note reg1 and reg2 *must* be hard registers. To be sure we will
8124 abort if we are passed pseudo registers. */
8127 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8129 /* We might have been passed a SUBREG. */
8130 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8133 return (REGNO (reg1) == REGNO (reg2) - 1);
8136 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8137 addr1 and addr2 must be in consecutive memory locations
8138 (addr2 == addr1 + 8). */
8141 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8146 /* Extract an offset (if used) from the first addr. */
8147 if (GET_CODE (addr1) == PLUS)
8149 /* If not a REG, return zero. */
8150 if (GET_CODE (XEXP (addr1, 0)) != REG)
8154 reg1 = REGNO (XEXP (addr1, 0));
8155 /* The offset must be constant! */
8156 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8158 offset1 = INTVAL (XEXP (addr1, 1));
8161 else if (GET_CODE (addr1) != REG)
8165 reg1 = REGNO (addr1);
8166 /* This was a simple (mem (reg)) expression. Offset is 0. */
8170 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8171 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8172 register as addr1. */
8173 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8175 if (GET_CODE (addr2) != PLUS)
8178 if (GET_CODE (XEXP (addr2, 0)) != REG
8179 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8182 if (reg1 != REGNO (XEXP (addr2, 0)))
8185 /* The offset for the second addr must be 8 more than the first addr. */
8186 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8189 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8194 /* Return the register class of a scratch register needed to copy IN into
8195 or out of a register in CLASS in MODE. If it can be done directly,
8196 NO_REGS is returned. */
8199 secondary_reload_class (enum reg_class class,
8200 enum machine_mode mode ATTRIBUTE_UNUSED, rtx in)
8204 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8206 && MACHOPIC_INDIRECT
8210 /* We cannot copy a symbolic operand directly into anything
8211 other than BASE_REGS for TARGET_ELF. So indicate that a
8212 register from BASE_REGS is needed as an intermediate
8215 On Darwin, pic addresses require a load from memory, which
8216 needs a base register. */
8217 if (class != BASE_REGS
8218 && (GET_CODE (in) == SYMBOL_REF
8219 || GET_CODE (in) == HIGH
8220 || GET_CODE (in) == LABEL_REF
8221 || GET_CODE (in) == CONST))
8225 if (GET_CODE (in) == REG)
8228 if (regno >= FIRST_PSEUDO_REGISTER)
8230 regno = true_regnum (in);
8231 if (regno >= FIRST_PSEUDO_REGISTER)
8235 else if (GET_CODE (in) == SUBREG)
8237 regno = true_regnum (in);
8238 if (regno >= FIRST_PSEUDO_REGISTER)
8244 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8246 if (class == GENERAL_REGS || class == BASE_REGS
8247 || (regno >= 0 && INT_REGNO_P (regno)))
8250 /* Constants, memory, and FP registers can go into FP registers. */
8251 if ((regno == -1 || FP_REGNO_P (regno))
8252 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8255 /* Memory, and AltiVec registers can go into AltiVec registers. */
8256 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8257 && class == ALTIVEC_REGS)
8260 /* We can copy among the CR registers. */
8261 if ((class == CR_REGS || class == CR0_REGS)
8262 && regno >= 0 && CR_REGNO_P (regno))
8265 /* Otherwise, we need GENERAL_REGS. */
8266 return GENERAL_REGS;
8269 /* Given a comparison operation, return the bit number in CCR to test. We
8270 know this is a valid comparison.
8272 SCC_P is 1 if this is for an scc. That means that %D will have been
8273 used instead of %C, so the bits will be in different places.
8275 Return -1 if OP isn't a valid comparison for some reason. */
8278 ccr_bit (rtx op, int scc_p)
8280 enum rtx_code code = GET_CODE (op);
8281 enum machine_mode cc_mode;
8286 if (GET_RTX_CLASS (code) != '<')
8291 if (GET_CODE (reg) != REG
8292 || ! CR_REGNO_P (REGNO (reg)))
8295 cc_mode = GET_MODE (reg);
8296 cc_regnum = REGNO (reg);
8297 base_bit = 4 * (cc_regnum - CR0_REGNO);
8299 validate_condition_mode (code, cc_mode);
8301 /* When generating a sCOND operation, only positive conditions are
8303 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8304 && code != GTU && code != LTU)
8310 if (TARGET_E500 && !TARGET_FPRS
8311 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8312 return base_bit + 1;
8313 return scc_p ? base_bit + 3 : base_bit + 2;
8315 if (TARGET_E500 && !TARGET_FPRS
8316 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8317 return base_bit + 1;
8318 return base_bit + 2;
8319 case GT: case GTU: case UNLE:
8320 return base_bit + 1;
8321 case LT: case LTU: case UNGE:
8323 case ORDERED: case UNORDERED:
8324 return base_bit + 3;
8327 /* If scc, we will have done a cror to put the bit in the
8328 unordered position. So test that bit. For integer, this is ! LT
8329 unless this is an scc insn. */
8330 return scc_p ? base_bit + 3 : base_bit;
8333 return scc_p ? base_bit + 3 : base_bit + 1;
8340 /* Return the GOT register. */
8343 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8345 /* The second flow pass currently (June 1999) can't update
8346 regs_ever_live without disturbing other parts of the compiler, so
8347 update it here to make the prolog/epilogue code happy. */
8348 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8349 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8351 current_function_uses_pic_offset_table = 1;
8353 return pic_offset_table_rtx;
8356 /* Function to init struct machine_function.
8357 This will be called, via a pointer variable,
8358 from push_function_context. */
8360 static struct machine_function *
8361 rs6000_init_machine_status (void)
8363 return ggc_alloc_cleared (sizeof (machine_function));
8366 /* These macros test for integers and extract the low-order bits. */
8368 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8369 && GET_MODE (X) == VOIDmode)
8371 #define INT_LOWPART(X) \
8372 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8378 unsigned long val = INT_LOWPART (op);
8380 /* If the high bit is zero, the value is the first 1 bit we find
8382 if ((val & 0x80000000) == 0)
8384 if ((val & 0xffffffff) == 0)
8388 while (((val <<= 1) & 0x80000000) == 0)
8393 /* If the high bit is set and the low bit is not, or the mask is all
8394 1's, the value is zero. */
8395 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8398 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8401 while (((val >>= 1) & 1) != 0)
8411 unsigned long val = INT_LOWPART (op);
8413 /* If the low bit is zero, the value is the first 1 bit we find from
8417 if ((val & 0xffffffff) == 0)
8421 while (((val >>= 1) & 1) == 0)
8427 /* If the low bit is set and the high bit is not, or the mask is all
8428 1's, the value is 31. */
8429 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8432 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8435 while (((val <<= 1) & 0x80000000) != 0)
8441 /* Locate some local-dynamic symbol still in use by this function
8442 so that we can print its name in some tls_ld pattern. */
8445 rs6000_get_some_local_dynamic_name (void)
8449 if (cfun->machine->some_ld_name)
8450 return cfun->machine->some_ld_name;
8452 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8454 && for_each_rtx (&PATTERN (insn),
8455 rs6000_get_some_local_dynamic_name_1, 0))
8456 return cfun->machine->some_ld_name;
8461 /* Helper function for rs6000_get_some_local_dynamic_name. */
8464 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8468 if (GET_CODE (x) == SYMBOL_REF)
8470 const char *str = XSTR (x, 0);
8471 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8473 cfun->machine->some_ld_name = str;
8481 /* Print an operand. Recognize special options, documented below. */
8484 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8485 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8487 #define SMALL_DATA_RELOC "sda21"
8488 #define SMALL_DATA_REG 0
8492 print_operand (FILE *file, rtx x, int code)
8496 unsigned HOST_WIDE_INT uval;
8501 /* Write out an instruction after the call which may be replaced
8502 with glue code by the loader. This depends on the AIX version. */
8503 asm_fprintf (file, RS6000_CALL_GLUE);
8506 /* %a is output_address. */
8509 /* If X is a constant integer whose low-order 5 bits are zero,
8510 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8511 in the AIX assembler where "sri" with a zero shift count
8512 writes a trash instruction. */
8513 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8520 /* If constant, low-order 16 bits of constant, unsigned.
8521 Otherwise, write normally. */
8523 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8525 print_operand (file, x, 0);
8529 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8530 for 64-bit mask direction. */
8531 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8534 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8538 /* X is a CR register. Print the number of the EQ bit of the CR */
8539 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8540 output_operand_lossage ("invalid %%E value");
8542 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8546 /* X is a CR register. Print the shift count needed to move it
8547 to the high-order four bits. */
8548 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8549 output_operand_lossage ("invalid %%f value");
8551 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8555 /* Similar, but print the count for the rotate in the opposite
8557 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8558 output_operand_lossage ("invalid %%F value");
8560 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8564 /* X is a constant integer. If it is negative, print "m",
8565 otherwise print "z". This is to make an aze or ame insn. */
8566 if (GET_CODE (x) != CONST_INT)
8567 output_operand_lossage ("invalid %%G value");
8568 else if (INTVAL (x) >= 0)
8575 /* If constant, output low-order five bits. Otherwise, write
8578 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8580 print_operand (file, x, 0);
8584 /* If constant, output low-order six bits. Otherwise, write
8587 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8589 print_operand (file, x, 0);
8593 /* Print `i' if this is a constant, else nothing. */
8599 /* Write the bit number in CCR for jump. */
8602 output_operand_lossage ("invalid %%j code");
8604 fprintf (file, "%d", i);
8608 /* Similar, but add one for shift count in rlinm for scc and pass
8609 scc flag to `ccr_bit'. */
8612 output_operand_lossage ("invalid %%J code");
8614 /* If we want bit 31, write a shift count of zero, not 32. */
8615 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8619 /* X must be a constant. Write the 1's complement of the
8622 output_operand_lossage ("invalid %%k value");
8624 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8628 /* X must be a symbolic constant on ELF. Write an
8629 expression suitable for an 'addi' that adds in the low 16
8631 if (GET_CODE (x) != CONST)
8633 print_operand_address (file, x);
8638 if (GET_CODE (XEXP (x, 0)) != PLUS
8639 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8640 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8641 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8642 output_operand_lossage ("invalid %%K value");
8643 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8645 /* For GNU as, there must be a non-alphanumeric character
8646 between 'l' and the number. The '-' is added by
8647 print_operand() already. */
8648 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8650 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8654 /* %l is output_asm_label. */
8657 /* Write second word of DImode or DFmode reference. Works on register
8658 or non-indexed memory only. */
8659 if (GET_CODE (x) == REG)
8660 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8661 else if (GET_CODE (x) == MEM)
8663 /* Handle possible auto-increment. Since it is pre-increment and
8664 we have already done it, we can just use an offset of word. */
8665 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8666 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8667 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8670 output_address (XEXP (adjust_address_nv (x, SImode,
8674 if (small_data_operand (x, GET_MODE (x)))
8675 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8676 reg_names[SMALL_DATA_REG]);
8681 /* MB value for a mask operand. */
8682 if (! mask_operand (x, SImode))
8683 output_operand_lossage ("invalid %%m value");
8685 fprintf (file, "%d", extract_MB (x));
8689 /* ME value for a mask operand. */
8690 if (! mask_operand (x, SImode))
8691 output_operand_lossage ("invalid %%M value");
8693 fprintf (file, "%d", extract_ME (x));
8696 /* %n outputs the negative of its operand. */
8699 /* Write the number of elements in the vector times 4. */
8700 if (GET_CODE (x) != PARALLEL)
8701 output_operand_lossage ("invalid %%N value");
8703 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8707 /* Similar, but subtract 1 first. */
8708 if (GET_CODE (x) != PARALLEL)
8709 output_operand_lossage ("invalid %%O value");
8711 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8715 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8717 || INT_LOWPART (x) < 0
8718 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8719 output_operand_lossage ("invalid %%p value");
8721 fprintf (file, "%d", i);
8725 /* The operand must be an indirect memory reference. The result
8726 is the register number. */
8727 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8728 || REGNO (XEXP (x, 0)) >= 32)
8729 output_operand_lossage ("invalid %%P value");
8731 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8735 /* This outputs the logical code corresponding to a boolean
8736 expression. The expression may have one or both operands
8737 negated (if one, only the first one). For condition register
8738 logical operations, it will also treat the negated
8739 CR codes as NOTs, but not handle NOTs of them. */
8741 const char *const *t = 0;
8743 enum rtx_code code = GET_CODE (x);
8744 static const char * const tbl[3][3] = {
8745 { "and", "andc", "nor" },
8746 { "or", "orc", "nand" },
8747 { "xor", "eqv", "xor" } };
8751 else if (code == IOR)
8753 else if (code == XOR)
8756 output_operand_lossage ("invalid %%q value");
8758 if (GET_CODE (XEXP (x, 0)) != NOT)
8762 if (GET_CODE (XEXP (x, 1)) == NOT)
8780 /* X is a CR register. Print the mask for `mtcrf'. */
8781 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8782 output_operand_lossage ("invalid %%R value");
8784 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8788 /* Low 5 bits of 32 - value */
8790 output_operand_lossage ("invalid %%s value");
8792 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8796 /* PowerPC64 mask position. All 0's is excluded.
8797 CONST_INT 32-bit mask is considered sign-extended so any
8798 transition must occur within the CONST_INT, not on the boundary. */
8799 if (! mask64_operand (x, DImode))
8800 output_operand_lossage ("invalid %%S value");
8802 uval = INT_LOWPART (x);
8804 if (uval & 1) /* Clear Left */
8806 #if HOST_BITS_PER_WIDE_INT > 64
8807 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8811 else /* Clear Right */
8814 #if HOST_BITS_PER_WIDE_INT > 64
8815 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8823 fprintf (file, "%d", i);
8827 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8828 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8831 /* Bit 3 is OV bit. */
8832 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8834 /* If we want bit 31, write a shift count of zero, not 32. */
8835 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8839 /* Print the symbolic name of a branch target register. */
8840 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8841 && REGNO (x) != COUNT_REGISTER_REGNUM))
8842 output_operand_lossage ("invalid %%T value");
8843 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8844 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8846 fputs ("ctr", file);
8850 /* High-order 16 bits of constant for use in unsigned operand. */
8852 output_operand_lossage ("invalid %%u value");
8854 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8855 (INT_LOWPART (x) >> 16) & 0xffff);
8859 /* High-order 16 bits of constant for use in signed operand. */
8861 output_operand_lossage ("invalid %%v value");
8863 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8864 (INT_LOWPART (x) >> 16) & 0xffff);
8868 /* Print `u' if this has an auto-increment or auto-decrement. */
8869 if (GET_CODE (x) == MEM
8870 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8871 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8876 /* Print the trap code for this operand. */
8877 switch (GET_CODE (x))
8880 fputs ("eq", file); /* 4 */
8883 fputs ("ne", file); /* 24 */
8886 fputs ("lt", file); /* 16 */
8889 fputs ("le", file); /* 20 */
8892 fputs ("gt", file); /* 8 */
8895 fputs ("ge", file); /* 12 */
8898 fputs ("llt", file); /* 2 */
8901 fputs ("lle", file); /* 6 */
8904 fputs ("lgt", file); /* 1 */
8907 fputs ("lge", file); /* 5 */
8915 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8918 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8919 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8921 print_operand (file, x, 0);
8925 /* MB value for a PowerPC64 rldic operand. */
8926 val = (GET_CODE (x) == CONST_INT
8927 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8932 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8933 if ((val <<= 1) < 0)
8936 #if HOST_BITS_PER_WIDE_INT == 32
8937 if (GET_CODE (x) == CONST_INT && i >= 0)
8938 i += 32; /* zero-extend high-part was all 0's */
8939 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8941 val = CONST_DOUBLE_LOW (x);
8948 for ( ; i < 64; i++)
8949 if ((val <<= 1) < 0)
8954 fprintf (file, "%d", i + 1);
8958 if (GET_CODE (x) == MEM
8959 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8964 /* Like 'L', for third word of TImode */
8965 if (GET_CODE (x) == REG)
8966 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8967 else if (GET_CODE (x) == MEM)
8969 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8970 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8971 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8973 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8974 if (small_data_operand (x, GET_MODE (x)))
8975 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8976 reg_names[SMALL_DATA_REG]);
8981 /* X is a SYMBOL_REF. Write out the name preceded by a
8982 period and without any trailing data in brackets. Used for function
8983 names. If we are configured for System V (or the embedded ABI) on
8984 the PowerPC, do not emit the period, since those systems do not use
8985 TOCs and the like. */
8986 if (GET_CODE (x) != SYMBOL_REF)
8989 if (XSTR (x, 0)[0] != '.')
8991 switch (DEFAULT_ABI)
9006 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9008 assemble_name (file, XSTR (x, 0));
9012 /* Like 'L', for last word of TImode. */
9013 if (GET_CODE (x) == REG)
9014 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9015 else if (GET_CODE (x) == MEM)
9017 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9018 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9019 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9021 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9022 if (small_data_operand (x, GET_MODE (x)))
9023 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9024 reg_names[SMALL_DATA_REG]);
9028 /* Print AltiVec or SPE memory operand. */
9033 if (GET_CODE (x) != MEM)
9041 if (GET_CODE (tmp) == REG)
9043 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9046 /* Handle [reg+UIMM]. */
9047 else if (GET_CODE (tmp) == PLUS &&
9048 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9052 if (GET_CODE (XEXP (tmp, 0)) != REG)
9055 x = INTVAL (XEXP (tmp, 1));
9056 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9060 /* Fall through. Must be [reg+reg]. */
9062 if (GET_CODE (tmp) == REG)
9063 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9064 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9066 if (REGNO (XEXP (tmp, 0)) == 0)
9067 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9068 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9070 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9071 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9079 if (GET_CODE (x) == REG)
9080 fprintf (file, "%s", reg_names[REGNO (x)]);
9081 else if (GET_CODE (x) == MEM)
9083 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9084 know the width from the mode. */
9085 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9086 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9087 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9088 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9089 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9090 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9092 output_address (XEXP (x, 0));
9095 output_addr_const (file, x);
9099 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9103 output_operand_lossage ("invalid %%xn code");
9107 /* Print the address of an operand. */
9110 print_operand_address (FILE *file, rtx x)
9112 if (GET_CODE (x) == REG)
9113 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9114 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9115 || GET_CODE (x) == LABEL_REF)
9117 output_addr_const (file, x);
9118 if (small_data_operand (x, GET_MODE (x)))
9119 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9120 reg_names[SMALL_DATA_REG]);
9121 else if (TARGET_TOC)
9124 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9126 if (REGNO (XEXP (x, 0)) == 0)
9127 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9128 reg_names[ REGNO (XEXP (x, 0)) ]);
9130 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9131 reg_names[ REGNO (XEXP (x, 1)) ]);
9133 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9134 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9135 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9137 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9138 && CONSTANT_P (XEXP (x, 1)))
9140 output_addr_const (file, XEXP (x, 1));
9141 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9145 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9146 && CONSTANT_P (XEXP (x, 1)))
9148 fprintf (file, "lo16(");
9149 output_addr_const (file, XEXP (x, 1));
9150 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9153 else if (legitimate_constant_pool_address_p (x))
9155 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9157 rtx contains_minus = XEXP (x, 1);
9161 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9162 turn it into (sym) for output_addr_const. */
9163 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9164 contains_minus = XEXP (contains_minus, 0);
9166 minus = XEXP (contains_minus, 0);
9167 symref = XEXP (minus, 0);
9168 XEXP (contains_minus, 0) = symref;
9173 name = XSTR (symref, 0);
9174 newname = alloca (strlen (name) + sizeof ("@toc"));
9175 strcpy (newname, name);
9176 strcat (newname, "@toc");
9177 XSTR (symref, 0) = newname;
9179 output_addr_const (file, XEXP (x, 1));
9181 XSTR (symref, 0) = name;
9182 XEXP (contains_minus, 0) = minus;
9185 output_addr_const (file, XEXP (x, 1));
9187 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9193 /* Target hook for assembling integer objects. The PowerPC version has
9194 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9195 is defined. It also needs to handle DI-mode objects on 64-bit
9199 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9201 #ifdef RELOCATABLE_NEEDS_FIXUP
9202 /* Special handling for SI values. */
9203 if (size == 4 && aligned_p)
9205 extern int in_toc_section (void);
9206 static int recurse = 0;
9208 /* For -mrelocatable, we mark all addresses that need to be fixed up
9209 in the .fixup section. */
9210 if (TARGET_RELOCATABLE
9211 && !in_toc_section ()
9212 && !in_text_section ()
9214 && GET_CODE (x) != CONST_INT
9215 && GET_CODE (x) != CONST_DOUBLE
9221 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9223 ASM_OUTPUT_LABEL (asm_out_file, buf);
9224 fprintf (asm_out_file, "\t.long\t(");
9225 output_addr_const (asm_out_file, x);
9226 fprintf (asm_out_file, ")@fixup\n");
9227 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9228 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9229 fprintf (asm_out_file, "\t.long\t");
9230 assemble_name (asm_out_file, buf);
9231 fprintf (asm_out_file, "\n\t.previous\n");
9235 /* Remove initial .'s to turn a -mcall-aixdesc function
9236 address into the address of the descriptor, not the function
9238 else if (GET_CODE (x) == SYMBOL_REF
9239 && XSTR (x, 0)[0] == '.'
9240 && DEFAULT_ABI == ABI_AIX)
9242 const char *name = XSTR (x, 0);
9243 while (*name == '.')
9246 fprintf (asm_out_file, "\t.long\t%s\n", name);
9250 #endif /* RELOCATABLE_NEEDS_FIXUP */
9251 return default_assemble_integer (x, size, aligned_p);
9254 #ifdef HAVE_GAS_HIDDEN
9255 /* Emit an assembler directive to set symbol visibility for DECL to
9259 rs6000_assemble_visibility (tree decl, int vis)
9261 /* Functions need to have their entry point symbol visibility set as
9262 well as their descriptor symbol visibility. */
9263 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9265 static const char * const visibility_types[] = {
9266 NULL, "internal", "hidden", "protected"
9269 const char *name, *type;
9271 name = ((* targetm.strip_name_encoding)
9272 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9273 type = visibility_types[vis];
9275 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9276 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9279 default_assemble_visibility (decl, vis);
9284 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9286 /* Reversal of FP compares takes care -- an ordered compare
9287 becomes an unordered compare and vice versa. */
9288 if (mode == CCFPmode
9289 && (!flag_finite_math_only
9290 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9291 || code == UNEQ || code == LTGT))
9292 return reverse_condition_maybe_unordered (code);
9294 return reverse_condition (code);
9297 /* Generate a compare for CODE. Return a brand-new rtx that
9298 represents the result of the compare. */
9301 rs6000_generate_compare (enum rtx_code code)
9303 enum machine_mode comp_mode;
9306 if (rs6000_compare_fp_p)
9307 comp_mode = CCFPmode;
9308 else if (code == GTU || code == LTU
9309 || code == GEU || code == LEU)
9310 comp_mode = CCUNSmode;
9314 /* First, the compare. */
9315 compare_result = gen_reg_rtx (comp_mode);
9317 /* SPE FP compare instructions on the GPRs. Yuck! */
9318 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9319 && rs6000_compare_fp_p)
9321 rtx cmp, or1, or2, or_result, compare_result2;
9329 cmp = flag_finite_math_only
9330 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9332 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9333 rs6000_compare_op1);
9341 cmp = flag_finite_math_only
9342 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9344 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9345 rs6000_compare_op1);
9353 cmp = flag_finite_math_only
9354 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9356 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9357 rs6000_compare_op1);
9363 /* Synthesize LE and GE from LT/GT || EQ. */
9364 if (code == LE || code == GE || code == LEU || code == GEU)
9366 /* Synthesize GE/LE frome GT/LT || EQ. */
9372 case LE: code = LT; break;
9373 case GE: code = GT; break;
9374 case LEU: code = LT; break;
9375 case GEU: code = GT; break;
9379 or1 = gen_reg_rtx (SImode);
9380 or2 = gen_reg_rtx (SImode);
9381 or_result = gen_reg_rtx (CCEQmode);
9382 compare_result2 = gen_reg_rtx (CCFPmode);
9385 cmp = flag_finite_math_only
9386 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9388 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9389 rs6000_compare_op1);
9392 /* The MC8540 FP compare instructions set the CR bits
9393 differently than other PPC compare instructions. For
9394 that matter, there is no generic test instruction, but a
9395 testgt, testlt, and testeq. For a true condition, bit 2
9396 is set (x1xx) in the CR. Following the traditional CR
9402 ... bit 2 would be a GT CR alias, so later on we
9403 look in the GT bits for the branch instructions.
9404 However, we must be careful to emit correct RTL in
9405 the meantime, so optimizations don't get confused. */
9407 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9408 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9410 /* OR them together. */
9411 cmp = gen_rtx_SET (VOIDmode, or_result,
9412 gen_rtx_COMPARE (CCEQmode,
9413 gen_rtx_IOR (SImode, or1, or2),
9415 compare_result = or_result;
9420 /* We only care about 1 bit (x1xx), so map everything to NE to
9421 maintain rtl sanity. We'll get to the right bit (x1xx) at
9422 code output time. */
9423 if (code == NE || code == LTGT)
9424 /* Do the inverse here because we have no cmpne
9425 instruction. We use the cmpeq instruction and expect
9426 to get a 0 instead. */
9435 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9436 gen_rtx_COMPARE (comp_mode,
9438 rs6000_compare_op1)));
9440 /* Some kinds of FP comparisons need an OR operation;
9441 under flag_finite_math_only we don't bother. */
9442 if (rs6000_compare_fp_p
9443 && ! flag_finite_math_only
9444 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9445 && (code == LE || code == GE
9446 || code == UNEQ || code == LTGT
9447 || code == UNGT || code == UNLT))
9449 enum rtx_code or1, or2;
9450 rtx or1_rtx, or2_rtx, compare2_rtx;
9451 rtx or_result = gen_reg_rtx (CCEQmode);
9455 case LE: or1 = LT; or2 = EQ; break;
9456 case GE: or1 = GT; or2 = EQ; break;
9457 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9458 case LTGT: or1 = LT; or2 = GT; break;
9459 case UNGT: or1 = UNORDERED; or2 = GT; break;
9460 case UNLT: or1 = UNORDERED; or2 = LT; break;
9463 validate_condition_mode (or1, comp_mode);
9464 validate_condition_mode (or2, comp_mode);
9465 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9466 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9467 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9468 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9470 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9472 compare_result = or_result;
9476 validate_condition_mode (code, GET_MODE (compare_result));
9478 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9482 /* Emit the RTL for an sCOND pattern. */
9485 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9488 enum machine_mode op_mode;
9489 enum rtx_code cond_code;
9491 condition_rtx = rs6000_generate_compare (code);
9492 cond_code = GET_CODE (condition_rtx);
9495 || cond_code == GE || cond_code == LE
9496 || cond_code == GEU || cond_code == LEU
9497 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9499 rtx not_result = gen_reg_rtx (CCEQmode);
9500 rtx not_op, rev_cond_rtx;
9501 enum machine_mode cc_mode;
9503 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9505 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9506 SImode, XEXP (condition_rtx, 0), const0_rtx);
9507 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9508 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9509 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9512 op_mode = GET_MODE (rs6000_compare_op0);
9513 if (op_mode == VOIDmode)
9514 op_mode = GET_MODE (rs6000_compare_op1);
9516 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9518 PUT_MODE (condition_rtx, DImode);
9519 convert_move (result, condition_rtx, 0);
9523 PUT_MODE (condition_rtx, SImode);
9524 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9528 /* Emit a branch of kind CODE to location LOC. */
9531 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9533 rtx condition_rtx, loc_ref;
9535 condition_rtx = rs6000_generate_compare (code);
9536 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9537 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9538 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9542 /* Return the string to output a conditional branch to LABEL, which is
9543 the operand number of the label, or -1 if the branch is really a
9546 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9547 condition code register and its mode specifies what kind of
9550 REVERSED is nonzero if we should reverse the sense of the comparison.
9552 INSN is the insn. */
9555 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9557 static char string[64];
9558 enum rtx_code code = GET_CODE (op);
9559 rtx cc_reg = XEXP (op, 0);
9560 enum machine_mode mode = GET_MODE (cc_reg);
9561 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9562 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9563 int really_reversed = reversed ^ need_longbranch;
9569 validate_condition_mode (code, mode);
9571 /* Work out which way this really branches. We could use
9572 reverse_condition_maybe_unordered here always but this
9573 makes the resulting assembler clearer. */
9574 if (really_reversed)
9576 /* Reversal of FP compares takes care -- an ordered compare
9577 becomes an unordered compare and vice versa. */
9578 if (mode == CCFPmode)
9579 code = reverse_condition_maybe_unordered (code);
9581 code = reverse_condition (code);
9584 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9586 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9589 /* Opposite of GT. */
9591 else if (code == NE)
9599 /* Not all of these are actually distinct opcodes, but
9600 we distinguish them for clarity of the resulting assembler. */
9602 ccode = "ne"; break;
9604 ccode = "eq"; break;
9606 ccode = "ge"; break;
9607 case GT: case GTU: case UNGT:
9608 ccode = "gt"; break;
9610 ccode = "le"; break;
9611 case LT: case LTU: case UNLT:
9612 ccode = "lt"; break;
9613 case UNORDERED: ccode = "un"; break;
9614 case ORDERED: ccode = "nu"; break;
9615 case UNGE: ccode = "nl"; break;
9616 case UNLE: ccode = "ng"; break;
9621 /* Maybe we have a guess as to how likely the branch is.
9622 The old mnemonics don't have a way to specify this information. */
9624 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9625 if (note != NULL_RTX)
9627 /* PROB is the difference from 50%. */
9628 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9629 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9631 /* Only hint for highly probable/improbable branches on newer
9632 cpus as static prediction overrides processor dynamic
9633 prediction. For older cpus we may as well always hint, but
9634 assume not taken for branches that are very close to 50% as a
9635 mispredicted taken branch is more expensive than a
9636 mispredicted not-taken branch. */
9638 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9640 if (abs (prob) > REG_BR_PROB_BASE / 20
9641 && ((prob > 0) ^ need_longbranch))
9649 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9651 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9653 /* We need to escape any '%' characters in the reg_names string.
9654 Assume they'd only be the first character... */
9655 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9657 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9661 /* If the branch distance was too far, we may have to use an
9662 unconditional branch to go the distance. */
9663 if (need_longbranch)
9664 s += sprintf (s, ",$+8\n\tb %s", label);
9666 s += sprintf (s, ",%s", label);
9672 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9673 operands of the last comparison is nonzero/true, FALSE_COND if it
9674 is zero/false. Return 0 if the hardware has no such operation. */
9677 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9679 enum rtx_code code = GET_CODE (op);
9680 rtx op0 = rs6000_compare_op0;
9681 rtx op1 = rs6000_compare_op1;
9683 enum machine_mode compare_mode = GET_MODE (op0);
9684 enum machine_mode result_mode = GET_MODE (dest);
9687 /* These modes should always match. */
9688 if (GET_MODE (op1) != compare_mode
9689 /* In the isel case however, we can use a compare immediate, so
9690 op1 may be a small constant. */
9691 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9693 if (GET_MODE (true_cond) != result_mode)
9695 if (GET_MODE (false_cond) != result_mode)
9698 /* First, work out if the hardware can do this at all, or
9699 if it's too slow... */
9700 if (! rs6000_compare_fp_p)
9703 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9707 /* Eliminate half of the comparisons by switching operands, this
9708 makes the remaining code simpler. */
9709 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9710 || code == LTGT || code == LT || code == UNLE)
9712 code = reverse_condition_maybe_unordered (code);
9714 true_cond = false_cond;
9718 /* UNEQ and LTGT take four instructions for a comparison with zero,
9719 it'll probably be faster to use a branch here too. */
9720 if (code == UNEQ && HONOR_NANS (compare_mode))
9723 if (GET_CODE (op1) == CONST_DOUBLE)
9724 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9726 /* We're going to try to implement comparisons by performing
9727 a subtract, then comparing against zero. Unfortunately,
9728 Inf - Inf is NaN which is not zero, and so if we don't
9729 know that the operand is finite and the comparison
9730 would treat EQ different to UNORDERED, we can't do it. */
9731 if (HONOR_INFINITIES (compare_mode)
9732 && code != GT && code != UNGE
9733 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9734 /* Constructs of the form (a OP b ? a : b) are safe. */
9735 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9736 || (! rtx_equal_p (op0, true_cond)
9737 && ! rtx_equal_p (op1, true_cond))))
9739 /* At this point we know we can use fsel. */
9741 /* Reduce the comparison to a comparison against zero. */
9742 temp = gen_reg_rtx (compare_mode);
9743 emit_insn (gen_rtx_SET (VOIDmode, temp,
9744 gen_rtx_MINUS (compare_mode, op0, op1)));
9746 op1 = CONST0_RTX (compare_mode);
9748 /* If we don't care about NaNs we can reduce some of the comparisons
9749 down to faster ones. */
9750 if (! HONOR_NANS (compare_mode))
9756 true_cond = false_cond;
9769 /* Now, reduce everything down to a GE. */
9776 temp = gen_reg_rtx (compare_mode);
9777 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9782 temp = gen_reg_rtx (compare_mode);
9783 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9788 temp = gen_reg_rtx (compare_mode);
9789 emit_insn (gen_rtx_SET (VOIDmode, temp,
9790 gen_rtx_NEG (compare_mode,
9791 gen_rtx_ABS (compare_mode, op0))));
9796 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9797 temp = gen_reg_rtx (result_mode);
9798 emit_insn (gen_rtx_SET (VOIDmode, temp,
9799 gen_rtx_IF_THEN_ELSE (result_mode,
9800 gen_rtx_GE (VOIDmode,
9802 true_cond, false_cond)));
9803 false_cond = true_cond;
9806 temp = gen_reg_rtx (compare_mode);
9807 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9812 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9813 temp = gen_reg_rtx (result_mode);
9814 emit_insn (gen_rtx_SET (VOIDmode, temp,
9815 gen_rtx_IF_THEN_ELSE (result_mode,
9816 gen_rtx_GE (VOIDmode,
9818 true_cond, false_cond)));
9819 true_cond = false_cond;
9822 temp = gen_reg_rtx (compare_mode);
9823 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9831 emit_insn (gen_rtx_SET (VOIDmode, dest,
9832 gen_rtx_IF_THEN_ELSE (result_mode,
9833 gen_rtx_GE (VOIDmode,
9835 true_cond, false_cond)));
9839 /* Same as above, but for ints (isel). */
9842 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9844 rtx condition_rtx, cr;
9846 /* All isel implementations thus far are 32-bits. */
9847 if (GET_MODE (rs6000_compare_op0) != SImode)
9850 /* We still have to do the compare, because isel doesn't do a
9851 compare, it just looks at the CRx bits set by a previous compare
9853 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9854 cr = XEXP (condition_rtx, 0);
9856 if (GET_MODE (cr) == CCmode)
9857 emit_insn (gen_isel_signed (dest, condition_rtx,
9858 true_cond, false_cond, cr));
9860 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9861 true_cond, false_cond, cr));
9867 output_isel (rtx *operands)
9871 code = GET_CODE (operands[1]);
9872 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9874 PUT_CODE (operands[1], reverse_condition (code));
9875 return "isel %0,%3,%2,%j1";
9878 return "isel %0,%2,%3,%j1";
9882 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
9884 enum machine_mode mode = GET_MODE (op0);
9888 if (code == SMAX || code == SMIN)
9893 if (code == SMAX || code == UMAX)
9894 target = emit_conditional_move (dest, c, op0, op1, mode,
9897 target = emit_conditional_move (dest, c, op0, op1, mode,
9899 if (target == NULL_RTX)
9902 emit_move_insn (dest, target);
9905 /* Called by splitter for multireg moves.
9907 operands[0] : Destination of move
9908 operands[1] : Source of move
9911 operands[2-n] : Destination slots
9912 operands[n-m] : Source slots
9913 where n = 2 + HARD_REGNO_NREGS (reg, GET_MODE (operands[0]))
9914 m = 2 + 2 * HARD_REGNO_NREGS (reg, GET_MODE (operands[0])) - 1
9916 Splits the move of operands[1] to operands[0].
9917 This is done, if GPRs are one of the operands. In this case
9918 a sequence of simple move insns has to be issued. The sequence of these
9919 move insns has to be done in correct order to avoid early clobber of the
9920 base register or destructive overlap of registers.
9924 rs6000_split_multireg_move (rtx *operands)
9926 int nregs, reg, i, j, used_update = 0;
9927 enum machine_mode mode;
9928 rtx dst = operands[0];
9929 rtx src = operands[1];
9932 /* Calculate number to move (2/4 for 32/64 bit mode). */
9934 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9935 mode = GET_MODE (operands[0]);
9936 nregs = HARD_REGNO_NREGS (reg, mode);
9938 if (REG_P (operands[1])
9939 && REG_P (operands[0])
9940 && (REGNO (operands[1]) < REGNO (operands[0])))
9942 /* Move register range backwards, if we have destructive overlap. */
9945 for (i = 0; i < nregs; i++)
9948 operands[i+2] = operand_subword (operands[0], j, 0, mode);
9949 operands[i+2+nregs] =
9950 operand_subword (operands[1], j, 0, mode);
9957 if (GET_CODE (operands[1]) == MEM)
9961 if (GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9962 || GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
9965 breg = XEXP (XEXP (operands[1], 0), 0);
9966 delta_rtx = GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9967 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[1])))
9968 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[1])));
9969 insn = emit_insn (TARGET_32BIT
9970 ? gen_addsi3 (breg, breg, delta_rtx)
9971 : gen_adddi3 (breg, breg, delta_rtx));
9972 src = gen_rtx_MEM (mode, breg);
9975 /* We have now address involving an base register only.
9976 If we use one of the registers to address memory,
9977 we have change that register last. */
9979 breg = (GET_CODE (XEXP (src, 0)) == PLUS
9980 ? XEXP (XEXP (src, 0), 0)
9986 if (REGNO (breg) >= REGNO (dst)
9987 && REGNO (breg) < REGNO (dst) + nregs)
9988 j = REGNO (breg) - REGNO (dst);
9991 if (GET_CODE (operands[0]) == MEM)
9995 if (GET_CODE (XEXP (operands[0], 0)) == PRE_INC
9996 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
9999 breg = XEXP (XEXP (operands[0], 0), 0);
10000 delta_rtx = GET_CODE (XEXP (operands[0], 0)) == PRE_INC
10001 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[0])))
10002 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[0])));
10004 /* We have to update the breg before doing the store.
10005 Use store with update, if available. */
10009 insn = emit_insn (TARGET_32BIT
10010 ? gen_movsi_update (breg, breg, delta_rtx,
10011 operand_subword (src, 0, 0, mode))
10012 : gen_movdi_update (breg, breg, delta_rtx,
10013 operand_subword (src, 0, 0, mode)));
10017 insn = emit_insn (TARGET_32BIT
10018 ? gen_addsi3 (breg, breg, delta_rtx)
10019 : gen_adddi3 (breg, breg, delta_rtx));
10020 dst = gen_rtx_MEM (mode, breg);
10024 for (i = 0; i < nregs; i++)
10026 /* Calculate index to next subword. */
10031 operands[i+2] = operand_subword (dst, j, 0, mode);
10032 operands[i+2+nregs] = operand_subword (src, j, 0, mode);
10034 if (j == 0 && used_update)
10036 /* Already emited move of first word by
10037 store with update -> emit dead insn instead (r := r). */
10038 operands[i+2] = operands[i+2+nregs];
10045 /* This page contains routines that are used to determine what the
10046 function prologue and epilogue code will do and write them out. */
10048 /* Return the first fixed-point register that is required to be
10049 saved. 32 if none. */
10052 first_reg_to_save (void)
10056 /* Find lowest numbered live register. */
10057 for (first_reg = 13; first_reg <= 31; first_reg++)
10058 if (regs_ever_live[first_reg]
10059 && (! call_used_regs[first_reg]
10060 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10061 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10062 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10067 && current_function_uses_pic_offset_table
10068 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10069 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10075 /* Similar, for FP regs. */
10078 first_fp_reg_to_save (void)
10082 /* Find lowest numbered live register. */
10083 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10084 if (regs_ever_live[first_reg])
10090 /* Similar, for AltiVec regs. */
10093 first_altivec_reg_to_save (void)
10097 /* Stack frame remains as is unless we are in AltiVec ABI. */
10098 if (! TARGET_ALTIVEC_ABI)
10099 return LAST_ALTIVEC_REGNO + 1;
10101 /* Find lowest numbered live register. */
10102 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10103 if (regs_ever_live[i])
10109 /* Return a 32-bit mask of the AltiVec registers we need to set in
10110 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10111 the 32-bit word is 0. */
10113 static unsigned int
10114 compute_vrsave_mask (void)
10116 unsigned int i, mask = 0;
10118 /* First, find out if we use _any_ altivec registers. */
10119 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10120 if (regs_ever_live[i])
10121 mask |= ALTIVEC_REG_BIT (i);
10126 /* Next, remove the argument registers from the set. These must
10127 be in the VRSAVE mask set by the caller, so we don't need to add
10128 them in again. More importantly, the mask we compute here is
10129 used to generate CLOBBERs in the set_vrsave insn, and we do not
10130 wish the argument registers to die. */
10131 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10132 mask &= ~ALTIVEC_REG_BIT (i);
10134 /* Similarly, remove the return value from the set. */
10137 diddle_return_value (is_altivec_return_reg, &yes);
10139 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10146 is_altivec_return_reg (rtx reg, void *xyes)
10148 bool *yes = (bool *) xyes;
10149 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10154 /* Calculate the stack information for the current function. This is
10155 complicated by having two separate calling sequences, the AIX calling
10156 sequence and the V.4 calling sequence.
10158 AIX (and Darwin/Mac OS X) stack frames look like:
10160 SP----> +---------------------------------------+
10161 | back chain to caller | 0 0
10162 +---------------------------------------+
10163 | saved CR | 4 8 (8-11)
10164 +---------------------------------------+
10166 +---------------------------------------+
10167 | reserved for compilers | 12 24
10168 +---------------------------------------+
10169 | reserved for binders | 16 32
10170 +---------------------------------------+
10171 | saved TOC pointer | 20 40
10172 +---------------------------------------+
10173 | Parameter save area (P) | 24 48
10174 +---------------------------------------+
10175 | Alloca space (A) | 24+P etc.
10176 +---------------------------------------+
10177 | Local variable space (L) | 24+P+A
10178 +---------------------------------------+
10179 | Float/int conversion temporary (X) | 24+P+A+L
10180 +---------------------------------------+
10181 | Save area for AltiVec registers (W) | 24+P+A+L+X
10182 +---------------------------------------+
10183 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10184 +---------------------------------------+
10185 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10186 +---------------------------------------+
10187 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10188 +---------------------------------------+
10189 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10190 +---------------------------------------+
10191 old SP->| back chain to caller's caller |
10192 +---------------------------------------+
10194 The required alignment for AIX configurations is two words (i.e., 8
10198 V.4 stack frames look like:
10200 SP----> +---------------------------------------+
10201 | back chain to caller | 0
10202 +---------------------------------------+
10203 | caller's saved LR | 4
10204 +---------------------------------------+
10205 | Parameter save area (P) | 8
10206 +---------------------------------------+
10207 | Alloca space (A) | 8+P
10208 +---------------------------------------+
10209 | Varargs save area (V) | 8+P+A
10210 +---------------------------------------+
10211 | Local variable space (L) | 8+P+A+V
10212 +---------------------------------------+
10213 | Float/int conversion temporary (X) | 8+P+A+V+L
10214 +---------------------------------------+
10215 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10216 +---------------------------------------+
10217 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10218 +---------------------------------------+
10219 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10220 +---------------------------------------+
10221 | SPE: area for 64-bit GP registers |
10222 +---------------------------------------+
10223 | SPE alignment padding |
10224 +---------------------------------------+
10225 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10226 +---------------------------------------+
10227 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10228 +---------------------------------------+
10229 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10230 +---------------------------------------+
10231 old SP->| back chain to caller's caller |
10232 +---------------------------------------+
10234 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10235 given. (But note below and in sysv4.h that we require only 8 and
10236 may round up the size of our stack frame anyways. The historical
10237 reason is early versions of powerpc-linux which didn't properly
10238 align the stack at program startup. A happy side-effect is that
10239 -mno-eabi libraries can be used with -meabi programs.)
10241 The EABI configuration defaults to the V.4 layout. However,
10242 the stack alignment requirements may differ. If -mno-eabi is not
10243 given, the required stack alignment is 8 bytes; if -mno-eabi is
10244 given, the required alignment is 16 bytes. (But see V.4 comment
10247 #ifndef ABI_STACK_BOUNDARY
10248 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10252 rs6000_stack_info (void)
10254 static rs6000_stack_t info, zero_info;
10255 rs6000_stack_t *info_ptr = &info;
10256 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10258 int total_raw_size;
10260 /* Zero all fields portably. */
10265 /* Cache value so we don't rescan instruction chain over and over. */
10266 if (cfun->machine->insn_chain_scanned_p == 0)
10268 cfun->machine->insn_chain_scanned_p = 1;
10269 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10273 /* Select which calling sequence. */
10274 info_ptr->abi = DEFAULT_ABI;
10276 /* Calculate which registers need to be saved & save area size. */
10277 info_ptr->first_gp_reg_save = first_reg_to_save ();
10278 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10279 even if it currently looks like we won't. */
10280 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10281 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10282 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10283 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10284 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10286 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10288 /* For the SPE, we have an additional upper 32-bits on each GPR.
10289 Ideally we should save the entire 64-bits only when the upper
10290 half is used in SIMD instructions. Since we only record
10291 registers live (not the size they are used in), this proves
10292 difficult because we'd have to traverse the instruction chain at
10293 the right time, taking reload into account. This is a real pain,
10294 so we opt to save the GPRs in 64-bits always if but one register
10295 gets used in 64-bits. Otherwise, all the registers in the frame
10296 get saved in 32-bits.
10298 So... since when we save all GPRs (except the SP) in 64-bits, the
10299 traditional GP save area will be empty. */
10300 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10301 info_ptr->gp_size = 0;
10303 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10304 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10306 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10307 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10308 - info_ptr->first_altivec_reg_save);
10310 /* Does this function call anything? */
10311 info_ptr->calls_p = (! current_function_is_leaf
10312 || cfun->machine->ra_needs_full_frame);
10314 /* Determine if we need to save the link register. */
10315 if (rs6000_ra_ever_killed ()
10316 || (DEFAULT_ABI == ABI_AIX
10317 && current_function_profile
10318 && !TARGET_PROFILE_KERNEL)
10319 #ifdef TARGET_RELOCATABLE
10320 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10322 || (info_ptr->first_fp_reg_save != 64
10323 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10324 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10325 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10326 || (DEFAULT_ABI == ABI_DARWIN
10328 && current_function_uses_pic_offset_table)
10329 || info_ptr->calls_p)
10331 info_ptr->lr_save_p = 1;
10332 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10335 /* Determine if we need to save the condition code registers. */
10336 if (regs_ever_live[CR2_REGNO]
10337 || regs_ever_live[CR3_REGNO]
10338 || regs_ever_live[CR4_REGNO])
10340 info_ptr->cr_save_p = 1;
10341 if (DEFAULT_ABI == ABI_V4)
10342 info_ptr->cr_size = reg_size;
10345 /* If the current function calls __builtin_eh_return, then we need
10346 to allocate stack space for registers that will hold data for
10347 the exception handler. */
10348 if (current_function_calls_eh_return)
10351 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10354 /* SPE saves EH registers in 64-bits. */
10355 ehrd_size = i * (TARGET_SPE_ABI
10356 && info_ptr->spe_64bit_regs_used != 0
10357 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10362 /* Determine various sizes. */
10363 info_ptr->reg_size = reg_size;
10364 info_ptr->fixed_size = RS6000_SAVE_AREA;
10365 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10366 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10367 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10370 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10371 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10373 info_ptr->spe_gp_size = 0;
10375 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10377 info_ptr->vrsave_mask = compute_vrsave_mask ();
10378 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10382 info_ptr->vrsave_mask = 0;
10383 info_ptr->vrsave_size = 0;
10386 /* Calculate the offsets. */
10387 switch (DEFAULT_ABI)
10395 info_ptr->fp_save_offset = - info_ptr->fp_size;
10396 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10398 if (TARGET_ALTIVEC_ABI)
10400 info_ptr->vrsave_save_offset
10401 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10403 /* Align stack so vector save area is on a quadword boundary. */
10404 if (info_ptr->altivec_size != 0)
10405 info_ptr->altivec_padding_size
10406 = 16 - (-info_ptr->vrsave_save_offset % 16);
10408 info_ptr->altivec_padding_size = 0;
10410 info_ptr->altivec_save_offset
10411 = info_ptr->vrsave_save_offset
10412 - info_ptr->altivec_padding_size
10413 - info_ptr->altivec_size;
10415 /* Adjust for AltiVec case. */
10416 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10419 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10420 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10421 info_ptr->lr_save_offset = 2*reg_size;
10425 info_ptr->fp_save_offset = - info_ptr->fp_size;
10426 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10427 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10429 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10431 /* Align stack so SPE GPR save area is aligned on a
10432 double-word boundary. */
10433 if (info_ptr->spe_gp_size != 0)
10434 info_ptr->spe_padding_size
10435 = 8 - (-info_ptr->cr_save_offset % 8);
10437 info_ptr->spe_padding_size = 0;
10439 info_ptr->spe_gp_save_offset
10440 = info_ptr->cr_save_offset
10441 - info_ptr->spe_padding_size
10442 - info_ptr->spe_gp_size;
10444 /* Adjust for SPE case. */
10445 info_ptr->toc_save_offset
10446 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10448 else if (TARGET_ALTIVEC_ABI)
10450 info_ptr->vrsave_save_offset
10451 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10453 /* Align stack so vector save area is on a quadword boundary. */
10454 if (info_ptr->altivec_size != 0)
10455 info_ptr->altivec_padding_size
10456 = 16 - (-info_ptr->vrsave_save_offset % 16);
10458 info_ptr->altivec_padding_size = 0;
10460 info_ptr->altivec_save_offset
10461 = info_ptr->vrsave_save_offset
10462 - info_ptr->altivec_padding_size
10463 - info_ptr->altivec_size;
10465 /* Adjust for AltiVec case. */
10466 info_ptr->toc_save_offset
10467 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10470 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10471 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10472 info_ptr->lr_save_offset = reg_size;
10476 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10477 + info_ptr->gp_size
10478 + info_ptr->altivec_size
10479 + info_ptr->altivec_padding_size
10480 + info_ptr->spe_gp_size
10481 + info_ptr->spe_padding_size
10483 + info_ptr->cr_size
10484 + info_ptr->lr_size
10485 + info_ptr->vrsave_size
10486 + info_ptr->toc_size,
10487 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10490 total_raw_size = (info_ptr->vars_size
10491 + info_ptr->parm_size
10492 + info_ptr->save_size
10493 + info_ptr->varargs_size
10494 + info_ptr->fixed_size);
10496 info_ptr->total_size =
10497 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10499 /* Determine if we need to allocate any stack frame:
10501 For AIX we need to push the stack if a frame pointer is needed
10502 (because the stack might be dynamically adjusted), if we are
10503 debugging, if we make calls, or if the sum of fp_save, gp_save,
10504 and local variables are more than the space needed to save all
10505 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10506 + 18*8 = 288 (GPR13 reserved).
10508 For V.4 we don't have the stack cushion that AIX uses, but assume
10509 that the debugger can handle stackless frames. */
10511 if (info_ptr->calls_p)
10512 info_ptr->push_p = 1;
10514 else if (DEFAULT_ABI == ABI_V4)
10515 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10517 else if (frame_pointer_needed)
10518 info_ptr->push_p = 1;
10520 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10521 info_ptr->push_p = 1;
10525 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10527 /* Zero offsets if we're not saving those registers. */
10528 if (info_ptr->fp_size == 0)
10529 info_ptr->fp_save_offset = 0;
10531 if (info_ptr->gp_size == 0)
10532 info_ptr->gp_save_offset = 0;
10534 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10535 info_ptr->altivec_save_offset = 0;
10537 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10538 info_ptr->vrsave_save_offset = 0;
10540 if (! TARGET_SPE_ABI
10541 || info_ptr->spe_64bit_regs_used == 0
10542 || info_ptr->spe_gp_size == 0)
10543 info_ptr->spe_gp_save_offset = 0;
10545 if (! info_ptr->lr_save_p)
10546 info_ptr->lr_save_offset = 0;
10548 if (! info_ptr->cr_save_p)
10549 info_ptr->cr_save_offset = 0;
10551 if (! info_ptr->toc_save_p)
10552 info_ptr->toc_save_offset = 0;
10557 /* Return true if the current function uses any GPRs in 64-bit SIMD
10561 spe_func_has_64bit_regs_p (void)
10565 /* Functions that save and restore all the call-saved registers will
10566 need to save/restore the registers in 64-bits. */
10567 if (current_function_calls_eh_return
10568 || current_function_calls_setjmp
10569 || current_function_has_nonlocal_goto)
10572 insns = get_insns ();
10574 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10580 i = PATTERN (insn);
10581 if (GET_CODE (i) == SET
10582 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10591 debug_stack_info (rs6000_stack_t *info)
10593 const char *abi_string;
10596 info = rs6000_stack_info ();
10598 fprintf (stderr, "\nStack information for function %s:\n",
10599 ((current_function_decl && DECL_NAME (current_function_decl))
10600 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10605 default: abi_string = "Unknown"; break;
10606 case ABI_NONE: abi_string = "NONE"; break;
10607 case ABI_AIX: abi_string = "AIX"; break;
10608 case ABI_DARWIN: abi_string = "Darwin"; break;
10609 case ABI_V4: abi_string = "V.4"; break;
10612 fprintf (stderr, "\tABI = %5s\n", abi_string);
10614 if (TARGET_ALTIVEC_ABI)
10615 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10617 if (TARGET_SPE_ABI)
10618 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10620 if (info->first_gp_reg_save != 32)
10621 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10623 if (info->first_fp_reg_save != 64)
10624 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10626 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10627 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10628 info->first_altivec_reg_save);
10630 if (info->lr_save_p)
10631 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10633 if (info->cr_save_p)
10634 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10636 if (info->toc_save_p)
10637 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10639 if (info->vrsave_mask)
10640 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10643 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10646 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10648 if (info->gp_save_offset)
10649 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10651 if (info->fp_save_offset)
10652 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10654 if (info->altivec_save_offset)
10655 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10656 info->altivec_save_offset);
10658 if (info->spe_gp_save_offset)
10659 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10660 info->spe_gp_save_offset);
10662 if (info->vrsave_save_offset)
10663 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10664 info->vrsave_save_offset);
10666 if (info->lr_save_offset)
10667 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10669 if (info->cr_save_offset)
10670 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10672 if (info->toc_save_offset)
10673 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10675 if (info->varargs_save_offset)
10676 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10678 if (info->total_size)
10679 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10681 if (info->varargs_size)
10682 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10684 if (info->vars_size)
10685 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10687 if (info->parm_size)
10688 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10690 if (info->fixed_size)
10691 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10694 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10696 if (info->spe_gp_size)
10697 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10700 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10702 if (info->altivec_size)
10703 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10705 if (info->vrsave_size)
10706 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10708 if (info->altivec_padding_size)
10709 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10710 info->altivec_padding_size);
10712 if (info->spe_padding_size)
10713 fprintf (stderr, "\tspe_padding_size = %5d\n",
10714 info->spe_padding_size);
10717 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10720 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10722 if (info->toc_size)
10723 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10725 if (info->save_size)
10726 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10728 if (info->reg_size != 4)
10729 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10731 fprintf (stderr, "\n");
10735 rs6000_return_addr (int count, rtx frame)
10737 /* Currently we don't optimize very well between prolog and body
10738 code and for PIC code the code can be actually quite bad, so
10739 don't try to be too clever here. */
10740 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10742 cfun->machine->ra_needs_full_frame = 1;
10749 plus_constant (copy_to_reg
10750 (gen_rtx_MEM (Pmode,
10751 memory_address (Pmode, frame))),
10752 RETURN_ADDRESS_OFFSET)));
10755 cfun->machine->ra_need_lr = 1;
10756 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10759 /* Say whether a function is a candidate for sibcall handling or not.
10760 We do not allow indirect calls to be optimized into sibling calls.
10761 Also, we can't do it if there are any vector parameters; there's
10762 nowhere to put the VRsave code so it works; note that functions with
10763 vector parameters are required to have a prototype, so the argument
10764 type info must be available here. (The tail recursion case can work
10765 with vector parameters, but there's no way to distinguish here.) */
10767 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10772 if (TARGET_ALTIVEC_VRSAVE)
10774 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10775 type; type = TREE_CHAIN (type))
10777 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10781 if (DEFAULT_ABI == ABI_DARWIN
10782 || (*targetm.binds_local_p) (decl))
10784 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10786 if (!lookup_attribute ("longcall", attr_list)
10787 || lookup_attribute ("shortcall", attr_list))
10795 rs6000_ra_ever_killed (void)
10801 /* Irritatingly, there are two kinds of thunks -- those created with
10802 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10803 through the regular part of the compiler. This is a very hacky
10804 way to tell them apart. */
10805 if (current_function_is_thunk && !no_new_pseudos)
10808 /* regs_ever_live has LR marked as used if any sibcalls are present,
10809 but this should not force saving and restoring in the
10810 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10811 clobbers LR, so that is inappropriate. */
10813 /* Also, the prologue can generate a store into LR that
10814 doesn't really count, like this:
10817 bcl to set PIC register
10821 When we're called from the epilogue, we need to avoid counting
10822 this as a store. */
10824 push_topmost_sequence ();
10825 top = get_insns ();
10826 pop_topmost_sequence ();
10827 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10829 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10833 if (FIND_REG_INC_NOTE (insn, reg))
10835 else if (GET_CODE (insn) == CALL_INSN
10836 && !SIBLING_CALL_P (insn))
10838 else if (set_of (reg, insn) != NULL_RTX
10839 && !prologue_epilogue_contains (insn))
10846 /* Add a REG_MAYBE_DEAD note to the insn. */
10848 rs6000_maybe_dead (rtx insn)
10850 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10855 /* Emit instructions needed to load the TOC register.
10856 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10857 a constant pool; or for SVR4 -fpic. */
10860 rs6000_emit_load_toc_table (int fromprolog)
10863 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10865 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10867 rtx temp = (fromprolog
10868 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10869 : gen_reg_rtx (Pmode));
10870 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10872 rs6000_maybe_dead (insn);
10873 insn = emit_move_insn (dest, temp);
10875 rs6000_maybe_dead (insn);
10877 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10880 rtx tempLR = (fromprolog
10881 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10882 : gen_reg_rtx (Pmode));
10883 rtx temp0 = (fromprolog
10884 ? gen_rtx_REG (Pmode, 0)
10885 : gen_reg_rtx (Pmode));
10888 /* possibly create the toc section */
10889 if (! toc_initialized)
10892 function_section (current_function_decl);
10899 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10900 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10902 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10903 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10905 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10907 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10908 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10915 static int reload_toc_labelno = 0;
10917 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10919 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10920 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10922 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10923 emit_move_insn (dest, tempLR);
10924 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10926 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10928 rs6000_maybe_dead (insn);
10930 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10932 /* This is for AIX code running in non-PIC ELF32. */
10935 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10936 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10938 insn = emit_insn (gen_elf_high (dest, realsym));
10940 rs6000_maybe_dead (insn);
10941 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10943 rs6000_maybe_dead (insn);
10945 else if (DEFAULT_ABI == ABI_AIX)
10948 insn = emit_insn (gen_load_toc_aix_si (dest));
10950 insn = emit_insn (gen_load_toc_aix_di (dest));
10952 rs6000_maybe_dead (insn);
10959 get_TOC_alias_set (void)
10961 static int set = -1;
10963 set = new_alias_set ();
10967 /* This returns nonzero if the current function uses the TOC. This is
10968 determined by the presence of (unspec ... UNSPEC_TOC) or
10969 use (unspec ... UNSPEC_TOC), which are generated by the various
10970 load_toc_* patterns. */
10977 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10980 rtx pat = PATTERN (insn);
10983 if (GET_CODE (pat) == PARALLEL)
10984 for (i = 0; i < XVECLEN (pat, 0); i++)
10986 rtx sub = XVECEXP (pat, 0, i);
10987 if (GET_CODE (sub) == USE)
10989 sub = XEXP (sub, 0);
10990 if (GET_CODE (sub) == UNSPEC
10991 && XINT (sub, 1) == UNSPEC_TOC)
11000 create_TOC_reference (rtx symbol)
11002 return gen_rtx_PLUS (Pmode,
11003 gen_rtx_REG (Pmode, TOC_REGISTER),
11004 gen_rtx_CONST (Pmode,
11005 gen_rtx_MINUS (Pmode, symbol,
11006 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11009 /* If _Unwind_* has been called from within the same module,
11010 toc register is not guaranteed to be saved to 40(1) on function
11011 entry. Save it there in that case. */
11014 rs6000_aix_emit_builtin_unwind_init (void)
11017 rtx stack_top = gen_reg_rtx (Pmode);
11018 rtx opcode_addr = gen_reg_rtx (Pmode);
11019 rtx opcode = gen_reg_rtx (SImode);
11020 rtx tocompare = gen_reg_rtx (SImode);
11021 rtx no_toc_save_needed = gen_label_rtx ();
11023 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11024 emit_move_insn (stack_top, mem);
11026 mem = gen_rtx_MEM (Pmode,
11027 gen_rtx_PLUS (Pmode, stack_top,
11028 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11029 emit_move_insn (opcode_addr, mem);
11030 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11031 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11032 : 0xE8410028, SImode));
11034 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11035 SImode, NULL_RTX, NULL_RTX,
11036 no_toc_save_needed);
11038 mem = gen_rtx_MEM (Pmode,
11039 gen_rtx_PLUS (Pmode, stack_top,
11040 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11041 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11042 emit_label (no_toc_save_needed);
11045 /* This ties together stack memory (MEM with an alias set of
11046 rs6000_sr_alias_set) and the change to the stack pointer. */
11049 rs6000_emit_stack_tie (void)
11051 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11053 set_mem_alias_set (mem, rs6000_sr_alias_set);
11054 emit_insn (gen_stack_tie (mem));
11057 /* Emit the correct code for allocating stack space, as insns.
11058 If COPY_R12, make sure a copy of the old frame is left in r12.
11059 The generated code may use hard register 0 as a temporary. */
11062 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11065 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11066 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11067 rtx todec = GEN_INT (-size);
11069 if (current_function_limit_stack)
11071 if (REG_P (stack_limit_rtx)
11072 && REGNO (stack_limit_rtx) > 1
11073 && REGNO (stack_limit_rtx) <= 31)
11075 emit_insn (TARGET_32BIT
11076 ? gen_addsi3 (tmp_reg,
11079 : gen_adddi3 (tmp_reg,
11083 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11086 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11088 && DEFAULT_ABI == ABI_V4)
11090 rtx toload = gen_rtx_CONST (VOIDmode,
11091 gen_rtx_PLUS (Pmode,
11095 emit_insn (gen_elf_high (tmp_reg, toload));
11096 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11097 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11101 warning ("stack limit expression is not supported");
11104 if (copy_r12 || ! TARGET_UPDATE)
11105 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11111 /* Need a note here so that try_split doesn't get confused. */
11112 if (get_last_insn() == NULL_RTX)
11113 emit_note (NOTE_INSN_DELETED);
11114 insn = emit_move_insn (tmp_reg, todec);
11115 try_split (PATTERN (insn), insn, 0);
11119 insn = emit_insn (TARGET_32BIT
11120 ? gen_movsi_update (stack_reg, stack_reg,
11122 : gen_movdi_update (stack_reg, stack_reg,
11123 todec, stack_reg));
11127 insn = emit_insn (TARGET_32BIT
11128 ? gen_addsi3 (stack_reg, stack_reg, todec)
11129 : gen_adddi3 (stack_reg, stack_reg, todec));
11130 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11131 gen_rtx_REG (Pmode, 12));
11134 RTX_FRAME_RELATED_P (insn) = 1;
11136 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11137 gen_rtx_SET (VOIDmode, stack_reg,
11138 gen_rtx_PLUS (Pmode, stack_reg,
11143 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11144 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11145 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11146 deduce these equivalences by itself so it wasn't necessary to hold
11147 its hand so much. */
11150 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11151 rtx reg2, rtx rreg)
11155 /* copy_rtx will not make unique copies of registers, so we need to
11156 ensure we don't have unwanted sharing here. */
11158 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11161 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11163 real = copy_rtx (PATTERN (insn));
11165 if (reg2 != NULL_RTX)
11166 real = replace_rtx (real, reg2, rreg);
11168 real = replace_rtx (real, reg,
11169 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11170 STACK_POINTER_REGNUM),
11173 /* We expect that 'real' is either a SET or a PARALLEL containing
11174 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11175 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11177 if (GET_CODE (real) == SET)
11181 temp = simplify_rtx (SET_SRC (set));
11183 SET_SRC (set) = temp;
11184 temp = simplify_rtx (SET_DEST (set));
11186 SET_DEST (set) = temp;
11187 if (GET_CODE (SET_DEST (set)) == MEM)
11189 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11191 XEXP (SET_DEST (set), 0) = temp;
11194 else if (GET_CODE (real) == PARALLEL)
11197 for (i = 0; i < XVECLEN (real, 0); i++)
11198 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11200 rtx set = XVECEXP (real, 0, i);
11202 temp = simplify_rtx (SET_SRC (set));
11204 SET_SRC (set) = temp;
11205 temp = simplify_rtx (SET_DEST (set));
11207 SET_DEST (set) = temp;
11208 if (GET_CODE (SET_DEST (set)) == MEM)
11210 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11212 XEXP (SET_DEST (set), 0) = temp;
11214 RTX_FRAME_RELATED_P (set) = 1;
11221 real = spe_synthesize_frame_save (real);
11223 RTX_FRAME_RELATED_P (insn) = 1;
11224 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11229 /* Given an SPE frame note, return a PARALLEL of SETs with the
11230 original note, plus a synthetic register save. */
11233 spe_synthesize_frame_save (rtx real)
11235 rtx synth, offset, reg, real2;
11237 if (GET_CODE (real) != SET
11238 || GET_MODE (SET_SRC (real)) != V2SImode)
11241 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11242 frame related note. The parallel contains a set of the register
11243 being saved, and another set to a synthetic register (n+1200).
11244 This is so we can differentiate between 64-bit and 32-bit saves.
11245 Words cannot describe this nastiness. */
11247 if (GET_CODE (SET_DEST (real)) != MEM
11248 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11249 || GET_CODE (SET_SRC (real)) != REG)
11253 (set (mem (plus (reg x) (const y)))
11256 (set (mem (plus (reg x) (const y+4)))
11260 real2 = copy_rtx (real);
11261 PUT_MODE (SET_DEST (real2), SImode);
11262 reg = SET_SRC (real2);
11263 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11264 synth = copy_rtx (real2);
11266 if (BYTES_BIG_ENDIAN)
11268 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11269 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11272 reg = SET_SRC (synth);
11274 synth = replace_rtx (synth, reg,
11275 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11277 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11278 synth = replace_rtx (synth, offset,
11279 GEN_INT (INTVAL (offset)
11280 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11282 RTX_FRAME_RELATED_P (synth) = 1;
11283 RTX_FRAME_RELATED_P (real2) = 1;
11284 if (BYTES_BIG_ENDIAN)
11285 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11287 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11292 /* Returns an insn that has a vrsave set operation with the
11293 appropriate CLOBBERs. */
11296 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11299 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11300 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11303 = gen_rtx_SET (VOIDmode,
11305 gen_rtx_UNSPEC_VOLATILE (SImode,
11306 gen_rtvec (2, reg, vrsave),
11311 /* We need to clobber the registers in the mask so the scheduler
11312 does not move sets to VRSAVE before sets of AltiVec registers.
11314 However, if the function receives nonlocal gotos, reload will set
11315 all call saved registers live. We will end up with:
11317 (set (reg 999) (mem))
11318 (parallel [ (set (reg vrsave) (unspec blah))
11319 (clobber (reg 999))])
11321 The clobber will cause the store into reg 999 to be dead, and
11322 flow will attempt to delete an epilogue insn. In this case, we
11323 need an unspec use/set of the register. */
11325 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11326 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11328 if (!epiloguep || call_used_regs [i])
11329 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11330 gen_rtx_REG (V4SImode, i));
11333 rtx reg = gen_rtx_REG (V4SImode, i);
11336 = gen_rtx_SET (VOIDmode,
11338 gen_rtx_UNSPEC (V4SImode,
11339 gen_rtvec (1, reg), 27));
11343 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11345 for (i = 0; i < nclobs; ++i)
11346 XVECEXP (insn, 0, i) = clobs[i];
11351 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11352 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11355 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11356 unsigned int regno, int offset, int total_size)
11358 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11359 rtx replacea, replaceb;
11361 int_rtx = GEN_INT (offset);
11363 /* Some cases that need register indexed addressing. */
11364 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11366 && SPE_VECTOR_MODE (mode)
11367 && !SPE_CONST_OFFSET_OK (offset)))
11369 /* Whomever calls us must make sure r11 is available in the
11370 flow path of instructions in the prologue. */
11371 offset_rtx = gen_rtx_REG (Pmode, 11);
11372 emit_move_insn (offset_rtx, int_rtx);
11374 replacea = offset_rtx;
11375 replaceb = int_rtx;
11379 offset_rtx = int_rtx;
11380 replacea = NULL_RTX;
11381 replaceb = NULL_RTX;
11384 reg = gen_rtx_REG (mode, regno);
11385 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11386 mem = gen_rtx_MEM (mode, addr);
11387 set_mem_alias_set (mem, rs6000_sr_alias_set);
11389 insn = emit_move_insn (mem, reg);
11391 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11394 /* Emit an offset memory reference suitable for a frame store, while
11395 converting to a valid addressing mode. */
11398 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11400 rtx int_rtx, offset_rtx;
11402 int_rtx = GEN_INT (offset);
11404 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11406 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11407 emit_move_insn (offset_rtx, int_rtx);
11410 offset_rtx = int_rtx;
11412 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11415 /* Emit function prologue as insns. */
11418 rs6000_emit_prologue (void)
11420 rs6000_stack_t *info = rs6000_stack_info ();
11421 enum machine_mode reg_mode = Pmode;
11422 int reg_size = UNITS_PER_WORD;
11423 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11424 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11425 rtx frame_reg_rtx = sp_reg_rtx;
11426 rtx cr_save_rtx = NULL_RTX;
11428 int saving_FPRs_inline;
11429 int using_store_multiple;
11430 HOST_WIDE_INT sp_offset = 0;
11432 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11434 reg_mode = V2SImode;
11438 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11439 && (!TARGET_SPE_ABI
11440 || info->spe_64bit_regs_used == 0)
11441 && info->first_gp_reg_save < 31);
11442 saving_FPRs_inline = (info->first_fp_reg_save == 64
11443 || FP_SAVE_INLINE (info->first_fp_reg_save)
11444 || current_function_calls_eh_return
11445 || cfun->machine->ra_need_lr);
11447 /* For V.4, update stack before we do any saving and set back pointer. */
11449 && (DEFAULT_ABI == ABI_V4
11450 || current_function_calls_eh_return))
11452 if (info->total_size < 32767)
11453 sp_offset = info->total_size;
11455 frame_reg_rtx = frame_ptr_rtx;
11456 rs6000_emit_allocate_stack (info->total_size,
11457 (frame_reg_rtx != sp_reg_rtx
11458 && (info->cr_save_p
11460 || info->first_fp_reg_save < 64
11461 || info->first_gp_reg_save < 32
11463 if (frame_reg_rtx != sp_reg_rtx)
11464 rs6000_emit_stack_tie ();
11467 /* Save AltiVec registers if needed. */
11468 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11472 /* There should be a non inline version of this, for when we
11473 are saving lots of vector registers. */
11474 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11475 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11477 rtx areg, savereg, mem;
11480 offset = info->altivec_save_offset + sp_offset
11481 + 16 * (i - info->first_altivec_reg_save);
11483 savereg = gen_rtx_REG (V4SImode, i);
11485 areg = gen_rtx_REG (Pmode, 0);
11486 emit_move_insn (areg, GEN_INT (offset));
11488 /* AltiVec addressing mode is [reg+reg]. */
11489 mem = gen_rtx_MEM (V4SImode,
11490 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11492 set_mem_alias_set (mem, rs6000_sr_alias_set);
11494 insn = emit_move_insn (mem, savereg);
11496 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11497 areg, GEN_INT (offset));
11501 /* VRSAVE is a bit vector representing which AltiVec registers
11502 are used. The OS uses this to determine which vector
11503 registers to save on a context switch. We need to save
11504 VRSAVE on the stack frame, add whatever AltiVec registers we
11505 used in this function, and do the corresponding magic in the
11508 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11510 rtx reg, mem, vrsave;
11513 /* Get VRSAVE onto a GPR. */
11514 reg = gen_rtx_REG (SImode, 12);
11515 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11517 emit_insn (gen_get_vrsave_internal (reg));
11519 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11522 offset = info->vrsave_save_offset + sp_offset;
11524 = gen_rtx_MEM (SImode,
11525 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11526 set_mem_alias_set (mem, rs6000_sr_alias_set);
11527 insn = emit_move_insn (mem, reg);
11529 /* Include the registers in the mask. */
11530 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11532 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11535 /* If we use the link register, get it into r0. */
11536 if (info->lr_save_p)
11537 emit_move_insn (gen_rtx_REG (Pmode, 0),
11538 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11540 /* If we need to save CR, put it into r12. */
11541 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11543 cr_save_rtx = gen_rtx_REG (SImode, 12);
11544 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11547 /* Do any required saving of fpr's. If only one or two to save, do
11548 it ourselves. Otherwise, call function. */
11549 if (saving_FPRs_inline)
11552 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11553 if ((regs_ever_live[info->first_fp_reg_save+i]
11554 && ! call_used_regs[info->first_fp_reg_save+i]))
11555 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11556 info->first_fp_reg_save + i,
11557 info->fp_save_offset + sp_offset + 8 * i,
11560 else if (info->first_fp_reg_save != 64)
11564 const char *alloc_rname;
11566 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11568 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11569 gen_rtx_REG (Pmode,
11570 LINK_REGISTER_REGNUM));
11571 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11572 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11573 alloc_rname = ggc_strdup (rname);
11574 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11575 gen_rtx_SYMBOL_REF (Pmode,
11577 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11579 rtx addr, reg, mem;
11580 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11581 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11582 GEN_INT (info->fp_save_offset
11583 + sp_offset + 8*i));
11584 mem = gen_rtx_MEM (DFmode, addr);
11585 set_mem_alias_set (mem, rs6000_sr_alias_set);
11587 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11589 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11590 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11591 NULL_RTX, NULL_RTX);
11594 /* Save GPRs. This is done as a PARALLEL if we are using
11595 the store-multiple instructions. */
11596 if (using_store_multiple)
11600 p = rtvec_alloc (32 - info->first_gp_reg_save);
11601 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11603 rtx addr, reg, mem;
11604 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11605 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11606 GEN_INT (info->gp_save_offset
11609 mem = gen_rtx_MEM (reg_mode, addr);
11610 set_mem_alias_set (mem, rs6000_sr_alias_set);
11612 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11614 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11615 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11616 NULL_RTX, NULL_RTX);
11621 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11622 if ((regs_ever_live[info->first_gp_reg_save+i]
11623 && ! call_used_regs[info->first_gp_reg_save+i])
11624 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11625 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11626 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11628 rtx addr, reg, mem;
11629 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11631 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11633 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11636 if (!SPE_CONST_OFFSET_OK (offset))
11638 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11639 emit_move_insn (b, GEN_INT (offset));
11642 b = GEN_INT (offset);
11644 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11645 mem = gen_rtx_MEM (V2SImode, addr);
11646 set_mem_alias_set (mem, rs6000_sr_alias_set);
11647 insn = emit_move_insn (mem, reg);
11649 if (GET_CODE (b) == CONST_INT)
11650 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11651 NULL_RTX, NULL_RTX);
11653 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11654 b, GEN_INT (offset));
11658 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11659 GEN_INT (info->gp_save_offset
11662 mem = gen_rtx_MEM (reg_mode, addr);
11663 set_mem_alias_set (mem, rs6000_sr_alias_set);
11665 insn = emit_move_insn (mem, reg);
11666 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11667 NULL_RTX, NULL_RTX);
11672 /* ??? There's no need to emit actual instructions here, but it's the
11673 easiest way to get the frame unwind information emitted. */
11674 if (current_function_calls_eh_return)
11676 unsigned int i, regno;
11678 /* In AIX ABI we need to pretend we save r2 here. */
11681 rtx addr, reg, mem;
11683 reg = gen_rtx_REG (reg_mode, 2);
11684 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11685 GEN_INT (sp_offset + 5 * reg_size));
11686 mem = gen_rtx_MEM (reg_mode, addr);
11687 set_mem_alias_set (mem, rs6000_sr_alias_set);
11689 insn = emit_move_insn (mem, reg);
11690 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11691 NULL_RTX, NULL_RTX);
11692 PATTERN (insn) = gen_blockage ();
11697 regno = EH_RETURN_DATA_REGNO (i);
11698 if (regno == INVALID_REGNUM)
11701 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11702 info->ehrd_offset + sp_offset
11703 + reg_size * (int) i,
11708 /* Save lr if we used it. */
11709 if (info->lr_save_p)
11711 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11712 GEN_INT (info->lr_save_offset + sp_offset));
11713 rtx reg = gen_rtx_REG (Pmode, 0);
11714 rtx mem = gen_rtx_MEM (Pmode, addr);
11715 /* This should not be of rs6000_sr_alias_set, because of
11716 __builtin_return_address. */
11718 insn = emit_move_insn (mem, reg);
11719 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11720 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11723 /* Save CR if we use any that must be preserved. */
11724 if (info->cr_save_p)
11726 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11727 GEN_INT (info->cr_save_offset + sp_offset));
11728 rtx mem = gen_rtx_MEM (SImode, addr);
11730 set_mem_alias_set (mem, rs6000_sr_alias_set);
11732 /* If r12 was used to hold the original sp, copy cr into r0 now
11734 if (REGNO (frame_reg_rtx) == 12)
11736 cr_save_rtx = gen_rtx_REG (SImode, 0);
11737 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11739 insn = emit_move_insn (mem, cr_save_rtx);
11741 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11742 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11743 But that's OK. All we have to do is specify that _one_ condition
11744 code register is saved in this stack slot. The thrower's epilogue
11745 will then restore all the call-saved registers.
11746 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11747 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11748 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11751 /* Update stack and set back pointer unless this is V.4,
11752 for which it was done previously. */
11754 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11755 rs6000_emit_allocate_stack (info->total_size, FALSE);
11757 /* Set frame pointer, if needed. */
11758 if (frame_pointer_needed)
11760 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11762 RTX_FRAME_RELATED_P (insn) = 1;
11765 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11766 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11767 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11768 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11770 /* If emit_load_toc_table will use the link register, we need to save
11771 it. We use R12 for this purpose because emit_load_toc_table
11772 can use register 0. This allows us to use a plain 'blr' to return
11773 from the procedure more often. */
11774 int save_LR_around_toc_setup = (TARGET_ELF
11775 && DEFAULT_ABI != ABI_AIX
11777 && ! info->lr_save_p
11778 && EXIT_BLOCK_PTR->pred != NULL);
11779 if (save_LR_around_toc_setup)
11781 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11782 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11783 rs6000_emit_load_toc_table (TRUE);
11784 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11787 rs6000_emit_load_toc_table (TRUE);
11791 if (DEFAULT_ABI == ABI_DARWIN
11792 && flag_pic && current_function_uses_pic_offset_table)
11794 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11795 const char *picbase = machopic_function_base_name ();
11796 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11798 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11800 rs6000_maybe_dead (
11801 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11802 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11807 /* Write function prologue. */
11810 rs6000_output_function_prologue (FILE *file,
11811 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11813 rs6000_stack_t *info = rs6000_stack_info ();
11815 if (TARGET_DEBUG_STACK)
11816 debug_stack_info (info);
11818 /* Write .extern for any function we will call to save and restore
11820 if (info->first_fp_reg_save < 64
11821 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11822 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11823 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11824 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11825 RESTORE_FP_SUFFIX);
11827 /* Write .extern for AIX common mode routines, if needed. */
11828 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11830 fputs ("\t.extern __mulh\n", file);
11831 fputs ("\t.extern __mull\n", file);
11832 fputs ("\t.extern __divss\n", file);
11833 fputs ("\t.extern __divus\n", file);
11834 fputs ("\t.extern __quoss\n", file);
11835 fputs ("\t.extern __quous\n", file);
11836 common_mode_defined = 1;
11839 if (! HAVE_prologue)
11843 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11844 the "toplevel" insn chain. */
11845 emit_note (NOTE_INSN_DELETED);
11846 rs6000_emit_prologue ();
11847 emit_note (NOTE_INSN_DELETED);
11849 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11853 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11855 INSN_ADDRESSES_NEW (insn, addr);
11860 if (TARGET_DEBUG_STACK)
11861 debug_rtx_list (get_insns (), 100);
11862 final (get_insns (), file, FALSE, FALSE);
11866 rs6000_pic_labelno++;
11869 /* Emit function epilogue as insns.
11871 At present, dwarf2out_frame_debug_expr doesn't understand
11872 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11873 anywhere in the epilogue. Most of the insns below would in any case
11874 need special notes to explain where r11 is in relation to the stack. */
11877 rs6000_emit_epilogue (int sibcall)
11879 rs6000_stack_t *info;
11880 int restoring_FPRs_inline;
11881 int using_load_multiple;
11882 int using_mfcr_multiple;
11883 int use_backchain_to_restore_sp;
11885 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11886 rtx frame_reg_rtx = sp_reg_rtx;
11887 enum machine_mode reg_mode = Pmode;
11888 int reg_size = UNITS_PER_WORD;
11891 info = rs6000_stack_info ();
11893 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11895 reg_mode = V2SImode;
11899 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11900 && (!TARGET_SPE_ABI
11901 || info->spe_64bit_regs_used == 0)
11902 && info->first_gp_reg_save < 31);
11903 restoring_FPRs_inline = (sibcall
11904 || current_function_calls_eh_return
11905 || info->first_fp_reg_save == 64
11906 || FP_SAVE_INLINE (info->first_fp_reg_save));
11907 use_backchain_to_restore_sp = (frame_pointer_needed
11908 || current_function_calls_alloca
11909 || info->total_size > 32767);
11910 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11911 || rs6000_cpu == PROCESSOR_PPC603
11912 || rs6000_cpu == PROCESSOR_PPC750
11915 /* If we have a frame pointer, a call to alloca, or a large stack
11916 frame, restore the old stack pointer using the backchain. Otherwise,
11917 we know what size to update it with. */
11918 if (use_backchain_to_restore_sp)
11920 /* Under V.4, don't reset the stack pointer until after we're done
11921 loading the saved registers. */
11922 if (DEFAULT_ABI == ABI_V4)
11923 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11925 emit_move_insn (frame_reg_rtx,
11926 gen_rtx_MEM (Pmode, sp_reg_rtx));
11929 else if (info->push_p)
11931 if (DEFAULT_ABI == ABI_V4
11932 || current_function_calls_eh_return)
11933 sp_offset = info->total_size;
11936 emit_insn (TARGET_32BIT
11937 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11938 GEN_INT (info->total_size))
11939 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11940 GEN_INT (info->total_size)));
11944 /* Restore AltiVec registers if needed. */
11945 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11949 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11950 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11952 rtx addr, areg, mem;
11954 areg = gen_rtx_REG (Pmode, 0);
11956 (areg, GEN_INT (info->altivec_save_offset
11958 + 16 * (i - info->first_altivec_reg_save)));
11960 /* AltiVec addressing mode is [reg+reg]. */
11961 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11962 mem = gen_rtx_MEM (V4SImode, addr);
11963 set_mem_alias_set (mem, rs6000_sr_alias_set);
11965 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11969 /* Restore VRSAVE if needed. */
11970 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11972 rtx addr, mem, reg;
11974 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11975 GEN_INT (info->vrsave_save_offset + sp_offset));
11976 mem = gen_rtx_MEM (SImode, addr);
11977 set_mem_alias_set (mem, rs6000_sr_alias_set);
11978 reg = gen_rtx_REG (SImode, 12);
11979 emit_move_insn (reg, mem);
11981 emit_insn (generate_set_vrsave (reg, info, 1));
11984 /* Get the old lr if we saved it. */
11985 if (info->lr_save_p)
11987 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11988 info->lr_save_offset + sp_offset);
11990 set_mem_alias_set (mem, rs6000_sr_alias_set);
11992 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11995 /* Get the old cr if we saved it. */
11996 if (info->cr_save_p)
11998 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11999 GEN_INT (info->cr_save_offset + sp_offset));
12000 rtx mem = gen_rtx_MEM (SImode, addr);
12002 set_mem_alias_set (mem, rs6000_sr_alias_set);
12004 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12007 /* Set LR here to try to overlap restores below. */
12008 if (info->lr_save_p)
12009 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12010 gen_rtx_REG (Pmode, 0));
12012 /* Load exception handler data registers, if needed. */
12013 if (current_function_calls_eh_return)
12015 unsigned int i, regno;
12019 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12020 GEN_INT (sp_offset + 5 * reg_size));
12021 rtx mem = gen_rtx_MEM (reg_mode, addr);
12023 set_mem_alias_set (mem, rs6000_sr_alias_set);
12025 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12032 regno = EH_RETURN_DATA_REGNO (i);
12033 if (regno == INVALID_REGNUM)
12036 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12037 info->ehrd_offset + sp_offset
12038 + reg_size * (int) i);
12039 set_mem_alias_set (mem, rs6000_sr_alias_set);
12041 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12045 /* Restore GPRs. This is done as a PARALLEL if we are using
12046 the load-multiple instructions. */
12047 if (using_load_multiple)
12050 p = rtvec_alloc (32 - info->first_gp_reg_save);
12051 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12053 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12054 GEN_INT (info->gp_save_offset
12057 rtx mem = gen_rtx_MEM (reg_mode, addr);
12059 set_mem_alias_set (mem, rs6000_sr_alias_set);
12062 gen_rtx_SET (VOIDmode,
12063 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12066 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12069 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12070 if ((regs_ever_live[info->first_gp_reg_save+i]
12071 && ! call_used_regs[info->first_gp_reg_save+i])
12072 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12073 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12074 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12076 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12077 GEN_INT (info->gp_save_offset
12080 rtx mem = gen_rtx_MEM (reg_mode, addr);
12082 /* Restore 64-bit quantities for SPE. */
12083 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12085 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12088 if (!SPE_CONST_OFFSET_OK (offset))
12090 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12091 emit_move_insn (b, GEN_INT (offset));
12094 b = GEN_INT (offset);
12096 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12097 mem = gen_rtx_MEM (V2SImode, addr);
12100 set_mem_alias_set (mem, rs6000_sr_alias_set);
12102 emit_move_insn (gen_rtx_REG (reg_mode,
12103 info->first_gp_reg_save + i), mem);
12106 /* Restore fpr's if we need to do it without calling a function. */
12107 if (restoring_FPRs_inline)
12108 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12109 if ((regs_ever_live[info->first_fp_reg_save+i]
12110 && ! call_used_regs[info->first_fp_reg_save+i]))
12113 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12114 GEN_INT (info->fp_save_offset
12117 mem = gen_rtx_MEM (DFmode, addr);
12118 set_mem_alias_set (mem, rs6000_sr_alias_set);
12120 emit_move_insn (gen_rtx_REG (DFmode,
12121 info->first_fp_reg_save + i),
12125 /* If we saved cr, restore it here. Just those that were used. */
12126 if (info->cr_save_p)
12128 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12131 if (using_mfcr_multiple)
12133 for (i = 0; i < 8; i++)
12134 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12140 if (using_mfcr_multiple && count > 1)
12145 p = rtvec_alloc (count);
12148 for (i = 0; i < 8; i++)
12149 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12151 rtvec r = rtvec_alloc (2);
12152 RTVEC_ELT (r, 0) = r12_rtx;
12153 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12154 RTVEC_ELT (p, ndx) =
12155 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12156 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12159 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12164 for (i = 0; i < 8; i++)
12165 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12167 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12173 /* If this is V.4, unwind the stack pointer after all of the loads
12174 have been done. We need to emit a block here so that sched
12175 doesn't decide to move the sp change before the register restores
12176 (which may not have any obvious dependency on the stack). This
12177 doesn't hurt performance, because there is no scheduling that can
12178 be done after this point. */
12179 if (DEFAULT_ABI == ABI_V4
12180 || current_function_calls_eh_return)
12182 if (frame_reg_rtx != sp_reg_rtx)
12183 rs6000_emit_stack_tie ();
12185 if (use_backchain_to_restore_sp)
12187 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12189 else if (sp_offset != 0)
12191 emit_insn (TARGET_32BIT
12192 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12193 GEN_INT (sp_offset))
12194 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12195 GEN_INT (sp_offset)));
12199 if (current_function_calls_eh_return)
12201 rtx sa = EH_RETURN_STACKADJ_RTX;
12202 emit_insn (TARGET_32BIT
12203 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12204 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12210 if (! restoring_FPRs_inline)
12211 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12213 p = rtvec_alloc (2);
12215 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12216 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12217 gen_rtx_REG (Pmode,
12218 LINK_REGISTER_REGNUM));
12220 /* If we have to restore more than two FP registers, branch to the
12221 restore function. It will return to our caller. */
12222 if (! restoring_FPRs_inline)
12226 const char *alloc_rname;
12228 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12229 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12230 alloc_rname = ggc_strdup (rname);
12231 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12232 gen_rtx_SYMBOL_REF (Pmode,
12235 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12238 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12239 GEN_INT (info->fp_save_offset + 8*i));
12240 mem = gen_rtx_MEM (DFmode, addr);
12241 set_mem_alias_set (mem, rs6000_sr_alias_set);
12243 RTVEC_ELT (p, i+3) =
12244 gen_rtx_SET (VOIDmode,
12245 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12250 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12254 /* Write function epilogue. */
12257 rs6000_output_function_epilogue (FILE *file,
12258 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12260 rs6000_stack_t *info = rs6000_stack_info ();
12262 if (! HAVE_epilogue)
12264 rtx insn = get_last_insn ();
12265 /* If the last insn was a BARRIER, we don't have to write anything except
12266 the trace table. */
12267 if (GET_CODE (insn) == NOTE)
12268 insn = prev_nonnote_insn (insn);
12269 if (insn == 0 || GET_CODE (insn) != BARRIER)
12271 /* This is slightly ugly, but at least we don't have two
12272 copies of the epilogue-emitting code. */
12275 /* A NOTE_INSN_DELETED is supposed to be at the start
12276 and end of the "toplevel" insn chain. */
12277 emit_note (NOTE_INSN_DELETED);
12278 rs6000_emit_epilogue (FALSE);
12279 emit_note (NOTE_INSN_DELETED);
12281 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12285 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12287 INSN_ADDRESSES_NEW (insn, addr);
12292 if (TARGET_DEBUG_STACK)
12293 debug_rtx_list (get_insns (), 100);
12294 final (get_insns (), file, FALSE, FALSE);
12299 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
12300 /* Mach-O doesn't support labels at the end of objects, so if
12301 it looks like we might want one, insert a NOP. */
12303 rtx insn = get_last_insn ();
12306 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12307 insn = PREV_INSN (insn);
12311 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12312 fputs ("\tnop\n", file);
12316 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12319 We don't output a traceback table if -finhibit-size-directive was
12320 used. The documentation for -finhibit-size-directive reads
12321 ``don't output a @code{.size} assembler directive, or anything
12322 else that would cause trouble if the function is split in the
12323 middle, and the two halves are placed at locations far apart in
12324 memory.'' The traceback table has this property, since it
12325 includes the offset from the start of the function to the
12326 traceback table itself.
12328 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12329 different traceback table. */
12330 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12331 && rs6000_traceback != traceback_none)
12333 const char *fname = NULL;
12334 const char *language_string = lang_hooks.name;
12335 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12337 int optional_tbtab;
12339 if (rs6000_traceback == traceback_full)
12340 optional_tbtab = 1;
12341 else if (rs6000_traceback == traceback_part)
12342 optional_tbtab = 0;
12344 optional_tbtab = !optimize_size && !TARGET_ELF;
12346 if (optional_tbtab)
12348 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12349 while (*fname == '.') /* V.4 encodes . in the name */
12352 /* Need label immediately before tbtab, so we can compute
12353 its offset from the function start. */
12354 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12355 ASM_OUTPUT_LABEL (file, fname);
12358 /* The .tbtab pseudo-op can only be used for the first eight
12359 expressions, since it can't handle the possibly variable
12360 length fields that follow. However, if you omit the optional
12361 fields, the assembler outputs zeros for all optional fields
12362 anyways, giving each variable length field is minimum length
12363 (as defined in sys/debug.h). Thus we can not use the .tbtab
12364 pseudo-op at all. */
12366 /* An all-zero word flags the start of the tbtab, for debuggers
12367 that have to find it by searching forward from the entry
12368 point or from the current pc. */
12369 fputs ("\t.long 0\n", file);
12371 /* Tbtab format type. Use format type 0. */
12372 fputs ("\t.byte 0,", file);
12374 /* Language type. Unfortunately, there doesn't seem to be any
12375 official way to get this info, so we use language_string. C
12376 is 0. C++ is 9. No number defined for Obj-C, so use the
12377 value for C for now. There is no official value for Java,
12378 although IBM appears to be using 13. There is no official value
12379 for Chill, so we've chosen 44 pseudo-randomly. */
12380 if (! strcmp (language_string, "GNU C")
12381 || ! strcmp (language_string, "GNU Objective-C"))
12383 else if (! strcmp (language_string, "GNU F77"))
12385 else if (! strcmp (language_string, "GNU Ada"))
12387 else if (! strcmp (language_string, "GNU Pascal"))
12389 else if (! strcmp (language_string, "GNU C++"))
12391 else if (! strcmp (language_string, "GNU Java"))
12393 else if (! strcmp (language_string, "GNU CHILL"))
12397 fprintf (file, "%d,", i);
12399 /* 8 single bit fields: global linkage (not set for C extern linkage,
12400 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12401 from start of procedure stored in tbtab, internal function, function
12402 has controlled storage, function has no toc, function uses fp,
12403 function logs/aborts fp operations. */
12404 /* Assume that fp operations are used if any fp reg must be saved. */
12405 fprintf (file, "%d,",
12406 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12408 /* 6 bitfields: function is interrupt handler, name present in
12409 proc table, function calls alloca, on condition directives
12410 (controls stack walks, 3 bits), saves condition reg, saves
12412 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12413 set up as a frame pointer, even when there is no alloca call. */
12414 fprintf (file, "%d,",
12415 ((optional_tbtab << 6)
12416 | ((optional_tbtab & frame_pointer_needed) << 5)
12417 | (info->cr_save_p << 1)
12418 | (info->lr_save_p)));
12420 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12422 fprintf (file, "%d,",
12423 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12425 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12426 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12428 if (optional_tbtab)
12430 /* Compute the parameter info from the function decl argument
12433 int next_parm_info_bit = 31;
12435 for (decl = DECL_ARGUMENTS (current_function_decl);
12436 decl; decl = TREE_CHAIN (decl))
12438 rtx parameter = DECL_INCOMING_RTL (decl);
12439 enum machine_mode mode = GET_MODE (parameter);
12441 if (GET_CODE (parameter) == REG)
12443 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12449 if (mode == SFmode)
12451 else if (mode == DFmode || mode == TFmode)
12456 /* If only one bit will fit, don't or in this entry. */
12457 if (next_parm_info_bit > 0)
12458 parm_info |= (bits << (next_parm_info_bit - 1));
12459 next_parm_info_bit -= 2;
12463 fixed_parms += ((GET_MODE_SIZE (mode)
12464 + (UNITS_PER_WORD - 1))
12466 next_parm_info_bit -= 1;
12472 /* Number of fixed point parameters. */
12473 /* This is actually the number of words of fixed point parameters; thus
12474 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12475 fprintf (file, "%d,", fixed_parms);
12477 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12479 /* This is actually the number of fp registers that hold parameters;
12480 and thus the maximum value is 13. */
12481 /* Set parameters on stack bit if parameters are not in their original
12482 registers, regardless of whether they are on the stack? Xlc
12483 seems to set the bit when not optimizing. */
12484 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12486 if (! optional_tbtab)
12489 /* Optional fields follow. Some are variable length. */
12491 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12492 11 double float. */
12493 /* There is an entry for each parameter in a register, in the order that
12494 they occur in the parameter list. Any intervening arguments on the
12495 stack are ignored. If the list overflows a long (max possible length
12496 34 bits) then completely leave off all elements that don't fit. */
12497 /* Only emit this long if there was at least one parameter. */
12498 if (fixed_parms || float_parms)
12499 fprintf (file, "\t.long %d\n", parm_info);
12501 /* Offset from start of code to tb table. */
12502 fputs ("\t.long ", file);
12503 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12505 RS6000_OUTPUT_BASENAME (file, fname);
12507 assemble_name (file, fname);
12509 fputs ("-.", file);
12511 RS6000_OUTPUT_BASENAME (file, fname);
12513 assemble_name (file, fname);
12517 /* Interrupt handler mask. */
12518 /* Omit this long, since we never set the interrupt handler bit
12521 /* Number of CTL (controlled storage) anchors. */
12522 /* Omit this long, since the has_ctl bit is never set above. */
12524 /* Displacement into stack of each CTL anchor. */
12525 /* Omit this list of longs, because there are no CTL anchors. */
12527 /* Length of function name. */
12530 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12532 /* Function name. */
12533 assemble_string (fname, strlen (fname));
12535 /* Register for alloca automatic storage; this is always reg 31.
12536 Only emit this if the alloca bit was set above. */
12537 if (frame_pointer_needed)
12538 fputs ("\t.byte 31\n", file);
12540 fputs ("\t.align 2\n", file);
12544 /* A C compound statement that outputs the assembler code for a thunk
12545 function, used to implement C++ virtual function calls with
12546 multiple inheritance. The thunk acts as a wrapper around a virtual
12547 function, adjusting the implicit object parameter before handing
12548 control off to the real function.
12550 First, emit code to add the integer DELTA to the location that
12551 contains the incoming first argument. Assume that this argument
12552 contains a pointer, and is the one used to pass the `this' pointer
12553 in C++. This is the incoming argument *before* the function
12554 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12555 values of all other incoming arguments.
12557 After the addition, emit code to jump to FUNCTION, which is a
12558 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12559 not touch the return address. Hence returning from FUNCTION will
12560 return to whoever called the current `thunk'.
12562 The effect must be as if FUNCTION had been called directly with the
12563 adjusted first argument. This macro is responsible for emitting
12564 all of the code for a thunk function; output_function_prologue()
12565 and output_function_epilogue() are not invoked.
12567 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12568 been extracted from it.) It might possibly be useful on some
12569 targets, but probably not.
12571 If you do not define this macro, the target-independent code in the
12572 C++ frontend will generate a less efficient heavyweight thunk that
12573 calls FUNCTION instead of jumping to it. The generic approach does
12574 not support varargs. */
12577 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12578 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12581 rtx this, insn, funexp;
12583 reload_completed = 1;
12584 epilogue_completed = 1;
12585 no_new_pseudos = 1;
12587 /* Mark the end of the (empty) prologue. */
12588 emit_note (NOTE_INSN_PROLOGUE_END);
12590 /* Find the "this" pointer. If the function returns a structure,
12591 the structure return pointer is in r3. */
12592 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12593 this = gen_rtx_REG (Pmode, 4);
12595 this = gen_rtx_REG (Pmode, 3);
12597 /* Apply the constant offset, if required. */
12600 rtx delta_rtx = GEN_INT (delta);
12601 emit_insn (TARGET_32BIT
12602 ? gen_addsi3 (this, this, delta_rtx)
12603 : gen_adddi3 (this, this, delta_rtx));
12606 /* Apply the offset from the vtable, if required. */
12609 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12610 rtx tmp = gen_rtx_REG (Pmode, 12);
12612 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12613 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12615 emit_insn (TARGET_32BIT
12616 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12617 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12618 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12622 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12624 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12626 emit_insn (TARGET_32BIT
12627 ? gen_addsi3 (this, this, tmp)
12628 : gen_adddi3 (this, this, tmp));
12631 /* Generate a tail call to the target function. */
12632 if (!TREE_USED (function))
12634 assemble_external (function);
12635 TREE_USED (function) = 1;
12637 funexp = XEXP (DECL_RTL (function), 0);
12638 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12641 if (MACHOPIC_INDIRECT)
12642 funexp = machopic_indirect_call_target (funexp);
12645 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12646 generate sibcall RTL explicitly to avoid constraint abort. */
12647 insn = emit_call_insn (
12648 gen_rtx_PARALLEL (VOIDmode,
12650 gen_rtx_CALL (VOIDmode,
12651 funexp, const0_rtx),
12652 gen_rtx_USE (VOIDmode, const0_rtx),
12653 gen_rtx_USE (VOIDmode,
12654 gen_rtx_REG (SImode,
12655 LINK_REGISTER_REGNUM)),
12656 gen_rtx_RETURN (VOIDmode))));
12657 SIBLING_CALL_P (insn) = 1;
12660 /* Run just enough of rest_of_compilation to get the insns emitted.
12661 There's not really enough bulk here to make other passes such as
12662 instruction scheduling worth while. Note that use_thunk calls
12663 assemble_start_function and assemble_end_function. */
12664 insn = get_insns ();
12665 insn_locators_initialize ();
12666 shorten_branches (insn);
12667 final_start_function (insn, file, 1);
12668 final (insn, file, 1, 0);
12669 final_end_function ();
12671 reload_completed = 0;
12672 epilogue_completed = 0;
12673 no_new_pseudos = 0;
12676 /* A quick summary of the various types of 'constant-pool tables'
12679 Target Flags Name One table per
12680 AIX (none) AIX TOC object file
12681 AIX -mfull-toc AIX TOC object file
12682 AIX -mminimal-toc AIX minimal TOC translation unit
12683 SVR4/EABI (none) SVR4 SDATA object file
12684 SVR4/EABI -fpic SVR4 pic object file
12685 SVR4/EABI -fPIC SVR4 PIC translation unit
12686 SVR4/EABI -mrelocatable EABI TOC function
12687 SVR4/EABI -maix AIX TOC object file
12688 SVR4/EABI -maix -mminimal-toc
12689 AIX minimal TOC translation unit
12691 Name Reg. Set by entries contains:
12692 made by addrs? fp? sum?
12694 AIX TOC 2 crt0 as Y option option
12695 AIX minimal TOC 30 prolog gcc Y Y option
12696 SVR4 SDATA 13 crt0 gcc N Y N
12697 SVR4 pic 30 prolog ld Y not yet N
12698 SVR4 PIC 30 prolog gcc Y option option
12699 EABI TOC 30 prolog gcc Y option option
12703 /* Hash functions for the hash table. */
12706 rs6000_hash_constant (rtx k)
12708 enum rtx_code code = GET_CODE (k);
12709 enum machine_mode mode = GET_MODE (k);
12710 unsigned result = (code << 3) ^ mode;
12711 const char *format;
12714 format = GET_RTX_FORMAT (code);
12715 flen = strlen (format);
12721 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12724 if (mode != VOIDmode)
12725 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12737 for (; fidx < flen; fidx++)
12738 switch (format[fidx])
12743 const char *str = XSTR (k, fidx);
12744 len = strlen (str);
12745 result = result * 613 + len;
12746 for (i = 0; i < len; i++)
12747 result = result * 613 + (unsigned) str[i];
12752 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12756 result = result * 613 + (unsigned) XINT (k, fidx);
12759 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12760 result = result * 613 + (unsigned) XWINT (k, fidx);
12764 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12765 result = result * 613 + (unsigned) (XWINT (k, fidx)
12779 toc_hash_function (const void *hash_entry)
12781 const struct toc_hash_struct *thc =
12782 (const struct toc_hash_struct *) hash_entry;
12783 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12786 /* Compare H1 and H2 for equivalence. */
12789 toc_hash_eq (const void *h1, const void *h2)
12791 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12792 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12794 if (((const struct toc_hash_struct *) h1)->key_mode
12795 != ((const struct toc_hash_struct *) h2)->key_mode)
12798 return rtx_equal_p (r1, r2);
12801 /* These are the names given by the C++ front-end to vtables, and
12802 vtable-like objects. Ideally, this logic should not be here;
12803 instead, there should be some programmatic way of inquiring as
12804 to whether or not an object is a vtable. */
12806 #define VTABLE_NAME_P(NAME) \
12807 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12808 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12809 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12810 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12813 rs6000_output_symbol_ref (FILE *file, rtx x)
12815 /* Currently C++ toc references to vtables can be emitted before it
12816 is decided whether the vtable is public or private. If this is
12817 the case, then the linker will eventually complain that there is
12818 a reference to an unknown section. Thus, for vtables only,
12819 we emit the TOC reference to reference the symbol and not the
12821 const char *name = XSTR (x, 0);
12823 if (VTABLE_NAME_P (name))
12825 RS6000_OUTPUT_BASENAME (file, name);
12828 assemble_name (file, name);
12831 /* Output a TOC entry. We derive the entry name from what is being
12835 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
12838 const char *name = buf;
12839 const char *real_name;
12846 /* When the linker won't eliminate them, don't output duplicate
12847 TOC entries (this happens on AIX if there is any kind of TOC,
12848 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12850 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12852 struct toc_hash_struct *h;
12855 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12856 time because GGC is not initialized at that point. */
12857 if (toc_hash_table == NULL)
12858 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12859 toc_hash_eq, NULL);
12861 h = ggc_alloc (sizeof (*h));
12863 h->key_mode = mode;
12864 h->labelno = labelno;
12866 found = htab_find_slot (toc_hash_table, h, 1);
12867 if (*found == NULL)
12869 else /* This is indeed a duplicate.
12870 Set this label equal to that label. */
12872 fputs ("\t.set ", file);
12873 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12874 fprintf (file, "%d,", labelno);
12875 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12876 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12882 /* If we're going to put a double constant in the TOC, make sure it's
12883 aligned properly when strict alignment is on. */
12884 if (GET_CODE (x) == CONST_DOUBLE
12885 && STRICT_ALIGNMENT
12886 && GET_MODE_BITSIZE (mode) >= 64
12887 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12888 ASM_OUTPUT_ALIGN (file, 3);
12891 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12893 /* Handle FP constants specially. Note that if we have a minimal
12894 TOC, things we put here aren't actually in the TOC, so we can allow
12896 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12898 REAL_VALUE_TYPE rv;
12901 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12902 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12906 if (TARGET_MINIMAL_TOC)
12907 fputs (DOUBLE_INT_ASM_OP, file);
12909 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12910 k[0] & 0xffffffff, k[1] & 0xffffffff,
12911 k[2] & 0xffffffff, k[3] & 0xffffffff);
12912 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12913 k[0] & 0xffffffff, k[1] & 0xffffffff,
12914 k[2] & 0xffffffff, k[3] & 0xffffffff);
12919 if (TARGET_MINIMAL_TOC)
12920 fputs ("\t.long ", file);
12922 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12923 k[0] & 0xffffffff, k[1] & 0xffffffff,
12924 k[2] & 0xffffffff, k[3] & 0xffffffff);
12925 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12926 k[0] & 0xffffffff, k[1] & 0xffffffff,
12927 k[2] & 0xffffffff, k[3] & 0xffffffff);
12931 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12933 REAL_VALUE_TYPE rv;
12936 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12937 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12941 if (TARGET_MINIMAL_TOC)
12942 fputs (DOUBLE_INT_ASM_OP, file);
12944 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12945 k[0] & 0xffffffff, k[1] & 0xffffffff);
12946 fprintf (file, "0x%lx%08lx\n",
12947 k[0] & 0xffffffff, k[1] & 0xffffffff);
12952 if (TARGET_MINIMAL_TOC)
12953 fputs ("\t.long ", file);
12955 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12956 k[0] & 0xffffffff, k[1] & 0xffffffff);
12957 fprintf (file, "0x%lx,0x%lx\n",
12958 k[0] & 0xffffffff, k[1] & 0xffffffff);
12962 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12964 REAL_VALUE_TYPE rv;
12967 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12968 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12972 if (TARGET_MINIMAL_TOC)
12973 fputs (DOUBLE_INT_ASM_OP, file);
12975 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12976 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12981 if (TARGET_MINIMAL_TOC)
12982 fputs ("\t.long ", file);
12984 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12985 fprintf (file, "0x%lx\n", l & 0xffffffff);
12989 else if (GET_MODE (x) == VOIDmode
12990 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12992 unsigned HOST_WIDE_INT low;
12993 HOST_WIDE_INT high;
12995 if (GET_CODE (x) == CONST_DOUBLE)
12997 low = CONST_DOUBLE_LOW (x);
12998 high = CONST_DOUBLE_HIGH (x);
13001 #if HOST_BITS_PER_WIDE_INT == 32
13004 high = (low & 0x80000000) ? ~0 : 0;
13008 low = INTVAL (x) & 0xffffffff;
13009 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13013 /* TOC entries are always Pmode-sized, but since this
13014 is a bigendian machine then if we're putting smaller
13015 integer constants in the TOC we have to pad them.
13016 (This is still a win over putting the constants in
13017 a separate constant pool, because then we'd have
13018 to have both a TOC entry _and_ the actual constant.)
13020 For a 32-bit target, CONST_INT values are loaded and shifted
13021 entirely within `low' and can be stored in one TOC entry. */
13023 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13024 abort ();/* It would be easy to make this work, but it doesn't now. */
13026 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13028 #if HOST_BITS_PER_WIDE_INT == 32
13029 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13030 POINTER_SIZE, &low, &high, 0);
13033 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13034 high = (HOST_WIDE_INT) low >> 32;
13041 if (TARGET_MINIMAL_TOC)
13042 fputs (DOUBLE_INT_ASM_OP, file);
13044 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13045 (long) high & 0xffffffff, (long) low & 0xffffffff);
13046 fprintf (file, "0x%lx%08lx\n",
13047 (long) high & 0xffffffff, (long) low & 0xffffffff);
13052 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13054 if (TARGET_MINIMAL_TOC)
13055 fputs ("\t.long ", file);
13057 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13058 (long) high & 0xffffffff, (long) low & 0xffffffff);
13059 fprintf (file, "0x%lx,0x%lx\n",
13060 (long) high & 0xffffffff, (long) low & 0xffffffff);
13064 if (TARGET_MINIMAL_TOC)
13065 fputs ("\t.long ", file);
13067 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13068 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13074 if (GET_CODE (x) == CONST)
13076 if (GET_CODE (XEXP (x, 0)) != PLUS)
13079 base = XEXP (XEXP (x, 0), 0);
13080 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13083 if (GET_CODE (base) == SYMBOL_REF)
13084 name = XSTR (base, 0);
13085 else if (GET_CODE (base) == LABEL_REF)
13086 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13087 else if (GET_CODE (base) == CODE_LABEL)
13088 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13092 real_name = (*targetm.strip_name_encoding) (name);
13093 if (TARGET_MINIMAL_TOC)
13094 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13097 fprintf (file, "\t.tc %s", real_name);
13100 fprintf (file, ".N%d", - offset);
13102 fprintf (file, ".P%d", offset);
13104 fputs ("[TC],", file);
13107 /* Currently C++ toc references to vtables can be emitted before it
13108 is decided whether the vtable is public or private. If this is
13109 the case, then the linker will eventually complain that there is
13110 a TOC reference to an unknown section. Thus, for vtables only,
13111 we emit the TOC reference to reference the symbol and not the
13113 if (VTABLE_NAME_P (name))
13115 RS6000_OUTPUT_BASENAME (file, name);
13117 fprintf (file, "%d", offset);
13118 else if (offset > 0)
13119 fprintf (file, "+%d", offset);
13122 output_addr_const (file, x);
13126 /* Output an assembler pseudo-op to write an ASCII string of N characters
13127 starting at P to FILE.
13129 On the RS/6000, we have to do this using the .byte operation and
13130 write out special characters outside the quoted string.
13131 Also, the assembler is broken; very long strings are truncated,
13132 so we must artificially break them up early. */
13135 output_ascii (FILE *file, const char *p, int n)
13138 int i, count_string;
13139 const char *for_string = "\t.byte \"";
13140 const char *for_decimal = "\t.byte ";
13141 const char *to_close = NULL;
13144 for (i = 0; i < n; i++)
13147 if (c >= ' ' && c < 0177)
13150 fputs (for_string, file);
13153 /* Write two quotes to get one. */
13161 for_decimal = "\"\n\t.byte ";
13165 if (count_string >= 512)
13167 fputs (to_close, file);
13169 for_string = "\t.byte \"";
13170 for_decimal = "\t.byte ";
13178 fputs (for_decimal, file);
13179 fprintf (file, "%d", c);
13181 for_string = "\n\t.byte \"";
13182 for_decimal = ", ";
13188 /* Now close the string if we have written one. Then end the line. */
13190 fputs (to_close, file);
13193 /* Generate a unique section name for FILENAME for a section type
13194 represented by SECTION_DESC. Output goes into BUF.
13196 SECTION_DESC can be any string, as long as it is different for each
13197 possible section type.
13199 We name the section in the same manner as xlc. The name begins with an
13200 underscore followed by the filename (after stripping any leading directory
13201 names) with the last period replaced by the string SECTION_DESC. If
13202 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13206 rs6000_gen_section_name (char **buf, const char *filename,
13207 const char *section_desc)
13209 const char *q, *after_last_slash, *last_period = 0;
13213 after_last_slash = filename;
13214 for (q = filename; *q; q++)
13217 after_last_slash = q + 1;
13218 else if (*q == '.')
13222 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13223 *buf = (char *) xmalloc (len);
13228 for (q = after_last_slash; *q; q++)
13230 if (q == last_period)
13232 strcpy (p, section_desc);
13233 p += strlen (section_desc);
13237 else if (ISALNUM (*q))
13241 if (last_period == 0)
13242 strcpy (p, section_desc);
13247 /* Emit profile function. */
13250 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13252 if (TARGET_PROFILE_KERNEL)
13255 if (DEFAULT_ABI == ABI_AIX)
13257 #ifndef NO_PROFILE_COUNTERS
13258 # define NO_PROFILE_COUNTERS 0
13260 if (NO_PROFILE_COUNTERS)
13261 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13265 const char *label_name;
13268 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13269 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13270 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13272 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13276 else if (DEFAULT_ABI == ABI_DARWIN)
13278 const char *mcount_name = RS6000_MCOUNT;
13279 int caller_addr_regno = LINK_REGISTER_REGNUM;
13281 /* Be conservative and always set this, at least for now. */
13282 current_function_uses_pic_offset_table = 1;
13285 /* For PIC code, set up a stub and collect the caller's address
13286 from r0, which is where the prologue puts it. */
13287 if (MACHOPIC_INDIRECT)
13289 mcount_name = machopic_stub_name (mcount_name);
13290 if (current_function_uses_pic_offset_table)
13291 caller_addr_regno = 0;
13294 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13296 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13300 /* Write function profiler code. */
13303 output_function_profiler (FILE *file, int labelno)
13308 switch (DEFAULT_ABI)
13317 warning ("no profiling of 64-bit code for this ABI");
13320 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13321 fprintf (file, "\tmflr %s\n", reg_names[0]);
13324 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13325 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13326 reg_names[0], save_lr, reg_names[1]);
13327 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13328 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13329 assemble_name (file, buf);
13330 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13332 else if (flag_pic > 1)
13334 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13335 reg_names[0], save_lr, reg_names[1]);
13336 /* Now, we need to get the address of the label. */
13337 fputs ("\tbl 1f\n\t.long ", file);
13338 assemble_name (file, buf);
13339 fputs ("-.\n1:", file);
13340 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13341 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13342 reg_names[0], reg_names[11]);
13343 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13344 reg_names[0], reg_names[0], reg_names[11]);
13348 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13349 assemble_name (file, buf);
13350 fputs ("@ha\n", file);
13351 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13352 reg_names[0], save_lr, reg_names[1]);
13353 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13354 assemble_name (file, buf);
13355 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13358 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13359 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13364 if (!TARGET_PROFILE_KERNEL)
13366 /* Don't do anything, done in output_profile_hook (). */
13373 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13374 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13376 if (current_function_needs_context)
13378 asm_fprintf (file, "\tstd %s,24(%s)\n",
13379 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13380 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13381 asm_fprintf (file, "\tld %s,24(%s)\n",
13382 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13385 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13393 rs6000_use_dfa_pipeline_interface (void)
13398 /* Power4 load update and store update instructions are cracked into a
13399 load or store and an integer insn which are executed in the same cycle.
13400 Branches have their own dispatch slot which does not count against the
13401 GCC issue rate, but it changes the program flow so there are no other
13402 instructions to issue in this cycle. */
13405 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13406 int verbose ATTRIBUTE_UNUSED,
13407 rtx insn, int more)
13409 if (GET_CODE (PATTERN (insn)) == USE
13410 || GET_CODE (PATTERN (insn)) == CLOBBER)
13413 if (rs6000_cpu == PROCESSOR_POWER4)
13415 if (is_microcoded_insn (insn))
13417 else if (is_cracked_insn (insn))
13418 return more > 2 ? more - 2 : 0;
13424 /* Adjust the cost of a scheduling dependency. Return the new cost of
13425 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13428 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13431 if (! recog_memoized (insn))
13434 if (REG_NOTE_KIND (link) != 0)
13437 if (REG_NOTE_KIND (link) == 0)
13439 /* Data dependency; DEP_INSN writes a register that INSN reads
13440 some cycles later. */
13441 switch (get_attr_type (insn))
13444 /* Tell the first scheduling pass about the latency between
13445 a mtctr and bctr (and mtlr and br/blr). The first
13446 scheduling pass will not know about this latency since
13447 the mtctr instruction, which has the latency associated
13448 to it, will be generated by reload. */
13449 return TARGET_POWER ? 5 : 4;
13451 /* Leave some extra cycles between a compare and its
13452 dependent branch, to inhibit expensive mispredicts. */
13453 if ((rs6000_cpu_attr == CPU_PPC603
13454 || rs6000_cpu_attr == CPU_PPC604
13455 || rs6000_cpu_attr == CPU_PPC604E
13456 || rs6000_cpu_attr == CPU_PPC620
13457 || rs6000_cpu_attr == CPU_PPC630
13458 || rs6000_cpu_attr == CPU_PPC750
13459 || rs6000_cpu_attr == CPU_PPC7400
13460 || rs6000_cpu_attr == CPU_PPC7450
13461 || rs6000_cpu_attr == CPU_POWER4)
13462 && recog_memoized (dep_insn)
13463 && (INSN_CODE (dep_insn) >= 0)
13464 && (get_attr_type (dep_insn) == TYPE_CMP
13465 || get_attr_type (dep_insn) == TYPE_COMPARE
13466 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13467 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13468 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13469 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13470 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13471 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13476 /* Fall out to return default cost. */
13482 /* The function returns a true if INSN is microcoded.
13483 Return false ptherwise. */
13486 is_microcoded_insn (rtx insn)
13488 if (!insn || !INSN_P (insn)
13489 || GET_CODE (PATTERN (insn)) == USE
13490 || GET_CODE (PATTERN (insn)) == CLOBBER)
13493 if (rs6000_cpu == PROCESSOR_POWER4)
13495 enum attr_type type = get_attr_type (insn);
13496 if (type == TYPE_LOAD_EXT_U
13497 || type == TYPE_LOAD_EXT_UX
13498 || type == TYPE_LOAD_UX
13499 || type == TYPE_STORE_UX
13500 || type == TYPE_MFCR)
13507 /* The function returns a non-zero value if INSN can be scheduled only
13508 as the first insn in a dispatch group ("dispatch-slot restricted").
13509 In this case, the returned value indicates how many dispatch slots
13510 the insn occupies (at the beginning of the group).
13511 Return 0 otherwise. */
13514 is_dispatch_slot_restricted (rtx insn)
13516 enum attr_type type;
13518 if (rs6000_cpu != PROCESSOR_POWER4)
13522 || insn == NULL_RTX
13523 || GET_CODE (insn) == NOTE
13524 || GET_CODE (PATTERN (insn)) == USE
13525 || GET_CODE (PATTERN (insn)) == CLOBBER)
13528 type = get_attr_type (insn);
13534 case TYPE_DELAYED_CR:
13535 case TYPE_CR_LOGICAL:
13547 /* The function returns true if INSN is cracked into 2 instructions
13548 by the processor (and therefore occupies 2 issue slots). */
13551 is_cracked_insn (rtx insn)
13553 if (!insn || !INSN_P (insn)
13554 || GET_CODE (PATTERN (insn)) == USE
13555 || GET_CODE (PATTERN (insn)) == CLOBBER)
13558 if (rs6000_cpu == PROCESSOR_POWER4)
13560 enum attr_type type = get_attr_type (insn);
13561 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13562 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13563 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13564 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13565 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13566 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13567 || type == TYPE_IDIV || type == TYPE_LDIV
13568 || type == TYPE_INSERT_WORD)
13575 /* The function returns true if INSN can be issued only from
13576 the branch slot. */
13579 is_branch_slot_insn (rtx insn)
13581 if (!insn || !INSN_P (insn)
13582 || GET_CODE (PATTERN (insn)) == USE
13583 || GET_CODE (PATTERN (insn)) == CLOBBER)
13586 if (rs6000_cpu == PROCESSOR_POWER4)
13588 enum attr_type type = get_attr_type (insn);
13589 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13597 /* A C statement (sans semicolon) to update the integer scheduling
13598 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13599 INSN earlier, reduce the priority to execute INSN later. Do not
13600 define this macro if you do not need to adjust the scheduling
13601 priorities of insns. */
13604 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13606 /* On machines (like the 750) which have asymmetric integer units,
13607 where one integer unit can do multiply and divides and the other
13608 can't, reduce the priority of multiply/divide so it is scheduled
13609 before other integer operations. */
13612 if (! INSN_P (insn))
13615 if (GET_CODE (PATTERN (insn)) == USE)
13618 switch (rs6000_cpu_attr) {
13620 switch (get_attr_type (insn))
13627 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13628 priority, priority);
13629 if (priority >= 0 && priority < 0x01000000)
13636 if (is_dispatch_slot_restricted (insn)
13637 && reload_completed
13638 && current_sched_info->sched_max_insns_priority
13639 && rs6000_sched_restricted_insns_priority)
13642 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13643 if (rs6000_sched_restricted_insns_priority == 1)
13644 /* Attach highest priority to insn. This means that in
13645 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13646 precede 'priority' (critical path) considerations. */
13647 return current_sched_info->sched_max_insns_priority;
13648 else if (rs6000_sched_restricted_insns_priority == 2)
13649 /* Increase priority of insn by a minimal amount. This means that in
13650 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13651 precede dispatch-slot restriction considerations. */
13652 return (priority + 1);
13658 /* Return how many instructions the machine can issue per cycle. */
13661 rs6000_issue_rate (void)
13663 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13664 if (!reload_completed)
13667 switch (rs6000_cpu_attr) {
13668 case CPU_RIOS1: /* ? */
13670 case CPU_PPC601: /* ? */
13692 /* Return how many instructions to look ahead for better insn
13696 rs6000_use_sched_lookahead (void)
13698 if (rs6000_cpu_attr == CPU_PPC8540)
13703 /* Determine is PAT refers to memory. */
13706 is_mem_ref (rtx pat)
13712 if (GET_CODE (pat) == MEM)
13715 /* Recursively process the pattern. */
13716 fmt = GET_RTX_FORMAT (GET_CODE (pat));
13718 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
13721 ret |= is_mem_ref (XEXP (pat, i));
13722 else if (fmt[i] == 'E')
13723 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
13724 ret |= is_mem_ref (XVECEXP (pat, i, j));
13730 /* Determine if PAT is a PATTERN of a load insn. */
13733 is_load_insn1 (rtx pat)
13735 if (!pat || pat == NULL_RTX)
13738 if (GET_CODE (pat) == SET)
13739 return is_mem_ref (SET_SRC (pat));
13741 if (GET_CODE (pat) == PARALLEL)
13745 for (i = 0; i < XVECLEN (pat, 0); i++)
13746 if (is_load_insn1 (XVECEXP (pat, 0, i)))
13753 /* Determine if INSN loads from memory. */
13756 is_load_insn (rtx insn)
13758 if (!insn || !INSN_P (insn))
13761 if (GET_CODE (insn) == CALL_INSN)
13764 return is_load_insn1 (PATTERN (insn));
13767 /* Determine if PAT is a PATTERN of a store insn. */
13770 is_store_insn1 (rtx pat)
13772 if (!pat || pat == NULL_RTX)
13775 if (GET_CODE (pat) == SET)
13776 return is_mem_ref (SET_DEST (pat));
13778 if (GET_CODE (pat) == PARALLEL)
13782 for (i = 0; i < XVECLEN (pat, 0); i++)
13783 if (is_store_insn1 (XVECEXP (pat, 0, i)))
13790 /* Determine if INSN stores to memory. */
13793 is_store_insn (rtx insn)
13795 if (!insn || !INSN_P (insn))
13798 return is_store_insn1 (PATTERN (insn));
13801 /* Returns whether the dependence between INSN and NEXT is considered
13802 costly by the given target. */
13805 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
13807 /* If the flag is not enbled - no dependence is considered costly;
13808 allow all dependent insns in the same group.
13809 This is the most aggressive option. */
13810 if (rs6000_sched_costly_dep == no_dep_costly)
13813 /* If the flag is set to 1 - a dependence is always considered costly;
13814 do not allow dependent instructions in the same group.
13815 This is the most conservative option. */
13816 if (rs6000_sched_costly_dep == all_deps_costly)
13819 if (rs6000_sched_costly_dep == store_to_load_dep_costly
13820 && is_load_insn (next)
13821 && is_store_insn (insn))
13822 /* Prevent load after store in the same group. */
13825 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
13826 && is_load_insn (next)
13827 && is_store_insn (insn)
13828 && (!link || (int) REG_NOTE_KIND (link) == 0))
13829 /* Prevent load after store in the same group if it is a true dependence. */
13832 /* The flag is set to X; dependences with latency >= X are considered costly,
13833 and will not be scheduled in the same group. */
13834 if (rs6000_sched_costly_dep <= max_dep_latency
13835 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
13841 /* Return the next insn after INSN that is found before TAIL is reached,
13842 skipping any "non-active" insns - insns that will not actually occupy
13843 an issue slot. Return NULL_RTX if such an insn is not found. */
13846 get_next_active_insn (rtx insn, rtx tail)
13850 if (!insn || insn == tail)
13853 next_insn = NEXT_INSN (insn);
13856 && next_insn != tail
13857 && (GET_CODE(next_insn) == NOTE
13858 || GET_CODE (PATTERN (next_insn)) == USE
13859 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
13861 next_insn = NEXT_INSN (next_insn);
13864 if (!next_insn || next_insn == tail)
13870 /* Return whether the presence of INSN causes a dispatch group terminatation
13871 of group WHICH_GROUP.
13873 If WHICH_GROUP == current_group, this function will return true if INSN
13874 causes the termination of the current group (i.e, the dispatch group to
13875 which INSN belongs). This means that INSN will be the last insn in the
13876 group it belongs to.
13878 If WHICH_GROUP == previous_group, this function will return true if INSN
13879 causes the termination of the previous group (i.e, the dispatch group that
13880 precedes the group to which INSN belongs). This means that INSN will be
13881 the first insn in the group it belongs to). */
13884 insn_terminates_group_p (rtx insn, enum group_termination which_group)
13886 enum attr_type type;
13891 type = get_attr_type (insn);
13893 if (is_microcoded_insn (insn))
13896 if (which_group == current_group)
13898 if (is_branch_slot_insn (insn))
13902 else if (which_group == previous_group)
13904 if (is_dispatch_slot_restricted (insn))
13912 /* Return true if it is recommended to keep NEXT_INSN "far" (in a seperate
13913 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
13916 is_costly_group (rtx *group_insns, rtx next_insn)
13921 int issue_rate = rs6000_issue_rate ();
13923 for (i = 0; i < issue_rate; i++)
13925 rtx insn = group_insns[i];
13928 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
13930 rtx next = XEXP (link, 0);
13931 if (next == next_insn)
13933 cost = insn_cost (insn, link, next_insn);
13934 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
13943 /* Utility of the function redefine_groups.
13944 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
13945 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
13946 to keep it "far" (in a separate group) from GROUP_INSNS, following
13947 one of the following schemes, depending on the value of the flag
13948 -minsert_sched_nops = X:
13949 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
13950 in order to force NEXT_INSN into a seperate group.
13951 (2) X < sched_finish_regroup_exact: insert exactly X nops.
13952 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
13953 insertion (has a group just ended, how many vacant issue slots remain in the
13954 last group, and how many dispatch groups were encountered so far). */
13957 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
13958 bool *group_end, int can_issue_more, int *group_count)
13962 int issue_rate = rs6000_issue_rate ();
13963 bool end = *group_end;
13966 if (next_insn == NULL_RTX)
13967 return can_issue_more;
13969 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
13970 return can_issue_more;
13972 force = is_costly_group (group_insns, next_insn);
13974 return can_issue_more;
13976 if (sched_verbose > 6)
13977 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
13978 *group_count ,can_issue_more);
13980 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
13983 can_issue_more = 0;
13985 /* Since only a branch can be issued in the last issue_slot, it is
13986 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
13987 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
13988 in this case the last nop will start a new group and the branch will be
13989 forced to the new group. */
13990 if (can_issue_more && !is_branch_slot_insn (next_insn))
13993 while (can_issue_more > 0)
13996 emit_insn_before (nop, next_insn);
14004 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14006 int n_nops = rs6000_sched_insert_nops;
14008 /* Nops can't be issued from the branch slot, so the effective
14009 issue_rate for nops is 'issue_rate - 1'. */
14010 if (can_issue_more == 0)
14011 can_issue_more = issue_rate;
14013 if (can_issue_more == 0)
14015 can_issue_more = issue_rate - 1;
14018 for (i = 0; i < issue_rate; i++)
14020 group_insns[i] = 0;
14027 emit_insn_before (nop, next_insn);
14028 if (can_issue_more == issue_rate - 1) /* new group begins */
14031 if (can_issue_more == 0)
14033 can_issue_more = issue_rate - 1;
14036 for (i = 0; i < issue_rate; i++)
14038 group_insns[i] = 0;
14044 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14047 *group_end = /* Is next_insn going to start a new group? */
14049 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14050 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14051 || (can_issue_more < issue_rate &&
14052 insn_terminates_group_p (next_insn, previous_group)));
14053 if (*group_end && end)
14056 if (sched_verbose > 6)
14057 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14058 *group_count, can_issue_more);
14059 return can_issue_more;
14062 return can_issue_more;
14065 /* This function tries to synch the dispatch groups that the compiler "sees"
14066 with the dispatch groups that the processor dispatcher is expected to
14067 form in practice. It tries to achieve this synchronization by forcing the
14068 estimated processor grouping on the compiler (as opposed to the function
14069 'pad_goups' which tries to force the scheduler's grouping on the processor).
14071 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14072 examines the (estimated) dispatch groups that will be formed by the processor
14073 dispatcher. It marks these group boundaries to reflect the estimated
14074 processor grouping, overriding the grouping that the scheduler had marked.
14075 Depending on the value of the flag '-minsert-sched-nops' this function can
14076 force certain insns into separate groups or force a certain distance between
14077 them by inserting nops, for example, if there exists a "costly dependence"
14080 The function estimates the group boundaries that the processor will form as
14081 folllows: It keeps track of how many vacant issue slots are available after
14082 each insn. A subsequent insn will start a new group if one of the following
14084 - no more vacant issue slots remain in the current dispatch group.
14085 - only the last issue slot, which is the branch slot, is vacant, but the next
14086 insn is not a branch.
14087 - only the last 2 or less issue slots, including the branch slot, are vacant,
14088 which means that a cracked insn (which occupies two issue slots) can't be
14089 issued in this group.
14090 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14091 start a new group. */
14094 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14096 rtx insn, next_insn;
14098 int can_issue_more;
14101 int group_count = 0;
14105 issue_rate = rs6000_issue_rate ();
14106 group_insns = alloca (issue_rate * sizeof (rtx));
14107 for (i = 0; i < issue_rate; i++)
14109 group_insns[i] = 0;
14111 can_issue_more = issue_rate;
14113 insn = get_next_active_insn (prev_head_insn, tail);
14116 while (insn != NULL_RTX)
14118 slot = (issue_rate - can_issue_more);
14119 group_insns[slot] = insn;
14121 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14122 if (insn_terminates_group_p (insn, current_group))
14123 can_issue_more = 0;
14125 next_insn = get_next_active_insn (insn, tail);
14126 if (next_insn == NULL_RTX)
14127 return group_count + 1;
14129 group_end = /* Is next_insn going to start a new group? */
14130 (can_issue_more == 0
14131 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14132 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14133 || (can_issue_more < issue_rate &&
14134 insn_terminates_group_p (next_insn, previous_group)));
14136 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14137 next_insn, &group_end, can_issue_more, &group_count);
14142 can_issue_more = 0;
14143 for (i = 0; i < issue_rate; i++)
14145 group_insns[i] = 0;
14149 if (GET_MODE (next_insn) == TImode && can_issue_more)
14150 PUT_MODE(next_insn, VOIDmode);
14151 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14152 PUT_MODE (next_insn, TImode);
14155 if (can_issue_more == 0)
14156 can_issue_more = issue_rate;
14159 return group_count;
14162 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14163 dispatch group boundaries that the scheduler had marked. Pad with nops
14164 any dispatch groups which have vacant issue slots, in order to force the
14165 scheduler's grouping on the processor dispatcher. The function
14166 returns the number of dispatch groups found. */
14169 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14171 rtx insn, next_insn;
14174 int can_issue_more;
14176 int group_count = 0;
14178 /* Initialize issue_rate. */
14179 issue_rate = rs6000_issue_rate ();
14180 can_issue_more = issue_rate;
14182 insn = get_next_active_insn (prev_head_insn, tail);
14183 next_insn = get_next_active_insn (insn, tail);
14185 while (insn != NULL_RTX)
14188 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14190 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14192 if (next_insn == NULL_RTX)
14197 /* If the scheduler had marked group termination at this location
14198 (between insn and next_indn), and neither insn nor next_insn will
14199 force group termination, pad the group with nops to force group
14202 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14203 && !insn_terminates_group_p (insn, current_group)
14204 && !insn_terminates_group_p (next_insn, previous_group))
14206 if (!is_branch_slot_insn(next_insn))
14209 while (can_issue_more)
14212 emit_insn_before (nop, next_insn);
14217 can_issue_more = issue_rate;
14222 next_insn = get_next_active_insn (insn, tail);
14225 return group_count;
14228 /* The following function is called at the end of scheduling BB.
14229 After reload, it inserts nops at insn group bundling. */
14232 rs6000_sched_finish (FILE *dump, int sched_verbose)
14237 fprintf (dump, "=== Finishing schedule.\n");
14239 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14241 if (rs6000_sched_insert_nops == sched_finish_none)
14244 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14245 n_groups = pad_groups (dump, sched_verbose,
14246 current_sched_info->prev_head,
14247 current_sched_info->next_tail);
14249 n_groups = redefine_groups (dump, sched_verbose,
14250 current_sched_info->prev_head,
14251 current_sched_info->next_tail);
14253 if (sched_verbose >= 6)
14255 fprintf (dump, "ngroups = %d\n", n_groups);
14256 print_rtl (dump, current_sched_info->prev_head);
14257 fprintf (dump, "Done finish_sched\n");
14262 /* Length in units of the trampoline for entering a nested function. */
14265 rs6000_trampoline_size (void)
14269 switch (DEFAULT_ABI)
14275 ret = (TARGET_32BIT) ? 12 : 24;
14280 ret = (TARGET_32BIT) ? 40 : 48;
14287 /* Emit RTL insns to initialize the variable parts of a trampoline.
14288 FNADDR is an RTX for the address of the function's pure code.
14289 CXT is an RTX for the static chain value for the function. */
14292 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14294 enum machine_mode pmode = Pmode;
14295 int regsize = (TARGET_32BIT) ? 4 : 8;
14296 rtx ctx_reg = force_reg (pmode, cxt);
14298 switch (DEFAULT_ABI)
14303 /* Macros to shorten the code expansions below. */
14304 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14305 #define MEM_PLUS(addr,offset) \
14306 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14308 /* Under AIX, just build the 3 word function descriptor */
14311 rtx fn_reg = gen_reg_rtx (pmode);
14312 rtx toc_reg = gen_reg_rtx (pmode);
14313 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14314 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14315 emit_move_insn (MEM_DEREF (addr), fn_reg);
14316 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14317 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14321 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14324 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14325 FALSE, VOIDmode, 4,
14327 GEN_INT (rs6000_trampoline_size ()), SImode,
14337 /* Table of valid machine attributes. */
14339 const struct attribute_spec rs6000_attribute_table[] =
14341 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14342 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14343 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14344 { NULL, 0, 0, false, false, false, NULL }
14347 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14348 struct attribute_spec.handler. */
14351 rs6000_handle_longcall_attribute (tree *node, tree name,
14352 tree args ATTRIBUTE_UNUSED,
14353 int flags ATTRIBUTE_UNUSED,
14354 bool *no_add_attrs)
14356 if (TREE_CODE (*node) != FUNCTION_TYPE
14357 && TREE_CODE (*node) != FIELD_DECL
14358 && TREE_CODE (*node) != TYPE_DECL)
14360 warning ("`%s' attribute only applies to functions",
14361 IDENTIFIER_POINTER (name));
14362 *no_add_attrs = true;
14368 /* Set longcall attributes on all functions declared when
14369 rs6000_default_long_calls is true. */
14371 rs6000_set_default_type_attributes (tree type)
14373 if (rs6000_default_long_calls
14374 && (TREE_CODE (type) == FUNCTION_TYPE
14375 || TREE_CODE (type) == METHOD_TYPE))
14376 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14378 TYPE_ATTRIBUTES (type));
14381 /* Return a reference suitable for calling a function with the
14382 longcall attribute. */
14385 rs6000_longcall_ref (rtx call_ref)
14387 const char *call_name;
14390 if (GET_CODE (call_ref) != SYMBOL_REF)
14393 /* System V adds '.' to the internal name, so skip them. */
14394 call_name = XSTR (call_ref, 0);
14395 if (*call_name == '.')
14397 while (*call_name == '.')
14400 node = get_identifier (call_name);
14401 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14404 return force_reg (Pmode, call_ref);
14407 #ifdef USING_ELFOS_H
14409 /* A C statement or statements to switch to the appropriate section
14410 for output of RTX in mode MODE. You can assume that RTX is some
14411 kind of constant in RTL. The argument MODE is redundant except in
14412 the case of a `const_int' rtx. Select the section by calling
14413 `text_section' or one of the alternatives for other sections.
14415 Do not define this macro if you put all constants in the read-only
14419 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14420 unsigned HOST_WIDE_INT align)
14422 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14425 default_elf_select_rtx_section (mode, x, align);
14428 /* A C statement or statements to switch to the appropriate
14429 section for output of DECL. DECL is either a `VAR_DECL' node
14430 or a constant of some sort. RELOC indicates whether forming
14431 the initial value of DECL requires link-time relocations. */
14434 rs6000_elf_select_section (tree decl, int reloc,
14435 unsigned HOST_WIDE_INT align)
14437 /* Pretend that we're always building for a shared library when
14438 ABI_AIX, because otherwise we end up with dynamic relocations
14439 in read-only sections. This happens for function pointers,
14440 references to vtables in typeinfo, and probably other cases. */
14441 default_elf_select_section_1 (decl, reloc, align,
14442 flag_pic || DEFAULT_ABI == ABI_AIX);
14445 /* A C statement to build up a unique section name, expressed as a
14446 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14447 RELOC indicates whether the initial value of EXP requires
14448 link-time relocations. If you do not define this macro, GCC will use
14449 the symbol name prefixed by `.' as the section name. Note - this
14450 macro can now be called for uninitialized data items as well as
14451 initialized data and functions. */
14454 rs6000_elf_unique_section (tree decl, int reloc)
14456 /* As above, pretend that we're always building for a shared library
14457 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14458 default_unique_section_1 (decl, reloc,
14459 flag_pic || DEFAULT_ABI == ABI_AIX);
14462 /* For a SYMBOL_REF, set generic flags and then perform some
14463 target-specific processing.
14465 When the AIX ABI is requested on a non-AIX system, replace the
14466 function name with the real name (with a leading .) rather than the
14467 function descriptor name. This saves a lot of overriding code to
14468 read the prefixes. */
14471 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14473 default_encode_section_info (decl, rtl, first);
14476 && TREE_CODE (decl) == FUNCTION_DECL
14478 && DEFAULT_ABI == ABI_AIX)
14480 rtx sym_ref = XEXP (rtl, 0);
14481 size_t len = strlen (XSTR (sym_ref, 0));
14482 char *str = alloca (len + 2);
14484 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14485 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14490 rs6000_elf_in_small_data_p (tree decl)
14492 if (rs6000_sdata == SDATA_NONE)
14495 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14497 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14498 if (strcmp (section, ".sdata") == 0
14499 || strcmp (section, ".sdata2") == 0
14500 || strcmp (section, ".sbss") == 0
14501 || strcmp (section, ".sbss2") == 0
14502 || strcmp (section, ".PPC.EMB.sdata0") == 0
14503 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14508 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14511 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14512 /* If it's not public, and we're not going to reference it there,
14513 there's no need to put it in the small data section. */
14514 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14521 #endif /* USING_ELFOS_H */
14524 /* Return a REG that occurs in ADDR with coefficient 1.
14525 ADDR can be effectively incremented by incrementing REG.
14527 r0 is special and we must not select it as an address
14528 register by this routine since our caller will try to
14529 increment the returned register via an "la" instruction. */
14532 find_addr_reg (rtx addr)
14534 while (GET_CODE (addr) == PLUS)
14536 if (GET_CODE (XEXP (addr, 0)) == REG
14537 && REGNO (XEXP (addr, 0)) != 0)
14538 addr = XEXP (addr, 0);
14539 else if (GET_CODE (XEXP (addr, 1)) == REG
14540 && REGNO (XEXP (addr, 1)) != 0)
14541 addr = XEXP (addr, 1);
14542 else if (CONSTANT_P (XEXP (addr, 0)))
14543 addr = XEXP (addr, 1);
14544 else if (CONSTANT_P (XEXP (addr, 1)))
14545 addr = XEXP (addr, 0);
14549 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14555 rs6000_fatal_bad_address (rtx op)
14557 fatal_insn ("bad address", op);
14563 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14564 reference and a constant. */
14567 symbolic_operand (rtx op)
14569 switch (GET_CODE (op))
14576 return (GET_CODE (op) == SYMBOL_REF ||
14577 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
14578 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
14579 && GET_CODE (XEXP (op, 1)) == CONST_INT);
14586 #ifdef RS6000_LONG_BRANCH
14588 static tree stub_list = 0;
14590 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
14591 procedure calls to the linked list. */
14594 add_compiler_stub (tree label_name, tree function_name, int line_number)
14596 tree stub = build_tree_list (function_name, label_name);
14597 TREE_TYPE (stub) = build_int_2 (line_number, 0);
14598 TREE_CHAIN (stub) = stub_list;
14602 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
14603 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
14604 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
14606 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
14607 handling procedure calls from the linked list and initializes the
14611 output_compiler_stub (void)
14614 char label_buf[256];
14618 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14620 fprintf (asm_out_file,
14621 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
14623 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14624 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14625 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
14626 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14628 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
14630 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
14633 label_buf[0] = '_';
14634 strcpy (label_buf+1,
14635 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
14638 strcpy (tmp_buf, "lis r12,hi16(");
14639 strcat (tmp_buf, label_buf);
14640 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
14641 strcat (tmp_buf, label_buf);
14642 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
14643 output_asm_insn (tmp_buf, 0);
14645 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14646 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14647 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
14648 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14654 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14655 already there or not. */
14658 no_previous_def (tree function_name)
14661 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14662 if (function_name == STUB_FUNCTION_NAME (stub))
14667 /* GET_PREV_LABEL gets the label name from the previous definition of
14671 get_prev_label (tree function_name)
14674 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14675 if (function_name == STUB_FUNCTION_NAME (stub))
14676 return STUB_LABEL_NAME (stub);
14680 /* INSN is either a function call or a millicode call. It may have an
14681 unconditional jump in its delay slot.
14683 CALL_DEST is the routine we are calling. */
14686 output_call (rtx insn, rtx call_dest, int operand_number)
14688 static char buf[256];
14689 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
14692 tree funname = get_identifier (XSTR (call_dest, 0));
14694 if (no_previous_def (funname))
14696 int line_number = 0;
14697 rtx label_rtx = gen_label_rtx ();
14698 char *label_buf, temp_buf[256];
14699 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
14700 CODE_LABEL_NUMBER (label_rtx));
14701 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
14702 labelname = get_identifier (label_buf);
14703 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
14705 line_number = NOTE_LINE_NUMBER (insn);
14706 add_compiler_stub (labelname, funname, line_number);
14709 labelname = get_prev_label (funname);
14711 sprintf (buf, "jbsr %%z%d,%.246s",
14712 operand_number, IDENTIFIER_POINTER (labelname));
14717 sprintf (buf, "bl %%z%d", operand_number);
14722 #endif /* RS6000_LONG_BRANCH */
14724 /* Generate PIC and indirect symbol stubs. */
14727 machopic_output_stub (FILE *file, const char *symb, const char *stub)
14729 unsigned int length;
14730 char *symbol_name, *lazy_ptr_name;
14731 char *local_label_0;
14732 static int label = 0;
14734 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
14735 symb = (*targetm.strip_name_encoding) (symb);
14738 length = strlen (symb);
14739 symbol_name = alloca (length + 32);
14740 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
14742 lazy_ptr_name = alloca (length + 32);
14743 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
14746 machopic_picsymbol_stub1_section ();
14748 machopic_symbol_stub1_section ();
14749 fprintf (file, "\t.align 2\n");
14751 fprintf (file, "%s:\n", stub);
14752 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14757 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
14758 sprintf (local_label_0, "\"L%011d$spb\"", label);
14760 fprintf (file, "\tmflr r0\n");
14761 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
14762 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
14763 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
14764 lazy_ptr_name, local_label_0);
14765 fprintf (file, "\tmtlr r0\n");
14766 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
14767 lazy_ptr_name, local_label_0);
14768 fprintf (file, "\tmtctr r12\n");
14769 fprintf (file, "\tbctr\n");
14773 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
14774 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
14775 fprintf (file, "\tmtctr r12\n");
14776 fprintf (file, "\tbctr\n");
14779 machopic_lazy_symbol_ptr_section ();
14780 fprintf (file, "%s:\n", lazy_ptr_name);
14781 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14782 fprintf (file, "\t.long dyld_stub_binding_helper\n");
14785 /* Legitimize PIC addresses. If the address is already
14786 position-independent, we return ORIG. Newly generated
14787 position-independent addresses go into a reg. This is REG if non
14788 zero, otherwise we allocate register(s) as necessary. */
14790 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
14793 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
14798 if (reg == NULL && ! reload_in_progress && ! reload_completed)
14799 reg = gen_reg_rtx (Pmode);
14801 if (GET_CODE (orig) == CONST)
14803 if (GET_CODE (XEXP (orig, 0)) == PLUS
14804 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
14807 if (GET_CODE (XEXP (orig, 0)) == PLUS)
14809 /* Use a different reg for the intermediate value, as
14810 it will be marked UNCHANGING. */
14811 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
14814 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
14817 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
14823 if (GET_CODE (offset) == CONST_INT)
14825 if (SMALL_INT (offset))
14826 return plus_constant (base, INTVAL (offset));
14827 else if (! reload_in_progress && ! reload_completed)
14828 offset = force_reg (Pmode, offset);
14831 rtx mem = force_const_mem (Pmode, orig);
14832 return machopic_legitimize_pic_address (mem, Pmode, reg);
14835 return gen_rtx (PLUS, Pmode, base, offset);
14838 /* Fall back on generic machopic code. */
14839 return machopic_legitimize_pic_address (orig, mode, reg);
14842 /* This is just a placeholder to make linking work without having to
14843 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
14844 ever needed for Darwin (not too likely!) this would have to get a
14845 real definition. */
14852 #endif /* TARGET_MACHO */
14855 static unsigned int
14856 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
14859 = default_section_type_flags_1 (decl, name, reloc,
14860 flag_pic || DEFAULT_ABI == ABI_AIX);
14862 if (TARGET_RELOCATABLE)
14863 flags |= SECTION_WRITE;
14868 /* Record an element in the table of global constructors. SYMBOL is
14869 a SYMBOL_REF of the function to be called; PRIORITY is a number
14870 between 0 and MAX_INIT_PRIORITY.
14872 This differs from default_named_section_asm_out_constructor in
14873 that we have special handling for -mrelocatable. */
14876 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
14878 const char *section = ".ctors";
14881 if (priority != DEFAULT_INIT_PRIORITY)
14883 sprintf (buf, ".ctors.%.5u",
14884 /* Invert the numbering so the linker puts us in the proper
14885 order; constructors are run from right to left, and the
14886 linker sorts in increasing order. */
14887 MAX_INIT_PRIORITY - priority);
14891 named_section_flags (section, SECTION_WRITE);
14892 assemble_align (POINTER_SIZE);
14894 if (TARGET_RELOCATABLE)
14896 fputs ("\t.long (", asm_out_file);
14897 output_addr_const (asm_out_file, symbol);
14898 fputs (")@fixup\n", asm_out_file);
14901 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14905 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
14907 const char *section = ".dtors";
14910 if (priority != DEFAULT_INIT_PRIORITY)
14912 sprintf (buf, ".dtors.%.5u",
14913 /* Invert the numbering so the linker puts us in the proper
14914 order; constructors are run from right to left, and the
14915 linker sorts in increasing order. */
14916 MAX_INIT_PRIORITY - priority);
14920 named_section_flags (section, SECTION_WRITE);
14921 assemble_align (POINTER_SIZE);
14923 if (TARGET_RELOCATABLE)
14925 fputs ("\t.long (", asm_out_file);
14926 output_addr_const (asm_out_file, symbol);
14927 fputs (")@fixup\n", asm_out_file);
14930 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14934 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
14938 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
14939 ASM_OUTPUT_LABEL (file, name);
14940 fputs (DOUBLE_INT_ASM_OP, file);
14942 assemble_name (file, name);
14943 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
14944 assemble_name (file, name);
14945 fputs (",24\n\t.type\t.", file);
14946 assemble_name (file, name);
14947 fputs (",@function\n", file);
14948 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
14950 fputs ("\t.globl\t.", file);
14951 assemble_name (file, name);
14954 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14956 ASM_OUTPUT_LABEL (file, name);
14960 if (TARGET_RELOCATABLE
14961 && (get_pool_size () != 0 || current_function_profile)
14966 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
14968 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14969 fprintf (file, "\t.long ");
14970 assemble_name (file, buf);
14972 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14973 assemble_name (file, buf);
14977 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
14978 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14980 if (DEFAULT_ABI == ABI_AIX)
14982 const char *desc_name, *orig_name;
14984 orig_name = (*targetm.strip_name_encoding) (name);
14985 desc_name = orig_name;
14986 while (*desc_name == '.')
14989 if (TREE_PUBLIC (decl))
14990 fprintf (file, "\t.globl %s\n", desc_name);
14992 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
14993 fprintf (file, "%s:\n", desc_name);
14994 fprintf (file, "\t.long %s\n", orig_name);
14995 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
14996 if (DEFAULT_ABI == ABI_AIX)
14997 fputs ("\t.long 0\n", file);
14998 fprintf (file, "\t.previous\n");
15000 ASM_OUTPUT_LABEL (file, name);
15006 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15008 fputs (GLOBAL_ASM_OP, stream);
15009 RS6000_OUTPUT_BASENAME (stream, name);
15010 putc ('\n', stream);
15014 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15017 static const char * const suffix[3] = { "PR", "RO", "RW" };
15019 if (flags & SECTION_CODE)
15021 else if (flags & SECTION_WRITE)
15026 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15027 (flags & SECTION_CODE) ? "." : "",
15028 name, suffix[smclass], flags & SECTION_ENTSIZE);
15032 rs6000_xcoff_select_section (tree decl, int reloc,
15033 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15035 if (decl_readonly_section_1 (decl, reloc, 1))
15037 if (TREE_PUBLIC (decl))
15038 read_only_data_section ();
15040 read_only_private_data_section ();
15044 if (TREE_PUBLIC (decl))
15047 private_data_section ();
15052 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15056 /* Use select_section for private and uninitialized data. */
15057 if (!TREE_PUBLIC (decl)
15058 || DECL_COMMON (decl)
15059 || DECL_INITIAL (decl) == NULL_TREE
15060 || DECL_INITIAL (decl) == error_mark_node
15061 || (flag_zero_initialized_in_bss
15062 && initializer_zerop (DECL_INITIAL (decl))))
15065 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15066 name = (*targetm.strip_name_encoding) (name);
15067 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15070 /* Select section for constant in constant pool.
15072 On RS/6000, all constants are in the private read-only data area.
15073 However, if this is being placed in the TOC it must be output as a
15077 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15078 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15080 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15083 read_only_private_data_section ();
15086 /* Remove any trailing [DS] or the like from the symbol name. */
15088 static const char *
15089 rs6000_xcoff_strip_name_encoding (const char *name)
15094 len = strlen (name);
15095 if (name[len - 1] == ']')
15096 return ggc_alloc_string (name, len - 4);
15101 /* Section attributes. AIX is always PIC. */
15103 static unsigned int
15104 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15106 unsigned int align;
15107 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15109 /* Align to at least UNIT size. */
15110 if (flags & SECTION_CODE)
15111 align = MIN_UNITS_PER_WORD;
15113 /* Increase alignment of large objects if not already stricter. */
15114 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15115 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15116 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15118 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15121 /* Output at beginning of assembler file.
15123 Initialize the section names for the RS/6000 at this point.
15125 Specify filename, including full path, to assembler.
15127 We want to go into the TOC section so at least one .toc will be emitted.
15128 Also, in order to output proper .bs/.es pairs, we need at least one static
15129 [RW] section emitted.
15131 Finally, declare mcount when profiling to make the assembler happy. */
15134 rs6000_xcoff_file_start (void)
15136 rs6000_gen_section_name (&xcoff_bss_section_name,
15137 main_input_filename, ".bss_");
15138 rs6000_gen_section_name (&xcoff_private_data_section_name,
15139 main_input_filename, ".rw_");
15140 rs6000_gen_section_name (&xcoff_read_only_section_name,
15141 main_input_filename, ".ro_");
15143 fputs ("\t.file\t", asm_out_file);
15144 output_quoted_string (asm_out_file, main_input_filename);
15145 fputc ('\n', asm_out_file);
15147 if (write_symbols != NO_DEBUG)
15148 private_data_section ();
15151 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15152 rs6000_file_start ();
15155 /* Output at end of assembler file.
15156 On the RS/6000, referencing data should automatically pull in text. */
15159 rs6000_xcoff_file_end (void)
15162 fputs ("_section_.text:\n", asm_out_file);
15164 fputs (TARGET_32BIT
15165 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15168 #endif /* TARGET_XCOFF */
15171 /* Cross-module name binding. Darwin does not support overriding
15172 functions at dynamic-link time. */
15175 rs6000_binds_local_p (tree decl)
15177 return default_binds_local_p_1 (decl, 0);
15181 /* Compute a (partial) cost for rtx X. Return true if the complete
15182 cost has been computed, and false if subexpressions should be
15183 scanned. In either case, *TOTAL contains the cost result. */
15186 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15191 /* On the RS/6000, if it is valid in the insn, it is free.
15192 So this always returns 0. */
15203 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15204 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15205 + 0x8000) >= 0x10000)
15206 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15207 ? COSTS_N_INSNS (2)
15208 : COSTS_N_INSNS (1));
15214 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15215 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15216 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15217 ? COSTS_N_INSNS (2)
15218 : COSTS_N_INSNS (1));
15224 *total = COSTS_N_INSNS (2);
15227 switch (rs6000_cpu)
15229 case PROCESSOR_RIOS1:
15230 case PROCESSOR_PPC405:
15231 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15232 ? COSTS_N_INSNS (5)
15233 : (INTVAL (XEXP (x, 1)) >= -256
15234 && INTVAL (XEXP (x, 1)) <= 255)
15235 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15238 case PROCESSOR_PPC440:
15239 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15240 ? COSTS_N_INSNS (3)
15241 : COSTS_N_INSNS (2));
15244 case PROCESSOR_RS64A:
15245 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15246 ? GET_MODE (XEXP (x, 1)) != DImode
15247 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15248 : (INTVAL (XEXP (x, 1)) >= -256
15249 && INTVAL (XEXP (x, 1)) <= 255)
15250 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15253 case PROCESSOR_RIOS2:
15254 case PROCESSOR_MPCCORE:
15255 case PROCESSOR_PPC604e:
15256 *total = COSTS_N_INSNS (2);
15259 case PROCESSOR_PPC601:
15260 *total = COSTS_N_INSNS (5);
15263 case PROCESSOR_PPC603:
15264 case PROCESSOR_PPC7400:
15265 case PROCESSOR_PPC750:
15266 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15267 ? COSTS_N_INSNS (5)
15268 : (INTVAL (XEXP (x, 1)) >= -256
15269 && INTVAL (XEXP (x, 1)) <= 255)
15270 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15273 case PROCESSOR_PPC7450:
15274 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15275 ? COSTS_N_INSNS (4)
15276 : COSTS_N_INSNS (3));
15279 case PROCESSOR_PPC403:
15280 case PROCESSOR_PPC604:
15281 case PROCESSOR_PPC8540:
15282 *total = COSTS_N_INSNS (4);
15285 case PROCESSOR_PPC620:
15286 case PROCESSOR_PPC630:
15287 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15288 ? GET_MODE (XEXP (x, 1)) != DImode
15289 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15290 : (INTVAL (XEXP (x, 1)) >= -256
15291 && INTVAL (XEXP (x, 1)) <= 255)
15292 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15295 case PROCESSOR_POWER4:
15296 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15297 ? GET_MODE (XEXP (x, 1)) != DImode
15298 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15299 : COSTS_N_INSNS (2));
15308 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15309 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15311 *total = COSTS_N_INSNS (2);
15318 switch (rs6000_cpu)
15320 case PROCESSOR_RIOS1:
15321 *total = COSTS_N_INSNS (19);
15324 case PROCESSOR_RIOS2:
15325 *total = COSTS_N_INSNS (13);
15328 case PROCESSOR_RS64A:
15329 *total = (GET_MODE (XEXP (x, 1)) != DImode
15330 ? COSTS_N_INSNS (65)
15331 : COSTS_N_INSNS (67));
15334 case PROCESSOR_MPCCORE:
15335 *total = COSTS_N_INSNS (6);
15338 case PROCESSOR_PPC403:
15339 *total = COSTS_N_INSNS (33);
15342 case PROCESSOR_PPC405:
15343 *total = COSTS_N_INSNS (35);
15346 case PROCESSOR_PPC440:
15347 *total = COSTS_N_INSNS (34);
15350 case PROCESSOR_PPC601:
15351 *total = COSTS_N_INSNS (36);
15354 case PROCESSOR_PPC603:
15355 *total = COSTS_N_INSNS (37);
15358 case PROCESSOR_PPC604:
15359 case PROCESSOR_PPC604e:
15360 *total = COSTS_N_INSNS (20);
15363 case PROCESSOR_PPC620:
15364 case PROCESSOR_PPC630:
15365 *total = (GET_MODE (XEXP (x, 1)) != DImode
15366 ? COSTS_N_INSNS (21)
15367 : COSTS_N_INSNS (37));
15370 case PROCESSOR_PPC750:
15371 case PROCESSOR_PPC8540:
15372 case PROCESSOR_PPC7400:
15373 *total = COSTS_N_INSNS (19);
15376 case PROCESSOR_PPC7450:
15377 *total = COSTS_N_INSNS (23);
15380 case PROCESSOR_POWER4:
15381 *total = (GET_MODE (XEXP (x, 1)) != DImode
15382 ? COSTS_N_INSNS (18)
15383 : COSTS_N_INSNS (34));
15391 *total = COSTS_N_INSNS (4);
15395 /* MEM should be slightly more expensive than (plus (reg) (const)) */
15404 /* A C expression returning the cost of moving data from a register of class
15405 CLASS1 to one of CLASS2. */
15408 rs6000_register_move_cost (enum machine_mode mode,
15409 enum reg_class from, enum reg_class to)
15411 /* Moves from/to GENERAL_REGS. */
15412 if (reg_classes_intersect_p (to, GENERAL_REGS)
15413 || reg_classes_intersect_p (from, GENERAL_REGS))
15415 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15418 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15419 return (rs6000_memory_move_cost (mode, from, 0)
15420 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15422 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
15423 else if (from == CR_REGS)
15427 /* A move will cost one instruction per GPR moved. */
15428 return 2 * HARD_REGNO_NREGS (0, mode);
15431 /* Moving between two similar registers is just one instruction. */
15432 else if (reg_classes_intersect_p (to, from))
15433 return mode == TFmode ? 4 : 2;
15435 /* Everything else has to go through GENERAL_REGS. */
15437 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15438 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15441 /* A C expressions returning the cost of moving data of MODE from a register to
15445 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15446 int in ATTRIBUTE_UNUSED)
15448 if (reg_classes_intersect_p (class, GENERAL_REGS))
15449 return 4 * HARD_REGNO_NREGS (0, mode);
15450 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15451 return 4 * HARD_REGNO_NREGS (32, mode);
15452 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15453 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15455 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15458 /* Return an RTX representing where to find the function value of a
15459 function returning MODE. */
15461 rs6000_complex_function_value (enum machine_mode mode)
15463 unsigned int regno;
15465 enum machine_mode inner = GET_MODE_INNER (mode);
15467 if (FLOAT_MODE_P (mode))
15468 regno = FP_ARG_RETURN;
15471 regno = GP_ARG_RETURN;
15473 /* 32-bit is OK since it'll go in r3/r4. */
15475 && GET_MODE_BITSIZE (inner) >= 32)
15476 return gen_rtx_REG (mode, regno);
15479 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15481 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15482 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
15483 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15486 /* Define how to find the value returned by a function.
15487 VALTYPE is the data type of the value (as a tree).
15488 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15489 otherwise, FUNC is 0.
15491 On the SPE, both FPs and vectors are returned in r3.
15493 On RS/6000 an integer value is in r3 and a floating-point value is in
15494 fp1, unless -msoft-float. */
15497 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15499 enum machine_mode mode;
15500 unsigned int regno;
15502 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15504 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15505 return gen_rtx_PARALLEL (DImode,
15507 gen_rtx_EXPR_LIST (VOIDmode,
15508 gen_rtx_REG (SImode, GP_ARG_RETURN),
15510 gen_rtx_EXPR_LIST (VOIDmode,
15511 gen_rtx_REG (SImode,
15512 GP_ARG_RETURN + 1),
15516 if ((INTEGRAL_TYPE_P (valtype)
15517 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15518 || POINTER_TYPE_P (valtype))
15519 mode = TARGET_32BIT ? SImode : DImode;
15521 mode = TYPE_MODE (valtype);
15523 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15524 regno = FP_ARG_RETURN;
15525 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15526 && TARGET_HARD_FLOAT
15527 && SPLIT_COMPLEX_ARGS)
15528 return rs6000_complex_function_value (mode);
15529 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15530 regno = ALTIVEC_ARG_RETURN;
15532 regno = GP_ARG_RETURN;
15534 return gen_rtx_REG (mode, regno);
15537 /* Define how to find the value returned by a library function
15538 assuming the value has mode MODE. */
15540 rs6000_libcall_value (enum machine_mode mode)
15542 unsigned int regno;
15544 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15545 && TARGET_HARD_FLOAT && TARGET_FPRS)
15546 regno = FP_ARG_RETURN;
15547 else if (ALTIVEC_VECTOR_MODE (mode))
15548 regno = ALTIVEC_ARG_RETURN;
15549 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
15550 return rs6000_complex_function_value (mode);
15552 regno = GP_ARG_RETURN;
15554 return gen_rtx_REG (mode, regno);
15557 /* Return true if TYPE is of type __ev64_opaque__. */
15560 is_ev64_opaque_type (tree type)
15563 && (type == opaque_V2SI_type_node
15564 || type == opaque_V2SF_type_node
15565 || type == opaque_p_V2SI_type_node));
15569 rs6000_dwarf_register_span (rtx reg)
15573 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
15576 regno = REGNO (reg);
15578 /* The duality of the SPE register size wreaks all kinds of havoc.
15579 This is a way of distinguishing r0 in 32-bits from r0 in
15582 gen_rtx_PARALLEL (VOIDmode,
15585 gen_rtx_REG (SImode, regno + 1200),
15586 gen_rtx_REG (SImode, regno))
15588 gen_rtx_REG (SImode, regno),
15589 gen_rtx_REG (SImode, regno + 1200)));
15592 #include "gt-rs6000.h"