1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
55 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
58 #ifndef TARGET_NO_PROTOTYPE
59 #define TARGET_NO_PROTOTYPE 0
62 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
63 && easy_vector_same (x, y))
65 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
67 && easy_vector_same (x, y))
69 #define min(A,B) ((A) < (B) ? (A) : (B))
70 #define max(A,B) ((A) > (B) ? (A) : (B))
74 enum processor_type rs6000_cpu;
75 struct rs6000_cpu_select rs6000_select[3] =
77 /* switch name, tune arch */
78 { (const char *)0, "--with-cpu=", 1, 1 },
79 { (const char *)0, "-mcpu=", 1, 1 },
80 { (const char *)0, "-mtune=", 1, 0 },
83 /* Size of long double */
84 const char *rs6000_long_double_size_string;
85 int rs6000_long_double_type_size;
87 /* Whether -mabi=altivec has appeared */
88 int rs6000_altivec_abi;
90 /* Whether VRSAVE instructions should be generated. */
91 int rs6000_altivec_vrsave;
93 /* String from -mvrsave= option. */
94 const char *rs6000_altivec_vrsave_string;
96 /* Nonzero if we want SPE ABI extensions. */
99 /* Whether isel instructions should be generated. */
102 /* Whether SPE simd instructions should be generated. */
105 /* Nonzero if floating point operations are done in the GPRs. */
106 int rs6000_float_gprs = 0;
108 /* String from -mfloat-gprs=. */
109 const char *rs6000_float_gprs_string;
111 /* String from -misel=. */
112 const char *rs6000_isel_string;
114 /* String from -mspe=. */
115 const char *rs6000_spe_string;
117 /* Set to nonzero once AIX common-mode calls have been defined. */
118 static GTY(()) int common_mode_defined;
120 /* Save information from a "cmpxx" operation until the branch or scc is
122 rtx rs6000_compare_op0, rs6000_compare_op1;
123 int rs6000_compare_fp_p;
125 /* Label number of label created for -mrelocatable, to call to so we can
126 get the address of the GOT section */
127 int rs6000_pic_labelno;
130 /* Which abi to adhere to */
131 const char *rs6000_abi_name;
133 /* Semantics of the small data area */
134 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
136 /* Which small data model to use */
137 const char *rs6000_sdata_name = (char *)0;
139 /* Counter for labels which are to be placed in .fixup. */
140 int fixuplabelno = 0;
143 /* Bit size of immediate TLS offsets and string from which it is decoded. */
144 int rs6000_tls_size = 32;
145 const char *rs6000_tls_size_string;
147 /* ABI enumeration available for subtarget to use. */
148 enum rs6000_abi rs6000_current_abi;
150 /* ABI string from -mabi= option. */
151 const char *rs6000_abi_string;
154 const char *rs6000_debug_name;
155 int rs6000_debug_stack; /* debug stack applications */
156 int rs6000_debug_arg; /* debug argument handling */
159 static GTY(()) tree opaque_V2SI_type_node;
160 static GTY(()) tree opaque_V2SF_type_node;
161 static GTY(()) tree opaque_p_V2SI_type_node;
163 const char *rs6000_traceback_name;
165 traceback_default = 0,
171 /* Flag to say the TOC is initialized */
173 char toc_label_name[10];
175 /* Alias set for saves and restores from the rs6000 stack. */
176 static int rs6000_sr_alias_set;
178 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
179 The only place that looks at this is rs6000_set_default_type_attributes;
180 everywhere else should rely on the presence or absence of a longcall
181 attribute on the function declaration. */
182 int rs6000_default_long_calls;
183 const char *rs6000_longcall_switch;
185 /* Control alignment for fields within structures. */
186 /* String from -malign-XXXXX. */
187 const char *rs6000_alignment_string;
188 int rs6000_alignment_flags;
190 struct builtin_description
192 /* mask is not const because we're going to alter it below. This
193 nonsense will go away when we rewrite the -march infrastructure
194 to give us more target flag bits. */
196 const enum insn_code icode;
197 const char *const name;
198 const enum rs6000_builtins code;
201 static bool rs6000_function_ok_for_sibcall (tree, tree);
202 static int num_insns_constant_wide (HOST_WIDE_INT);
203 static void validate_condition_mode (enum rtx_code, enum machine_mode);
204 static rtx rs6000_generate_compare (enum rtx_code);
205 static void rs6000_maybe_dead (rtx);
206 static void rs6000_emit_stack_tie (void);
207 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
208 static rtx spe_synthesize_frame_save (rtx);
209 static bool spe_func_has_64bit_regs_p (void);
210 static void emit_frame_save (rtx, rtx, enum machine_mode,
211 unsigned int, int, int);
212 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
213 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
214 static unsigned rs6000_hash_constant (rtx);
215 static unsigned toc_hash_function (const void *);
216 static int toc_hash_eq (const void *, const void *);
217 static int constant_pool_expr_1 (rtx, int *, int *);
218 static bool constant_pool_expr_p (rtx);
219 static bool toc_relative_expr_p (rtx);
220 static bool legitimate_small_data_p (enum machine_mode, rtx);
221 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
222 static bool legitimate_indexed_address_p (rtx, int);
223 static bool legitimate_indirect_address_p (rtx, int);
224 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
225 static struct machine_function * rs6000_init_machine_status (void);
226 static bool rs6000_assemble_integer (rtx, unsigned int, int);
227 #ifdef HAVE_GAS_HIDDEN
228 static void rs6000_assemble_visibility (tree, int);
230 static int rs6000_ra_ever_killed (void);
231 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
232 extern const struct attribute_spec rs6000_attribute_table[];
233 static void rs6000_set_default_type_attributes (tree);
234 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
235 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
236 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
237 HOST_WIDE_INT, tree);
238 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
239 static void rs6000_file_start (void);
241 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
242 static void rs6000_elf_asm_out_constructor (rtx, int);
243 static void rs6000_elf_asm_out_destructor (rtx, int);
244 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
245 static void rs6000_elf_unique_section (tree, int);
246 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
247 unsigned HOST_WIDE_INT);
248 static void rs6000_elf_encode_section_info (tree, rtx, int)
250 static bool rs6000_elf_in_small_data_p (tree);
253 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
254 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
255 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
256 static void rs6000_xcoff_unique_section (tree, int);
257 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
258 unsigned HOST_WIDE_INT);
259 static const char * rs6000_xcoff_strip_name_encoding (const char *);
260 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
261 static void rs6000_xcoff_file_start (void);
262 static void rs6000_xcoff_file_end (void);
265 static bool rs6000_binds_local_p (tree);
267 static int rs6000_use_dfa_pipeline_interface (void);
268 static int rs6000_variable_issue (FILE *, int, rtx, int);
269 static bool rs6000_rtx_costs (rtx, int, int, int *);
270 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
271 static int rs6000_adjust_priority (rtx, int);
272 static int rs6000_issue_rate (void);
273 static int rs6000_use_sched_lookahead (void);
275 static void rs6000_init_builtins (void);
276 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
277 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
278 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
279 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
280 static void altivec_init_builtins (void);
281 static void rs6000_common_init_builtins (void);
283 static void enable_mask_for_builtins (struct builtin_description *,
284 int, enum rs6000_builtins,
285 enum rs6000_builtins);
286 static void spe_init_builtins (void);
287 static rtx spe_expand_builtin (tree, rtx, bool *);
288 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
289 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
290 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
292 static rtx altivec_expand_builtin (tree, rtx, bool *);
293 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
294 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
295 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
296 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
297 static rtx altivec_expand_predicate_builtin (enum insn_code,
298 const char *, tree, rtx);
299 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
300 static void rs6000_parse_abi_options (void);
301 static void rs6000_parse_alignment_option (void);
302 static void rs6000_parse_tls_size_option (void);
303 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
304 static int first_altivec_reg_to_save (void);
305 static unsigned int compute_vrsave_mask (void);
306 static void is_altivec_return_reg (rtx, void *);
307 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
308 int easy_vector_constant (rtx, enum machine_mode);
309 static int easy_vector_same (rtx, enum machine_mode);
310 static bool is_ev64_opaque_type (tree);
311 static rtx rs6000_dwarf_register_span (rtx);
312 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
313 static rtx rs6000_tls_get_addr (void);
314 static rtx rs6000_got_sym (void);
315 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
316 static const char *rs6000_get_some_local_dynamic_name (void);
317 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
318 static rtx rs6000_complex_function_value (enum machine_mode);
319 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
320 enum machine_mode, tree);
322 /* Hash table stuff for keeping track of TOC entries. */
324 struct toc_hash_struct GTY(())
326 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
327 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
329 enum machine_mode key_mode;
333 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
335 /* Default register names. */
336 char rs6000_reg_names[][8] =
338 "0", "1", "2", "3", "4", "5", "6", "7",
339 "8", "9", "10", "11", "12", "13", "14", "15",
340 "16", "17", "18", "19", "20", "21", "22", "23",
341 "24", "25", "26", "27", "28", "29", "30", "31",
342 "0", "1", "2", "3", "4", "5", "6", "7",
343 "8", "9", "10", "11", "12", "13", "14", "15",
344 "16", "17", "18", "19", "20", "21", "22", "23",
345 "24", "25", "26", "27", "28", "29", "30", "31",
346 "mq", "lr", "ctr","ap",
347 "0", "1", "2", "3", "4", "5", "6", "7",
349 /* AltiVec registers. */
350 "0", "1", "2", "3", "4", "5", "6", "7",
351 "8", "9", "10", "11", "12", "13", "14", "15",
352 "16", "17", "18", "19", "20", "21", "22", "23",
353 "24", "25", "26", "27", "28", "29", "30", "31",
359 #ifdef TARGET_REGNAMES
360 static const char alt_reg_names[][8] =
362 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
363 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
364 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
365 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
366 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
367 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
368 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
369 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
370 "mq", "lr", "ctr", "ap",
371 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
373 /* AltiVec registers. */
374 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
375 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
376 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
377 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
384 #ifndef MASK_STRICT_ALIGN
385 #define MASK_STRICT_ALIGN 0
387 #ifndef TARGET_PROFILE_KERNEL
388 #define TARGET_PROFILE_KERNEL 0
391 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
392 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
394 /* Return 1 for a symbol ref for a thread-local storage symbol. */
395 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
396 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
398 /* Initialize the GCC target structure. */
399 #undef TARGET_ATTRIBUTE_TABLE
400 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
401 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
402 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
404 #undef TARGET_ASM_ALIGNED_DI_OP
405 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
407 /* Default unaligned ops are only provided for ELF. Find the ops needed
408 for non-ELF systems. */
409 #ifndef OBJECT_FORMAT_ELF
411 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
413 #undef TARGET_ASM_UNALIGNED_HI_OP
414 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
415 #undef TARGET_ASM_UNALIGNED_SI_OP
416 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
417 #undef TARGET_ASM_UNALIGNED_DI_OP
418 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
421 #undef TARGET_ASM_UNALIGNED_HI_OP
422 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
423 #undef TARGET_ASM_UNALIGNED_SI_OP
424 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
428 /* This hook deals with fixups for relocatable code and DI-mode objects
430 #undef TARGET_ASM_INTEGER
431 #define TARGET_ASM_INTEGER rs6000_assemble_integer
433 #ifdef HAVE_GAS_HIDDEN
434 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
435 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
438 #undef TARGET_HAVE_TLS
439 #define TARGET_HAVE_TLS HAVE_AS_TLS
441 #undef TARGET_CANNOT_FORCE_CONST_MEM
442 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
444 #undef TARGET_ASM_FUNCTION_PROLOGUE
445 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
446 #undef TARGET_ASM_FUNCTION_EPILOGUE
447 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
449 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
450 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
451 #undef TARGET_SCHED_VARIABLE_ISSUE
452 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
454 #undef TARGET_SCHED_ISSUE_RATE
455 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
456 #undef TARGET_SCHED_ADJUST_COST
457 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
458 #undef TARGET_SCHED_ADJUST_PRIORITY
459 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
461 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
462 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
464 #undef TARGET_INIT_BUILTINS
465 #define TARGET_INIT_BUILTINS rs6000_init_builtins
467 #undef TARGET_EXPAND_BUILTIN
468 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
471 #undef TARGET_BINDS_LOCAL_P
472 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
475 #undef TARGET_ASM_OUTPUT_MI_THUNK
476 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
478 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
479 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
481 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
482 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
484 #undef TARGET_RTX_COSTS
485 #define TARGET_RTX_COSTS rs6000_rtx_costs
486 #undef TARGET_ADDRESS_COST
487 #define TARGET_ADDRESS_COST hook_int_rtx_0
489 #undef TARGET_VECTOR_OPAQUE_P
490 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
492 #undef TARGET_DWARF_REGISTER_SPAN
493 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
495 struct gcc_target targetm = TARGET_INITIALIZER;
497 /* Override command line options. Mostly we process the processor
498 type and sometimes adjust other TARGET_ options. */
501 rs6000_override_options (const char *default_cpu)
504 struct rs6000_cpu_select *ptr;
506 /* Simplify the entries below by making a mask for any POWER
507 variant and any PowerPC variant. */
509 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
510 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
511 | MASK_PPC_GFXOPT | MASK_POWERPC64)
512 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
516 const char *const name; /* Canonical processor name. */
517 const enum processor_type processor; /* Processor type enum value. */
518 const int target_enable; /* Target flags to enable. */
519 const int target_disable; /* Target flags to disable. */
520 } const processor_target_table[]
521 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
522 POWER_MASKS | POWERPC_MASKS},
523 {"power", PROCESSOR_POWER,
524 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
525 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
526 {"power2", PROCESSOR_POWER,
527 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
528 POWERPC_MASKS | MASK_NEW_MNEMONICS},
529 {"power3", PROCESSOR_PPC630,
530 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
532 {"power4", PROCESSOR_POWER4,
533 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
535 {"powerpc", PROCESSOR_POWERPC,
536 MASK_POWERPC | MASK_NEW_MNEMONICS,
537 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
538 {"powerpc64", PROCESSOR_POWERPC64,
539 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
540 POWER_MASKS | POWERPC_OPT_MASKS},
541 {"rios", PROCESSOR_RIOS1,
542 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
543 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
544 {"rios1", PROCESSOR_RIOS1,
545 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
546 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
547 {"rsc", PROCESSOR_PPC601,
548 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
549 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
550 {"rsc1", PROCESSOR_PPC601,
551 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
552 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
553 {"rios2", PROCESSOR_RIOS2,
554 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
555 POWERPC_MASKS | MASK_NEW_MNEMONICS},
556 {"rs64a", PROCESSOR_RS64A,
557 MASK_POWERPC | MASK_NEW_MNEMONICS,
558 POWER_MASKS | POWERPC_OPT_MASKS},
559 {"401", PROCESSOR_PPC403,
560 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
561 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
562 {"403", PROCESSOR_PPC403,
563 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
564 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
565 {"405", PROCESSOR_PPC405,
566 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
567 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
568 {"405fp", PROCESSOR_PPC405,
569 MASK_POWERPC | MASK_NEW_MNEMONICS,
570 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
571 {"440", PROCESSOR_PPC440,
572 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
573 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
574 {"440fp", PROCESSOR_PPC440,
575 MASK_POWERPC | MASK_NEW_MNEMONICS,
576 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
577 {"505", PROCESSOR_MPCCORE,
578 MASK_POWERPC | MASK_NEW_MNEMONICS,
579 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
580 {"601", PROCESSOR_PPC601,
581 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
582 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
583 {"602", PROCESSOR_PPC603,
584 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
585 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
586 {"603", PROCESSOR_PPC603,
587 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
588 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
589 {"603e", PROCESSOR_PPC603,
590 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
591 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
592 {"ec603e", PROCESSOR_PPC603,
593 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
594 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
595 {"604", PROCESSOR_PPC604,
596 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
597 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
598 {"604e", PROCESSOR_PPC604e,
599 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
600 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
601 {"620", PROCESSOR_PPC620,
602 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 {"630", PROCESSOR_PPC630,
605 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
607 {"740", PROCESSOR_PPC750,
608 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
609 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
610 {"750", PROCESSOR_PPC750,
611 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
612 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
613 {"7400", PROCESSOR_PPC7400,
614 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
615 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
616 {"7450", PROCESSOR_PPC7450,
617 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
618 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
619 {"8540", PROCESSOR_PPC8540,
620 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
621 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
622 {"801", PROCESSOR_MPCCORE,
623 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
624 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
625 {"821", PROCESSOR_MPCCORE,
626 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
627 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
628 {"823", PROCESSOR_MPCCORE,
629 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
630 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
631 {"860", PROCESSOR_MPCCORE,
632 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
633 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
634 {"970", PROCESSOR_POWER4,
635 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
638 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
640 /* Save current -mmultiple/-mno-multiple status. */
641 int multiple = TARGET_MULTIPLE;
642 /* Save current -mstring/-mno-string status. */
643 int string = TARGET_STRING;
645 /* Identify the processor type. */
646 rs6000_select[0].string = default_cpu;
647 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
649 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
651 ptr = &rs6000_select[i];
652 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
654 for (j = 0; j < ptt_size; j++)
655 if (! strcmp (ptr->string, processor_target_table[j].name))
658 rs6000_cpu = processor_target_table[j].processor;
662 target_flags |= processor_target_table[j].target_enable;
663 target_flags &= ~processor_target_table[j].target_disable;
669 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
676 /* If we are optimizing big endian systems for space, use the load/store
677 multiple and string instructions. */
678 if (BYTES_BIG_ENDIAN && optimize_size)
679 target_flags |= MASK_MULTIPLE | MASK_STRING;
681 /* If -mmultiple or -mno-multiple was explicitly used, don't
682 override with the processor default */
683 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
684 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
686 /* If -mstring or -mno-string was explicitly used, don't override
687 with the processor default. */
688 if ((target_flags_explicit & MASK_STRING) != 0)
689 target_flags = (target_flags & ~MASK_STRING) | string;
691 /* Don't allow -mmultiple or -mstring on little endian systems
692 unless the cpu is a 750, because the hardware doesn't support the
693 instructions used in little endian mode, and causes an alignment
694 trap. The 750 does not cause an alignment trap (except when the
695 target is unaligned). */
697 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
701 target_flags &= ~MASK_MULTIPLE;
702 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
703 warning ("-mmultiple is not supported on little endian systems");
708 target_flags &= ~MASK_STRING;
709 if ((target_flags_explicit & MASK_STRING) != 0)
710 warning ("-mstring is not supported on little endian systems");
714 /* Set debug flags */
715 if (rs6000_debug_name)
717 if (! strcmp (rs6000_debug_name, "all"))
718 rs6000_debug_stack = rs6000_debug_arg = 1;
719 else if (! strcmp (rs6000_debug_name, "stack"))
720 rs6000_debug_stack = 1;
721 else if (! strcmp (rs6000_debug_name, "arg"))
722 rs6000_debug_arg = 1;
724 error ("unknown -mdebug-%s switch", rs6000_debug_name);
727 if (rs6000_traceback_name)
729 if (! strncmp (rs6000_traceback_name, "full", 4))
730 rs6000_traceback = traceback_full;
731 else if (! strncmp (rs6000_traceback_name, "part", 4))
732 rs6000_traceback = traceback_part;
733 else if (! strncmp (rs6000_traceback_name, "no", 2))
734 rs6000_traceback = traceback_none;
736 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
737 rs6000_traceback_name);
740 /* Set size of long double */
741 rs6000_long_double_type_size = 64;
742 if (rs6000_long_double_size_string)
745 int size = strtol (rs6000_long_double_size_string, &tail, 10);
746 if (*tail != '\0' || (size != 64 && size != 128))
747 error ("Unknown switch -mlong-double-%s",
748 rs6000_long_double_size_string);
750 rs6000_long_double_type_size = size;
753 /* Handle -mabi= options. */
754 rs6000_parse_abi_options ();
756 /* Handle -malign-XXXXX option. */
757 rs6000_parse_alignment_option ();
759 /* Handle generic -mFOO=YES/NO options. */
760 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
761 &rs6000_altivec_vrsave);
762 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
764 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
765 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
768 /* Handle -mtls-size option. */
769 rs6000_parse_tls_size_option ();
771 #ifdef SUBTARGET_OVERRIDE_OPTIONS
772 SUBTARGET_OVERRIDE_OPTIONS;
774 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
775 SUBSUBTARGET_OVERRIDE_OPTIONS;
780 /* The e500 does not have string instructions, and we set
781 MASK_STRING above when optimizing for size. */
782 if ((target_flags & MASK_STRING) != 0)
783 target_flags = target_flags & ~MASK_STRING;
785 /* No SPE means 64-bit long doubles, even if an E500. */
786 if (rs6000_spe_string != 0
787 && !strcmp (rs6000_spe_string, "no"))
788 rs6000_long_double_type_size = 64;
790 else if (rs6000_select[1].string != NULL)
792 /* For the powerpc-eabispe configuration, we set all these by
793 default, so let's unset them if we manually set another
794 CPU that is not the E500. */
795 if (rs6000_abi_string == 0)
797 if (rs6000_spe_string == 0)
799 if (rs6000_float_gprs_string == 0)
800 rs6000_float_gprs = 0;
801 if (rs6000_isel_string == 0)
803 if (rs6000_long_double_size_string == 0)
804 rs6000_long_double_type_size = 64;
807 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
808 using TARGET_OPTIONS to handle a toggle switch, but we're out of
809 bits in target_flags so TARGET_SWITCHES cannot be used.
810 Assumption here is that rs6000_longcall_switch points into the
811 text of the complete option, rather than being a copy, so we can
812 scan back for the presence or absence of the no- modifier. */
813 if (rs6000_longcall_switch)
815 const char *base = rs6000_longcall_switch;
816 while (base[-1] != 'm') base--;
818 if (*rs6000_longcall_switch != '\0')
819 error ("invalid option `%s'", base);
820 rs6000_default_long_calls = (base[0] != 'n');
823 #ifdef TARGET_REGNAMES
824 /* If the user desires alternate register names, copy in the
825 alternate names now. */
827 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
830 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
831 If -maix-struct-return or -msvr4-struct-return was explicitly
832 used, don't override with the ABI default. */
833 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
835 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
836 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
838 target_flags |= MASK_AIX_STRUCT_RET;
841 if (TARGET_LONG_DOUBLE_128
842 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
843 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
845 /* Allocate an alias set for register saves & restores from stack. */
846 rs6000_sr_alias_set = new_alias_set ();
849 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
851 /* We can only guarantee the availability of DI pseudo-ops when
852 assembling for 64-bit targets. */
855 targetm.asm_out.aligned_op.di = NULL;
856 targetm.asm_out.unaligned_op.di = NULL;
859 /* Set maximum branch target alignment at two instructions, eight bytes. */
860 align_jumps_max_skip = 8;
861 align_loops_max_skip = 8;
863 /* Arrange to save and restore machine status around nested functions. */
864 init_machine_status = rs6000_init_machine_status;
867 /* Handle generic options of the form -mfoo=yes/no.
868 NAME is the option name.
869 VALUE is the option value.
870 FLAG is the pointer to the flag where to store a 1 or 0, depending on
871 whether the option value is 'yes' or 'no' respectively. */
873 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
877 else if (!strcmp (value, "yes"))
879 else if (!strcmp (value, "no"))
882 error ("unknown -m%s= option specified: '%s'", name, value);
885 /* Handle -mabi= options. */
887 rs6000_parse_abi_options ()
889 if (rs6000_abi_string == 0)
891 else if (! strcmp (rs6000_abi_string, "altivec"))
892 rs6000_altivec_abi = 1;
893 else if (! strcmp (rs6000_abi_string, "no-altivec"))
894 rs6000_altivec_abi = 0;
895 else if (! strcmp (rs6000_abi_string, "spe"))
899 error ("not configured for ABI: '%s'", rs6000_abi_string);
902 else if (! strcmp (rs6000_abi_string, "no-spe"))
905 error ("unknown ABI specified: '%s'", rs6000_abi_string);
908 /* Handle -malign-XXXXXX options. */
910 rs6000_parse_alignment_option ()
912 if (rs6000_alignment_string == 0
913 || ! strcmp (rs6000_alignment_string, "power"))
914 rs6000_alignment_flags = MASK_ALIGN_POWER;
915 else if (! strcmp (rs6000_alignment_string, "natural"))
916 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
918 error ("unknown -malign-XXXXX option specified: '%s'",
919 rs6000_alignment_string);
922 /* Validate and record the size specified with the -mtls-size option. */
925 rs6000_parse_tls_size_option ()
927 if (rs6000_tls_size_string == 0)
929 else if (strcmp (rs6000_tls_size_string, "16") == 0)
930 rs6000_tls_size = 16;
931 else if (strcmp (rs6000_tls_size_string, "32") == 0)
932 rs6000_tls_size = 32;
933 else if (strcmp (rs6000_tls_size_string, "64") == 0)
934 rs6000_tls_size = 64;
936 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
940 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
944 /* Do anything needed at the start of the asm file. */
951 const char *start = buffer;
952 struct rs6000_cpu_select *ptr;
953 const char *default_cpu = TARGET_CPU_DEFAULT;
954 FILE *file = asm_out_file;
956 default_file_start ();
958 #ifdef TARGET_BI_ARCH
959 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
963 if (flag_verbose_asm)
965 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
966 rs6000_select[0].string = default_cpu;
968 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
970 ptr = &rs6000_select[i];
971 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
973 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
979 switch (rs6000_sdata)
981 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
982 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
983 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
984 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
987 if (rs6000_sdata && g_switch_value)
989 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1000 /* Return nonzero if this function is known to have a null epilogue. */
1005 if (reload_completed)
1007 rs6000_stack_t *info = rs6000_stack_info ();
1009 if (info->first_gp_reg_save == 32
1010 && info->first_fp_reg_save == 64
1011 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1012 && ! info->lr_save_p
1013 && ! info->cr_save_p
1014 && info->vrsave_mask == 0
1022 /* Returns 1 always. */
1025 any_operand (rtx op ATTRIBUTE_UNUSED,
1026 enum machine_mode mode ATTRIBUTE_UNUSED)
1031 /* Returns 1 if op is the count register. */
1033 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1035 if (GET_CODE (op) != REG)
1038 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1041 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1047 /* Returns 1 if op is an altivec register. */
1049 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1052 return (register_operand (op, mode)
1053 && (GET_CODE (op) != REG
1054 || REGNO (op) > FIRST_PSEUDO_REGISTER
1055 || ALTIVEC_REGNO_P (REGNO (op))));
1059 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1061 if (GET_CODE (op) != REG)
1064 if (XER_REGNO_P (REGNO (op)))
1070 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1071 by such constants completes more quickly. */
1074 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1076 return ( GET_CODE (op) == CONST_INT
1077 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1080 /* Return 1 if OP is a constant that can fit in a D field. */
1083 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1085 return (GET_CODE (op) == CONST_INT
1086 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1089 /* Similar for an unsigned D field. */
1092 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1094 return (GET_CODE (op) == CONST_INT
1095 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1098 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1101 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1103 return (GET_CODE (op) == CONST_INT
1104 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1107 /* Returns 1 if OP is a CONST_INT that is a positive value
1108 and an exact power of 2. */
1111 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1113 return (GET_CODE (op) == CONST_INT
1115 && exact_log2 (INTVAL (op)) >= 0);
1118 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1122 gpc_reg_operand (rtx op, enum machine_mode mode)
1124 return (register_operand (op, mode)
1125 && (GET_CODE (op) != REG
1126 || (REGNO (op) >= ARG_POINTER_REGNUM
1127 && !XER_REGNO_P (REGNO (op)))
1128 || REGNO (op) < MQ_REGNO));
1131 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1135 cc_reg_operand (rtx op, enum machine_mode mode)
1137 return (register_operand (op, mode)
1138 && (GET_CODE (op) != REG
1139 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1140 || CR_REGNO_P (REGNO (op))));
1143 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1144 CR field that isn't CR0. */
1147 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1149 return (register_operand (op, mode)
1150 && (GET_CODE (op) != REG
1151 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1152 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1155 /* Returns 1 if OP is either a constant integer valid for a D-field or
1156 a non-special register. If a register, it must be in the proper
1157 mode unless MODE is VOIDmode. */
1160 reg_or_short_operand (rtx op, enum machine_mode mode)
1162 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1165 /* Similar, except check if the negation of the constant would be
1166 valid for a D-field. */
1169 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1171 if (GET_CODE (op) == CONST_INT)
1172 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1174 return gpc_reg_operand (op, mode);
1177 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1178 a non-special register. If a register, it must be in the proper
1179 mode unless MODE is VOIDmode. */
1182 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1184 if (gpc_reg_operand (op, mode))
1186 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1193 /* Return 1 if the operand is either a register or an integer whose
1194 high-order 16 bits are zero. */
1197 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1199 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1202 /* Return 1 is the operand is either a non-special register or ANY
1203 constant integer. */
1206 reg_or_cint_operand (rtx op, enum machine_mode mode)
1208 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1211 /* Return 1 is the operand is either a non-special register or ANY
1212 32-bit signed constant integer. */
1215 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1217 return (gpc_reg_operand (op, mode)
1218 || (GET_CODE (op) == CONST_INT
1219 #if HOST_BITS_PER_WIDE_INT != 32
1220 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1221 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1226 /* Return 1 is the operand is either a non-special register or a 32-bit
1227 signed constant integer valid for 64-bit addition. */
1230 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1232 return (gpc_reg_operand (op, mode)
1233 || (GET_CODE (op) == CONST_INT
1234 #if HOST_BITS_PER_WIDE_INT == 32
1235 && INTVAL (op) < 0x7fff8000
1237 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1243 /* Return 1 is the operand is either a non-special register or a 32-bit
1244 signed constant integer valid for 64-bit subtraction. */
1247 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1249 return (gpc_reg_operand (op, mode)
1250 || (GET_CODE (op) == CONST_INT
1251 #if HOST_BITS_PER_WIDE_INT == 32
1252 && (- INTVAL (op)) < 0x7fff8000
1254 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1260 /* Return 1 is the operand is either a non-special register or ANY
1261 32-bit unsigned constant integer. */
1264 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1266 if (GET_CODE (op) == CONST_INT)
1268 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1270 if (GET_MODE_BITSIZE (mode) <= 32)
1273 if (INTVAL (op) < 0)
1277 return ((INTVAL (op) & GET_MODE_MASK (mode)
1278 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1280 else if (GET_CODE (op) == CONST_DOUBLE)
1282 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1286 return CONST_DOUBLE_HIGH (op) == 0;
1289 return gpc_reg_operand (op, mode);
1292 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1295 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1297 return (GET_CODE (op) == SYMBOL_REF
1298 || GET_CODE (op) == CONST
1299 || GET_CODE (op) == LABEL_REF);
1302 /* Return 1 if the operand is a simple references that can be loaded via
1303 the GOT (labels involving addition aren't allowed). */
1306 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1308 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1311 /* Return the number of instructions it takes to form a constant in an
1312 integer register. */
1315 num_insns_constant_wide (HOST_WIDE_INT value)
1317 /* signed constant loadable with {cal|addi} */
1318 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1321 /* constant loadable with {cau|addis} */
1322 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1325 #if HOST_BITS_PER_WIDE_INT == 64
1326 else if (TARGET_POWERPC64)
1328 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1329 HOST_WIDE_INT high = value >> 31;
1331 if (high == 0 || high == -1)
1337 return num_insns_constant_wide (high) + 1;
1339 return (num_insns_constant_wide (high)
1340 + num_insns_constant_wide (low) + 1);
1349 num_insns_constant (rtx op, enum machine_mode mode)
1351 if (GET_CODE (op) == CONST_INT)
1353 #if HOST_BITS_PER_WIDE_INT == 64
1354 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1355 && mask64_operand (op, mode))
1359 return num_insns_constant_wide (INTVAL (op));
1362 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1367 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1368 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1369 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1372 else if (GET_CODE (op) == CONST_DOUBLE)
1378 int endian = (WORDS_BIG_ENDIAN == 0);
1380 if (mode == VOIDmode || mode == DImode)
1382 high = CONST_DOUBLE_HIGH (op);
1383 low = CONST_DOUBLE_LOW (op);
1387 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1388 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1390 low = l[1 - endian];
1394 return (num_insns_constant_wide (low)
1395 + num_insns_constant_wide (high));
1399 if (high == 0 && low >= 0)
1400 return num_insns_constant_wide (low);
1402 else if (high == -1 && low < 0)
1403 return num_insns_constant_wide (low);
1405 else if (mask64_operand (op, mode))
1409 return num_insns_constant_wide (high) + 1;
1412 return (num_insns_constant_wide (high)
1413 + num_insns_constant_wide (low) + 1);
1421 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1422 register with one instruction per word. We only do this if we can
1423 safely read CONST_DOUBLE_{LOW,HIGH}. */
1426 easy_fp_constant (rtx op, enum machine_mode mode)
1428 if (GET_CODE (op) != CONST_DOUBLE
1429 || GET_MODE (op) != mode
1430 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1433 /* Consider all constants with -msoft-float to be easy. */
1434 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1438 /* If we are using V.4 style PIC, consider all constants to be hard. */
1439 if (flag_pic && DEFAULT_ABI == ABI_V4)
1442 #ifdef TARGET_RELOCATABLE
1443 /* Similarly if we are using -mrelocatable, consider all constants
1445 if (TARGET_RELOCATABLE)
1454 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1455 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1457 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1458 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1459 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1460 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1463 else if (mode == DFmode)
1468 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1469 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1471 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1472 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1475 else if (mode == SFmode)
1480 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1481 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1483 return num_insns_constant_wide (l) == 1;
1486 else if (mode == DImode)
1487 return ((TARGET_POWERPC64
1488 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1489 || (num_insns_constant (op, DImode) <= 2));
1491 else if (mode == SImode)
1497 /* Return nonzero if all elements of a vector have the same value. */
1500 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1504 units = CONST_VECTOR_NUNITS (op);
1506 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1507 for (i = 1; i < units; ++i)
1508 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1515 /* Return 1 if the operand is a CONST_INT and can be put into a
1516 register without using memory. */
1519 easy_vector_constant (rtx op, enum machine_mode mode)
1523 if (GET_CODE (op) != CONST_VECTOR
1528 if (zero_constant (op, mode)
1529 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1530 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1533 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1536 if (TARGET_SPE && mode == V1DImode)
1539 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1540 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1542 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1544 evmergelo r0, r0, r0
1547 I don't know how efficient it would be to allow bigger constants,
1548 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1549 instructions is better than a 64-bit memory load, but I don't
1550 have the e500 timing specs. */
1551 if (TARGET_SPE && mode == V2SImode
1552 && cst >= -0x7fff && cst <= 0x7fff
1553 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1556 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1559 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1565 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1568 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1572 if (!easy_vector_constant (op, mode))
1575 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1577 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1581 output_vec_const_move (rtx *operands)
1584 enum machine_mode mode;
1590 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1591 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1592 mode = GET_MODE (dest);
1596 if (zero_constant (vec, mode))
1597 return "vxor %0,%0,%0";
1598 else if (EASY_VECTOR_15 (cst, vec, mode))
1600 operands[1] = GEN_INT (cst);
1604 return "vspltisw %0,%1";
1606 return "vspltish %0,%1";
1608 return "vspltisb %0,%1";
1613 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1621 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1622 pattern of V1DI, V4HI, and V2SF.
1624 FIXME: We should probably return # and add post reload
1625 splitters for these, but this way is so easy ;-).
1627 operands[1] = GEN_INT (cst);
1628 operands[2] = GEN_INT (cst2);
1630 return "li %0,%1\n\tevmergelo %0,%0,%0";
1632 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1638 /* Return 1 if the operand is the constant 0. This works for scalars
1639 as well as vectors. */
1641 zero_constant (rtx op, enum machine_mode mode)
1643 return op == CONST0_RTX (mode);
1646 /* Return 1 if the operand is 0.0. */
1648 zero_fp_constant (rtx op, enum machine_mode mode)
1650 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1653 /* Return 1 if the operand is in volatile memory. Note that during
1654 the RTL generation phase, memory_operand does not return TRUE for
1655 volatile memory references. So this function allows us to
1656 recognize volatile references where its safe. */
1659 volatile_mem_operand (rtx op, enum machine_mode mode)
1661 if (GET_CODE (op) != MEM)
1664 if (!MEM_VOLATILE_P (op))
1667 if (mode != GET_MODE (op))
1670 if (reload_completed)
1671 return memory_operand (op, mode);
1673 if (reload_in_progress)
1674 return strict_memory_address_p (mode, XEXP (op, 0));
1676 return memory_address_p (mode, XEXP (op, 0));
1679 /* Return 1 if the operand is an offsettable memory operand. */
1682 offsettable_mem_operand (rtx op, enum machine_mode mode)
1684 return ((GET_CODE (op) == MEM)
1685 && offsettable_address_p (reload_completed || reload_in_progress,
1686 mode, XEXP (op, 0)));
1689 /* Return 1 if the operand is either an easy FP constant (see above) or
1693 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1695 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1698 /* Return 1 if the operand is either a non-special register or an item
1699 that can be used as the operand of a `mode' add insn. */
1702 add_operand (rtx op, enum machine_mode mode)
1704 if (GET_CODE (op) == CONST_INT)
1705 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1706 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1708 return gpc_reg_operand (op, mode);
1711 /* Return 1 if OP is a constant but not a valid add_operand. */
1714 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1716 return (GET_CODE (op) == CONST_INT
1717 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1718 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1721 /* Return 1 if the operand is a non-special register or a constant that
1722 can be used as the operand of an OR or XOR insn on the RS/6000. */
1725 logical_operand (rtx op, enum machine_mode mode)
1727 HOST_WIDE_INT opl, oph;
1729 if (gpc_reg_operand (op, mode))
1732 if (GET_CODE (op) == CONST_INT)
1734 opl = INTVAL (op) & GET_MODE_MASK (mode);
1736 #if HOST_BITS_PER_WIDE_INT <= 32
1737 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1741 else if (GET_CODE (op) == CONST_DOUBLE)
1743 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1746 opl = CONST_DOUBLE_LOW (op);
1747 oph = CONST_DOUBLE_HIGH (op);
1754 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1755 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1758 /* Return 1 if C is a constant that is not a logical operand (as
1759 above), but could be split into one. */
1762 non_logical_cint_operand (rtx op, enum machine_mode mode)
1764 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1765 && ! logical_operand (op, mode)
1766 && reg_or_logical_cint_operand (op, mode));
1769 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1770 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1771 Reject all ones and all zeros, since these should have been optimized
1772 away and confuse the making of MB and ME. */
1775 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1777 HOST_WIDE_INT c, lsb;
1779 if (GET_CODE (op) != CONST_INT)
1784 /* Fail in 64-bit mode if the mask wraps around because the upper
1785 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1786 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1789 /* We don't change the number of transitions by inverting,
1790 so make sure we start with the LS bit zero. */
1794 /* Reject all zeros or all ones. */
1798 /* Find the first transition. */
1801 /* Invert to look for a second transition. */
1804 /* Erase first transition. */
1807 /* Find the second transition (if any). */
1810 /* Match if all the bits above are 1's (or c is zero). */
1814 /* Return 1 for the PowerPC64 rlwinm corner case. */
1817 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1819 HOST_WIDE_INT c, lsb;
1821 if (GET_CODE (op) != CONST_INT)
1826 if ((c & 0x80000001) != 0x80000001)
1840 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1841 It is if there are no more than one 1->0 or 0->1 transitions.
1842 Reject all zeros, since zero should have been optimized away and
1843 confuses the making of MB and ME. */
1846 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1848 if (GET_CODE (op) == CONST_INT)
1850 HOST_WIDE_INT c, lsb;
1854 /* Reject all zeros. */
1858 /* We don't change the number of transitions by inverting,
1859 so make sure we start with the LS bit zero. */
1863 /* Find the transition, and check that all bits above are 1's. */
1866 /* Match if all the bits above are 1's (or c is zero). */
1872 /* Like mask64_operand, but allow up to three transitions. This
1873 predicate is used by insn patterns that generate two rldicl or
1874 rldicr machine insns. */
1877 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1879 if (GET_CODE (op) == CONST_INT)
1881 HOST_WIDE_INT c, lsb;
1885 /* Disallow all zeros. */
1889 /* We don't change the number of transitions by inverting,
1890 so make sure we start with the LS bit zero. */
1894 /* Find the first transition. */
1897 /* Invert to look for a second transition. */
1900 /* Erase first transition. */
1903 /* Find the second transition. */
1906 /* Invert to look for a third transition. */
1909 /* Erase second transition. */
1912 /* Find the third transition (if any). */
1915 /* Match if all the bits above are 1's (or c is zero). */
1921 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1922 implement ANDing by the mask IN. */
1924 build_mask64_2_operands (rtx in, rtx *out)
1926 #if HOST_BITS_PER_WIDE_INT >= 64
1927 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1930 if (GET_CODE (in) != CONST_INT)
1936 /* Assume c initially something like 0x00fff000000fffff. The idea
1937 is to rotate the word so that the middle ^^^^^^ group of zeros
1938 is at the MS end and can be cleared with an rldicl mask. We then
1939 rotate back and clear off the MS ^^ group of zeros with a
1941 c = ~c; /* c == 0xff000ffffff00000 */
1942 lsb = c & -c; /* lsb == 0x0000000000100000 */
1943 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1944 c = ~c; /* c == 0x00fff000000fffff */
1945 c &= -lsb; /* c == 0x00fff00000000000 */
1946 lsb = c & -c; /* lsb == 0x0000100000000000 */
1947 c = ~c; /* c == 0xff000fffffffffff */
1948 c &= -lsb; /* c == 0xff00000000000000 */
1950 while ((lsb >>= 1) != 0)
1951 shift++; /* shift == 44 on exit from loop */
1952 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1953 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1954 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1958 /* Assume c initially something like 0xff000f0000000000. The idea
1959 is to rotate the word so that the ^^^ middle group of zeros
1960 is at the LS end and can be cleared with an rldicr mask. We then
1961 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1963 lsb = c & -c; /* lsb == 0x0000010000000000 */
1964 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1965 c = ~c; /* c == 0x00fff0ffffffffff */
1966 c &= -lsb; /* c == 0x00fff00000000000 */
1967 lsb = c & -c; /* lsb == 0x0000100000000000 */
1968 c = ~c; /* c == 0xff000fffffffffff */
1969 c &= -lsb; /* c == 0xff00000000000000 */
1971 while ((lsb >>= 1) != 0)
1972 shift++; /* shift == 44 on exit from loop */
1973 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1974 m1 >>= shift; /* m1 == 0x0000000000000fff */
1975 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1978 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1979 masks will be all 1's. We are guaranteed more than one transition. */
1980 out[0] = GEN_INT (64 - shift);
1981 out[1] = GEN_INT (m1);
1982 out[2] = GEN_INT (shift);
1983 out[3] = GEN_INT (m2);
1991 /* Return 1 if the operand is either a non-special register or a constant
1992 that can be used as the operand of a PowerPC64 logical AND insn. */
1995 and64_operand (rtx op, enum machine_mode mode)
1997 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1998 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2000 return (logical_operand (op, mode) || mask64_operand (op, mode));
2003 /* Like the above, but also match constants that can be implemented
2004 with two rldicl or rldicr insns. */
2007 and64_2_operand (rtx op, enum machine_mode mode)
2009 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2010 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2012 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2015 /* Return 1 if the operand is either a non-special register or a
2016 constant that can be used as the operand of an RS/6000 logical AND insn. */
2019 and_operand (rtx op, enum machine_mode mode)
2021 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2022 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2024 return (logical_operand (op, mode) || mask_operand (op, mode));
2027 /* Return 1 if the operand is a general register or memory operand. */
2030 reg_or_mem_operand (rtx op, enum machine_mode mode)
2032 return (gpc_reg_operand (op, mode)
2033 || memory_operand (op, mode)
2034 || volatile_mem_operand (op, mode));
2037 /* Return 1 if the operand is a general register or memory operand without
2038 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2042 lwa_operand (rtx op, enum machine_mode mode)
2046 if (reload_completed && GET_CODE (inner) == SUBREG)
2047 inner = SUBREG_REG (inner);
2049 return gpc_reg_operand (inner, mode)
2050 || (memory_operand (inner, mode)
2051 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2052 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2053 && (GET_CODE (XEXP (inner, 0)) != PLUS
2054 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2055 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2058 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2061 symbol_ref_operand (rtx op, enum machine_mode mode)
2063 if (mode != VOIDmode && GET_MODE (op) != mode)
2066 return (GET_CODE (op) == SYMBOL_REF
2067 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2070 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2071 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2074 call_operand (rtx op, enum machine_mode mode)
2076 if (mode != VOIDmode && GET_MODE (op) != mode)
2079 return (GET_CODE (op) == SYMBOL_REF
2080 || (GET_CODE (op) == REG
2081 && (REGNO (op) == LINK_REGISTER_REGNUM
2082 || REGNO (op) == COUNT_REGISTER_REGNUM
2083 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2086 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2090 current_file_function_operand (rtx op,
2091 enum machine_mode mode ATTRIBUTE_UNUSED)
2093 return (GET_CODE (op) == SYMBOL_REF
2094 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2095 && (SYMBOL_REF_LOCAL_P (op)
2096 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2099 /* Return 1 if this operand is a valid input for a move insn. */
2102 input_operand (rtx op, enum machine_mode mode)
2104 /* Memory is always valid. */
2105 if (memory_operand (op, mode))
2108 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2109 if (GET_CODE (op) == CONSTANT_P_RTX)
2112 /* For floating-point, easy constants are valid. */
2113 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2115 && easy_fp_constant (op, mode))
2118 /* Allow any integer constant. */
2119 if (GET_MODE_CLASS (mode) == MODE_INT
2120 && (GET_CODE (op) == CONST_INT
2121 || GET_CODE (op) == CONST_DOUBLE))
2124 /* Allow easy vector constants. */
2125 if (GET_CODE (op) == CONST_VECTOR
2126 && easy_vector_constant (op, mode))
2129 /* For floating-point or multi-word mode, the only remaining valid type
2131 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2132 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2133 return register_operand (op, mode);
2135 /* The only cases left are integral modes one word or smaller (we
2136 do not get called for MODE_CC values). These can be in any
2138 if (register_operand (op, mode))
2141 /* A SYMBOL_REF referring to the TOC is valid. */
2142 if (legitimate_constant_pool_address_p (op))
2145 /* A constant pool expression (relative to the TOC) is valid */
2146 if (toc_relative_expr_p (op))
2149 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2151 if (DEFAULT_ABI == ABI_V4
2152 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2153 && small_data_operand (op, Pmode))
2159 /* Return 1 for an operand in small memory on V.4/eabi. */
2162 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2163 enum machine_mode mode ATTRIBUTE_UNUSED)
2168 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2171 if (DEFAULT_ABI != ABI_V4)
2174 if (GET_CODE (op) == SYMBOL_REF)
2177 else if (GET_CODE (op) != CONST
2178 || GET_CODE (XEXP (op, 0)) != PLUS
2179 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2180 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2185 rtx sum = XEXP (op, 0);
2186 HOST_WIDE_INT summand;
2188 /* We have to be careful here, because it is the referenced address
2189 that must be 32k from _SDA_BASE_, not just the symbol. */
2190 summand = INTVAL (XEXP (sum, 1));
2191 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2194 sym_ref = XEXP (sum, 0);
2197 return SYMBOL_REF_SMALL_P (sym_ref);
2203 /* Return 1 for all valid move insn operand combination involving altivec
2207 altivec_in_gprs_p (rtx op0, rtx op1)
2209 if (REG_P (op0) && REGNO_REG_CLASS (REGNO (op0)) == GENERAL_REGS)
2212 if (REG_P (op1) && REGNO_REG_CLASS (REGNO (op1)) == GENERAL_REGS)
2218 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2221 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2223 switch (GET_CODE(op))
2226 if (RS6000_SYMBOL_REF_TLS_P (op))
2228 else if (CONSTANT_POOL_ADDRESS_P (op))
2230 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2238 else if (! strcmp (XSTR (op, 0), toc_label_name))
2247 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2248 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2250 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2259 constant_pool_expr_p (rtx op)
2263 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2267 toc_relative_expr_p (rtx op)
2271 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2274 /* SPE offset addressing is limited to 5-bits worth of double words. */
2275 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2278 legitimate_constant_pool_address_p (rtx x)
2281 && GET_CODE (x) == PLUS
2282 && GET_CODE (XEXP (x, 0)) == REG
2283 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2284 && constant_pool_expr_p (XEXP (x, 1)));
2288 legitimate_small_data_p (enum machine_mode mode, rtx x)
2290 return (DEFAULT_ABI == ABI_V4
2291 && !flag_pic && !TARGET_TOC
2292 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2293 && small_data_operand (x, mode));
2297 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2299 unsigned HOST_WIDE_INT offset, extra;
2301 if (GET_CODE (x) != PLUS)
2303 if (GET_CODE (XEXP (x, 0)) != REG)
2305 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2307 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2310 offset = INTVAL (XEXP (x, 1));
2318 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2319 which leaves the only valid constant offset of zero, which by
2320 canonicalization rules is also invalid. */
2327 /* SPE vector modes. */
2328 return SPE_CONST_OFFSET_OK (offset);
2334 else if (offset & 3)
2342 else if (offset & 3)
2352 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2356 legitimate_indexed_address_p (rtx x, int strict)
2360 if (GET_CODE (x) != PLUS)
2365 if (!REG_P (op0) || !REG_P (op1))
2368 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2369 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2370 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2371 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2375 legitimate_indirect_address_p (rtx x, int strict)
2377 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2381 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2383 if (GET_CODE (x) != LO_SUM)
2385 if (GET_CODE (XEXP (x, 0)) != REG)
2387 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2393 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2397 if (GET_MODE_NUNITS (mode) != 1)
2399 if (GET_MODE_BITSIZE (mode) > 32
2400 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2403 return CONSTANT_P (x);
2410 /* Try machine-dependent ways of modifying an illegitimate address
2411 to be legitimate. If we find one, return the new, valid address.
2412 This is used from only one place: `memory_address' in explow.c.
2414 OLDX is the address as it was before break_out_memory_refs was
2415 called. In some cases it is useful to look at this to decide what
2418 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2420 It is always safe for this function to do nothing. It exists to
2421 recognize opportunities to optimize the output.
2423 On RS/6000, first check for the sum of a register with a constant
2424 integer that is out of range. If so, generate code to add the
2425 constant with the low-order 16 bits masked to the register and force
2426 this result into another register (this can be done with `cau').
2427 Then generate an address of REG+(CONST&0xffff), allowing for the
2428 possibility of bit 16 being a one.
2430 Then check for the sum of a register and something not constant, try to
2431 load the other things into a register and return the sum. */
2434 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2435 enum machine_mode mode)
2437 if (GET_CODE (x) == SYMBOL_REF)
2439 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2441 return rs6000_legitimize_tls_address (x, model);
2444 if (GET_CODE (x) == PLUS
2445 && GET_CODE (XEXP (x, 0)) == REG
2446 && GET_CODE (XEXP (x, 1)) == CONST_INT
2447 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2449 HOST_WIDE_INT high_int, low_int;
2451 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2452 high_int = INTVAL (XEXP (x, 1)) - low_int;
2453 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2454 GEN_INT (high_int)), 0);
2455 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2457 else if (GET_CODE (x) == PLUS
2458 && GET_CODE (XEXP (x, 0)) == REG
2459 && GET_CODE (XEXP (x, 1)) != CONST_INT
2460 && GET_MODE_NUNITS (mode) == 1
2461 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2463 || (mode != DFmode && mode != TFmode))
2464 && (TARGET_POWERPC64 || mode != DImode)
2467 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2468 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2470 else if (ALTIVEC_VECTOR_MODE (mode))
2474 /* Make sure both operands are registers. */
2475 if (GET_CODE (x) == PLUS)
2476 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2477 force_reg (Pmode, XEXP (x, 1)));
2479 reg = force_reg (Pmode, x);
2482 else if (SPE_VECTOR_MODE (mode))
2484 /* We accept [reg + reg] and [reg + OFFSET]. */
2486 if (GET_CODE (x) == PLUS)
2488 rtx op1 = XEXP (x, 0);
2489 rtx op2 = XEXP (x, 1);
2491 op1 = force_reg (Pmode, op1);
2493 if (GET_CODE (op2) != REG
2494 && (GET_CODE (op2) != CONST_INT
2495 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2496 op2 = force_reg (Pmode, op2);
2498 return gen_rtx_PLUS (Pmode, op1, op2);
2501 return force_reg (Pmode, x);
2507 && GET_CODE (x) != CONST_INT
2508 && GET_CODE (x) != CONST_DOUBLE
2510 && GET_MODE_NUNITS (mode) == 1
2511 && (GET_MODE_BITSIZE (mode) <= 32
2512 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2514 rtx reg = gen_reg_rtx (Pmode);
2515 emit_insn (gen_elf_high (reg, (x)));
2516 return gen_rtx_LO_SUM (Pmode, reg, (x));
2518 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2521 && ! MACHO_DYNAMIC_NO_PIC_P
2523 && GET_CODE (x) != CONST_INT
2524 && GET_CODE (x) != CONST_DOUBLE
2526 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2530 rtx reg = gen_reg_rtx (Pmode);
2531 emit_insn (gen_macho_high (reg, (x)));
2532 return gen_rtx_LO_SUM (Pmode, reg, (x));
2535 && constant_pool_expr_p (x)
2536 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2538 return create_TOC_reference (x);
2544 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2546 static GTY(()) rtx rs6000_tls_symbol;
2548 rs6000_tls_get_addr ()
2550 if (!rs6000_tls_symbol)
2551 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2553 return rs6000_tls_symbol;
2556 /* Construct the SYMBOL_REF for TLS GOT references. */
2558 static GTY(()) rtx rs6000_got_symbol;
2562 if (!rs6000_got_symbol)
2564 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2565 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2566 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2569 return rs6000_got_symbol;
2572 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2573 this (thread-local) address. */
2576 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2580 dest = gen_reg_rtx (Pmode);
2581 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2587 tlsreg = gen_rtx_REG (Pmode, 13);
2588 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2592 tlsreg = gen_rtx_REG (Pmode, 2);
2593 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2597 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2601 tmp = gen_reg_rtx (Pmode);
2604 tlsreg = gen_rtx_REG (Pmode, 13);
2605 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2609 tlsreg = gen_rtx_REG (Pmode, 2);
2610 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2614 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2616 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2621 rtx r3, got, tga, tmp1, tmp2, eqv;
2624 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2628 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2631 rtx gsym = rs6000_got_sym ();
2632 got = gen_reg_rtx (Pmode);
2634 rs6000_emit_move (got, gsym, Pmode);
2638 static int tls_got_labelno = 0;
2639 rtx tempLR, lab, tmp3, mem;
2642 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2643 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2644 tempLR = gen_reg_rtx (Pmode);
2645 tmp1 = gen_reg_rtx (Pmode);
2646 tmp2 = gen_reg_rtx (Pmode);
2647 tmp3 = gen_reg_rtx (Pmode);
2648 mem = gen_rtx_MEM (Pmode, tmp1);
2649 RTX_UNCHANGING_P (mem) = 1;
2651 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2653 emit_move_insn (tmp1, tempLR);
2654 emit_move_insn (tmp2, mem);
2655 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2656 last = emit_move_insn (got, tmp3);
2657 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2659 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2661 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2667 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2669 r3 = gen_rtx_REG (Pmode, 3);
2671 insn = gen_tls_gd_64 (r3, got, addr);
2673 insn = gen_tls_gd_32 (r3, got, addr);
2676 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2677 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2678 insn = emit_call_insn (insn);
2679 CONST_OR_PURE_CALL_P (insn) = 1;
2680 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2681 insn = get_insns ();
2683 emit_libcall_block (insn, dest, r3, addr);
2685 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2687 r3 = gen_rtx_REG (Pmode, 3);
2689 insn = gen_tls_ld_64 (r3, got);
2691 insn = gen_tls_ld_32 (r3, got);
2694 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2695 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2696 insn = emit_call_insn (insn);
2697 CONST_OR_PURE_CALL_P (insn) = 1;
2698 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2699 insn = get_insns ();
2701 tmp1 = gen_reg_rtx (Pmode);
2702 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2704 emit_libcall_block (insn, tmp1, r3, eqv);
2705 if (rs6000_tls_size == 16)
2708 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2710 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2712 else if (rs6000_tls_size == 32)
2714 tmp2 = gen_reg_rtx (Pmode);
2716 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2718 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2721 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2723 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2727 tmp2 = gen_reg_rtx (Pmode);
2729 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2731 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2733 insn = gen_rtx_SET (Pmode, dest,
2734 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2740 /* IE, or 64 bit offset LE. */
2741 tmp2 = gen_reg_rtx (Pmode);
2743 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2745 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2748 insn = gen_tls_tls_64 (dest, tmp2, addr);
2750 insn = gen_tls_tls_32 (dest, tmp2, addr);
2758 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2759 instruction definitions. */
2762 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2764 return RS6000_SYMBOL_REF_TLS_P (x);
2767 /* Return 1 if X contains a thread-local symbol. */
2770 rs6000_tls_referenced_p (rtx x)
2772 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2775 /* Return 1 if *X is a thread-local symbol. This is the same as
2776 rs6000_tls_symbol_ref except for the type of the unused argument. */
2779 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2781 return RS6000_SYMBOL_REF_TLS_P (*x);
2784 /* The convention appears to be to define this wherever it is used.
2785 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2786 is now used here. */
2787 #ifndef REG_MODE_OK_FOR_BASE_P
2788 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2791 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2792 replace the input X, or the original X if no replacement is called for.
2793 The output parameter *WIN is 1 if the calling macro should goto WIN,
2796 For RS/6000, we wish to handle large displacements off a base
2797 register by splitting the addend across an addiu/addis and the mem insn.
2798 This cuts number of extra insns needed from 3 to 1.
2800 On Darwin, we use this to generate code for floating point constants.
2801 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2802 The Darwin code is inside #if TARGET_MACHO because only then is
2803 machopic_function_base_name() defined. */
2805 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2806 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
2808 /* We must recognize output that we have already generated ourselves. */
2809 if (GET_CODE (x) == PLUS
2810 && GET_CODE (XEXP (x, 0)) == PLUS
2811 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2812 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2813 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2815 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2816 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2817 opnum, (enum reload_type)type);
2823 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2824 && GET_CODE (x) == LO_SUM
2825 && GET_CODE (XEXP (x, 0)) == PLUS
2826 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2827 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2828 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2829 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2830 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2831 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2832 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2834 /* Result of previous invocation of this function on Darwin
2835 floating point constant. */
2836 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2837 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2838 opnum, (enum reload_type)type);
2843 if (GET_CODE (x) == PLUS
2844 && GET_CODE (XEXP (x, 0)) == REG
2845 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2846 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2847 && GET_CODE (XEXP (x, 1)) == CONST_INT
2848 && !SPE_VECTOR_MODE (mode)
2849 && !ALTIVEC_VECTOR_MODE (mode))
2851 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2852 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2854 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2856 /* Check for 32-bit overflow. */
2857 if (high + low != val)
2863 /* Reload the high part into a base reg; leave the low part
2864 in the mem directly. */
2866 x = gen_rtx_PLUS (GET_MODE (x),
2867 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2871 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2872 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2873 opnum, (enum reload_type)type);
2878 if (GET_CODE (x) == SYMBOL_REF
2879 && DEFAULT_ABI == ABI_DARWIN
2880 && !ALTIVEC_VECTOR_MODE (mode)
2883 /* Darwin load of floating point constant. */
2884 rtx offset = gen_rtx (CONST, Pmode,
2885 gen_rtx (MINUS, Pmode, x,
2886 gen_rtx (SYMBOL_REF, Pmode,
2887 machopic_function_base_name ())));
2888 x = gen_rtx (LO_SUM, GET_MODE (x),
2889 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2890 gen_rtx (HIGH, Pmode, offset)), offset);
2891 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2892 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2893 opnum, (enum reload_type)type);
2897 if (GET_CODE (x) == SYMBOL_REF
2898 && DEFAULT_ABI == ABI_DARWIN
2899 && !ALTIVEC_VECTOR_MODE (mode)
2900 && MACHO_DYNAMIC_NO_PIC_P)
2902 /* Darwin load of floating point constant. */
2903 x = gen_rtx (LO_SUM, GET_MODE (x),
2904 gen_rtx (HIGH, Pmode, x), x);
2905 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2906 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2907 opnum, (enum reload_type)type);
2913 && constant_pool_expr_p (x)
2914 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2916 (x) = create_TOC_reference (x);
2924 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2925 that is a valid memory address for an instruction.
2926 The MODE argument is the machine mode for the MEM expression
2927 that wants to use this address.
2929 On the RS/6000, there are four valid address: a SYMBOL_REF that
2930 refers to a constant pool entry of an address (or the sum of it
2931 plus a constant), a short (16-bit signed) constant plus a register,
2932 the sum of two registers, or a register indirect, possibly with an
2933 auto-increment. For DFmode and DImode with a constant plus register,
2934 we must ensure that both words are addressable or PowerPC64 with offset
2937 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2938 32-bit DImode, TImode), indexed addressing cannot be used because
2939 adjacent memory cells are accessed by adding word-sized offsets
2940 during assembly output. */
2942 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
2944 if (RS6000_SYMBOL_REF_TLS_P (x))
2946 if (legitimate_indirect_address_p (x, reg_ok_strict))
2948 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2949 && !ALTIVEC_VECTOR_MODE (mode)
2950 && !SPE_VECTOR_MODE (mode)
2952 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
2954 if (legitimate_small_data_p (mode, x))
2956 if (legitimate_constant_pool_address_p (x))
2958 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2960 && GET_CODE (x) == PLUS
2961 && GET_CODE (XEXP (x, 0)) == REG
2962 && XEXP (x, 0) == virtual_stack_vars_rtx
2963 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2965 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
2968 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2970 || (mode != DFmode && mode != TFmode))
2971 && (TARGET_POWERPC64 || mode != DImode)
2972 && legitimate_indexed_address_p (x, reg_ok_strict))
2974 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
2979 /* Go to LABEL if ADDR (a legitimate address expression)
2980 has an effect that depends on the machine mode it is used for.
2982 On the RS/6000 this is true of all integral offsets (since AltiVec
2983 modes don't allow them) or is a pre-increment or decrement.
2985 ??? Except that due to conceptual problems in offsettable_address_p
2986 we can't really report the problems of integral offsets. So leave
2987 this assuming that the adjustable offset must be valid for the
2988 sub-words of a TFmode operand, which is what we had before. */
2991 rs6000_mode_dependent_address (rtx addr)
2993 switch (GET_CODE (addr))
2996 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2998 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
2999 return val + 12 + 0x8000 >= 0x10000;
3008 return TARGET_UPDATE;
3017 /* Try to output insns to set TARGET equal to the constant C if it can
3018 be done in less than N insns. Do all computations in MODE.
3019 Returns the place where the output has been placed if it can be
3020 done and the insns have been emitted. If it would take more than N
3021 insns, zero is returned and no insns and emitted. */
3024 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3025 rtx source, int n ATTRIBUTE_UNUSED)
3027 rtx result, insn, set;
3028 HOST_WIDE_INT c0, c1;
3030 if (mode == QImode || mode == HImode)
3033 dest = gen_reg_rtx (mode);
3034 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3037 else if (mode == SImode)
3039 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3041 emit_insn (gen_rtx_SET (VOIDmode, result,
3042 GEN_INT (INTVAL (source)
3043 & (~ (HOST_WIDE_INT) 0xffff))));
3044 emit_insn (gen_rtx_SET (VOIDmode, dest,
3045 gen_rtx_IOR (SImode, result,
3046 GEN_INT (INTVAL (source) & 0xffff))));
3049 else if (mode == DImode)
3051 if (GET_CODE (source) == CONST_INT)
3053 c0 = INTVAL (source);
3056 else if (GET_CODE (source) == CONST_DOUBLE)
3058 #if HOST_BITS_PER_WIDE_INT >= 64
3059 c0 = CONST_DOUBLE_LOW (source);
3062 c0 = CONST_DOUBLE_LOW (source);
3063 c1 = CONST_DOUBLE_HIGH (source);
3069 result = rs6000_emit_set_long_const (dest, c0, c1);
3074 insn = get_last_insn ();
3075 set = single_set (insn);
3076 if (! CONSTANT_P (SET_SRC (set)))
3077 set_unique_reg_note (insn, REG_EQUAL, source);
3082 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3083 fall back to a straight forward decomposition. We do this to avoid
3084 exponential run times encountered when looking for longer sequences
3085 with rs6000_emit_set_const. */
3087 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3089 if (!TARGET_POWERPC64)
3091 rtx operand1, operand2;
3093 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3095 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3097 emit_move_insn (operand1, GEN_INT (c1));
3098 emit_move_insn (operand2, GEN_INT (c2));
3102 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3105 ud2 = (c1 & 0xffff0000) >> 16;
3106 #if HOST_BITS_PER_WIDE_INT >= 64
3110 ud4 = (c2 & 0xffff0000) >> 16;
3112 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3113 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3116 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3118 emit_move_insn (dest, GEN_INT (ud1));
3121 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3122 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3125 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3128 emit_move_insn (dest, GEN_INT (ud2 << 16));
3130 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3132 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3133 || (ud4 == 0 && ! (ud3 & 0x8000)))
3136 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3139 emit_move_insn (dest, GEN_INT (ud3 << 16));
3142 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3143 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3145 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3150 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3153 emit_move_insn (dest, GEN_INT (ud4 << 16));
3156 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3158 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3160 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3161 GEN_INT (ud2 << 16)));
3163 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3169 /* Emit a move from SOURCE to DEST in mode MODE. */
3171 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3175 operands[1] = source;
3177 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3178 if (GET_CODE (operands[1]) == CONST_DOUBLE
3179 && ! FLOAT_MODE_P (mode)
3180 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3182 /* FIXME. This should never happen. */
3183 /* Since it seems that it does, do the safe thing and convert
3185 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3187 if (GET_CODE (operands[1]) == CONST_DOUBLE
3188 && ! FLOAT_MODE_P (mode)
3189 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3190 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3191 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3192 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3195 /* Check if GCC is setting up a block move that will end up using FP
3196 registers as temporaries. We must make sure this is acceptable. */
3197 if (GET_CODE (operands[0]) == MEM
3198 && GET_CODE (operands[1]) == MEM
3200 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3201 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3202 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3203 ? 32 : MEM_ALIGN (operands[0])))
3204 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3206 : MEM_ALIGN (operands[1]))))
3207 && ! MEM_VOLATILE_P (operands [0])
3208 && ! MEM_VOLATILE_P (operands [1]))
3210 emit_move_insn (adjust_address (operands[0], SImode, 0),
3211 adjust_address (operands[1], SImode, 0));
3212 emit_move_insn (adjust_address (operands[0], SImode, 4),
3213 adjust_address (operands[1], SImode, 4));
3217 if (!no_new_pseudos)
3219 if (GET_CODE (operands[1]) == MEM && optimize > 0
3220 && (mode == QImode || mode == HImode || mode == SImode)
3221 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3223 rtx reg = gen_reg_rtx (word_mode);
3225 emit_insn (gen_rtx_SET (word_mode, reg,
3226 gen_rtx_ZERO_EXTEND (word_mode,
3228 operands[1] = gen_lowpart (mode, reg);
3230 if (GET_CODE (operands[0]) != REG)
3231 operands[1] = force_reg (mode, operands[1]);
3234 if (mode == SFmode && ! TARGET_POWERPC
3235 && TARGET_HARD_FLOAT && TARGET_FPRS
3236 && GET_CODE (operands[0]) == MEM)
3240 if (reload_in_progress || reload_completed)
3241 regnum = true_regnum (operands[1]);
3242 else if (GET_CODE (operands[1]) == REG)
3243 regnum = REGNO (operands[1]);
3247 /* If operands[1] is a register, on POWER it may have
3248 double-precision data in it, so truncate it to single
3250 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3253 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3254 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3255 operands[1] = newreg;
3259 /* Recognize the case where operand[1] is a reference to thread-local
3260 data and load its address to a register. */
3261 if (GET_CODE (operands[1]) == SYMBOL_REF)
3263 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3265 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3268 /* Handle the case where reload calls us with an invalid address. */
3269 if (reload_in_progress && mode == Pmode
3270 && (! general_operand (operands[1], mode)
3271 || ! nonimmediate_operand (operands[0], mode)))
3274 /* Handle the case of CONSTANT_P_RTX. */
3275 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3278 /* FIXME: In the long term, this switch statement should go away
3279 and be replaced by a sequence of tests based on things like
3285 if (CONSTANT_P (operands[1])
3286 && GET_CODE (operands[1]) != CONST_INT)
3287 operands[1] = force_const_mem (mode, operands[1]);
3293 if (CONSTANT_P (operands[1])
3294 && ! easy_fp_constant (operands[1], mode))
3295 operands[1] = force_const_mem (mode, operands[1]);
3306 if (CONSTANT_P (operands[1])
3307 && !easy_vector_constant (operands[1], mode))
3308 operands[1] = force_const_mem (mode, operands[1]);
3313 /* Use default pattern for address of ELF small data */
3316 && DEFAULT_ABI == ABI_V4
3317 && (GET_CODE (operands[1]) == SYMBOL_REF
3318 || GET_CODE (operands[1]) == CONST)
3319 && small_data_operand (operands[1], mode))
3321 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3325 if (DEFAULT_ABI == ABI_V4
3326 && mode == Pmode && mode == SImode
3327 && flag_pic == 1 && got_operand (operands[1], mode))
3329 emit_insn (gen_movsi_got (operands[0], operands[1]));
3333 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3337 && CONSTANT_P (operands[1])
3338 && GET_CODE (operands[1]) != HIGH
3339 && GET_CODE (operands[1]) != CONST_INT)
3341 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3343 /* If this is a function address on -mcall-aixdesc,
3344 convert it to the address of the descriptor. */
3345 if (DEFAULT_ABI == ABI_AIX
3346 && GET_CODE (operands[1]) == SYMBOL_REF
3347 && XSTR (operands[1], 0)[0] == '.')
3349 const char *name = XSTR (operands[1], 0);
3351 while (*name == '.')
3353 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3354 CONSTANT_POOL_ADDRESS_P (new_ref)
3355 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3356 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3357 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3358 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3359 operands[1] = new_ref;
3362 if (DEFAULT_ABI == ABI_DARWIN)
3365 if (MACHO_DYNAMIC_NO_PIC_P)
3367 /* Take care of any required data indirection. */
3368 operands[1] = rs6000_machopic_legitimize_pic_address (
3369 operands[1], mode, operands[0]);
3370 if (operands[0] != operands[1])
3371 emit_insn (gen_rtx_SET (VOIDmode,
3372 operands[0], operands[1]));
3376 emit_insn (gen_macho_high (target, operands[1]));
3377 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3381 emit_insn (gen_elf_high (target, operands[1]));
3382 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3386 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3387 and we have put it in the TOC, we just need to make a TOC-relative
3390 && GET_CODE (operands[1]) == SYMBOL_REF
3391 && constant_pool_expr_p (operands[1])
3392 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3393 get_pool_mode (operands[1])))
3395 operands[1] = create_TOC_reference (operands[1]);
3397 else if (mode == Pmode
3398 && CONSTANT_P (operands[1])
3399 && ((GET_CODE (operands[1]) != CONST_INT
3400 && ! easy_fp_constant (operands[1], mode))
3401 || (GET_CODE (operands[1]) == CONST_INT
3402 && num_insns_constant (operands[1], mode) > 2)
3403 || (GET_CODE (operands[0]) == REG
3404 && FP_REGNO_P (REGNO (operands[0]))))
3405 && GET_CODE (operands[1]) != HIGH
3406 && ! legitimate_constant_pool_address_p (operands[1])
3407 && ! toc_relative_expr_p (operands[1]))
3409 /* Emit a USE operation so that the constant isn't deleted if
3410 expensive optimizations are turned on because nobody
3411 references it. This should only be done for operands that
3412 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3413 This should not be done for operands that contain LABEL_REFs.
3414 For now, we just handle the obvious case. */
3415 if (GET_CODE (operands[1]) != LABEL_REF)
3416 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3419 /* Darwin uses a special PIC legitimizer. */
3420 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3423 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3425 if (operands[0] != operands[1])
3426 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3431 /* If we are to limit the number of things we put in the TOC and
3432 this is a symbol plus a constant we can add in one insn,
3433 just put the symbol in the TOC and add the constant. Don't do
3434 this if reload is in progress. */
3435 if (GET_CODE (operands[1]) == CONST
3436 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3437 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3438 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3439 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3440 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3441 && ! side_effects_p (operands[0]))
3444 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3445 rtx other = XEXP (XEXP (operands[1], 0), 1);
3447 sym = force_reg (mode, sym);
3449 emit_insn (gen_addsi3 (operands[0], sym, other));
3451 emit_insn (gen_adddi3 (operands[0], sym, other));
3455 operands[1] = force_const_mem (mode, operands[1]);
3458 && constant_pool_expr_p (XEXP (operands[1], 0))
3459 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3460 get_pool_constant (XEXP (operands[1], 0)),
3461 get_pool_mode (XEXP (operands[1], 0))))
3464 = gen_rtx_MEM (mode,
3465 create_TOC_reference (XEXP (operands[1], 0)));
3466 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3467 RTX_UNCHANGING_P (operands[1]) = 1;
3473 if (GET_CODE (operands[0]) == MEM
3474 && GET_CODE (XEXP (operands[0], 0)) != REG
3475 && ! reload_in_progress)
3477 = replace_equiv_address (operands[0],
3478 copy_addr_to_reg (XEXP (operands[0], 0)));
3480 if (GET_CODE (operands[1]) == MEM
3481 && GET_CODE (XEXP (operands[1], 0)) != REG
3482 && ! reload_in_progress)
3484 = replace_equiv_address (operands[1],
3485 copy_addr_to_reg (XEXP (operands[1], 0)));
3488 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3490 gen_rtx_SET (VOIDmode,
3491 operands[0], operands[1]),
3492 gen_rtx_CLOBBER (VOIDmode,
3493 gen_rtx_SCRATCH (SImode)))));
3502 /* Above, we may have called force_const_mem which may have returned
3503 an invalid address. If we can, fix this up; otherwise, reload will
3504 have to deal with it. */
3505 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3506 operands[1] = validize_mem (operands[1]);
3509 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3512 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3513 for a call to a function whose data type is FNTYPE.
3514 For a library call, FNTYPE is 0.
3516 For incoming args we set the number of arguments in the prototype large
3517 so we never return a PARALLEL. */
3520 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3521 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3523 static CUMULATIVE_ARGS zero_cumulative;
3525 *cum = zero_cumulative;
3527 cum->fregno = FP_ARG_MIN_REG;
3528 cum->vregno = ALTIVEC_ARG_MIN_REG;
3529 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3530 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3531 ? CALL_LIBCALL : CALL_NORMAL);
3532 cum->sysv_gregno = GP_ARG_MIN_REG;
3533 cum->stdarg = fntype
3534 && (TYPE_ARG_TYPES (fntype) != 0
3535 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3536 != void_type_node));
3539 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3541 else if (cum->prototype)
3542 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3543 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3544 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3547 cum->nargs_prototype = 0;
3549 /* Check for a longcall attribute. */
3551 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3552 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3553 cum->call_cookie = CALL_LONG;
3555 if (TARGET_DEBUG_ARG)
3557 fprintf (stderr, "\ninit_cumulative_args:");
3560 tree ret_type = TREE_TYPE (fntype);
3561 fprintf (stderr, " ret code = %s,",
3562 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3565 if (cum->call_cookie & CALL_LONG)
3566 fprintf (stderr, " longcall,");
3568 fprintf (stderr, " proto = %d, nargs = %d\n",
3569 cum->prototype, cum->nargs_prototype);
3573 /* If defined, a C expression which determines whether, and in which
3574 direction, to pad out an argument with extra space. The value
3575 should be of type `enum direction': either `upward' to pad above
3576 the argument, `downward' to pad below, or `none' to inhibit
3579 For the AIX ABI structs are always stored left shifted in their
3583 function_arg_padding (enum machine_mode mode, tree type)
3585 #ifndef AGGREGATE_PADDING_FIXED
3586 #define AGGREGATE_PADDING_FIXED 0
3588 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3589 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3592 if (!AGGREGATE_PADDING_FIXED)
3594 /* GCC used to pass structures of the same size as integer types as
3595 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3596 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3597 passed padded downward, except that -mstrict-align further
3598 muddied the water in that multi-component structures of 2 and 4
3599 bytes in size were passed padded upward.
3601 The following arranges for best compatibility with previous
3602 versions of gcc, but removes the -mstrict-align dependency. */
3603 if (BYTES_BIG_ENDIAN)
3605 HOST_WIDE_INT size = 0;
3607 if (mode == BLKmode)
3609 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3610 size = int_size_in_bytes (type);
3613 size = GET_MODE_SIZE (mode);
3615 if (size == 1 || size == 2 || size == 4)
3621 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3623 if (type != 0 && AGGREGATE_TYPE_P (type))
3627 /* This is the default definition. */
3628 return (! BYTES_BIG_ENDIAN
3631 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3632 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3633 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3634 ? downward : upward));
3637 /* If defined, a C expression that gives the alignment boundary, in bits,
3638 of an argument with the specified mode and type. If it is not defined,
3639 PARM_BOUNDARY is used for all arguments.
3641 V.4 wants long longs to be double word aligned. */
3644 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3646 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3648 else if (SPE_VECTOR_MODE (mode))
3650 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3653 return PARM_BOUNDARY;
3656 /* Update the data in CUM to advance over an argument
3657 of mode MODE and data type TYPE.
3658 (TYPE is null for libcalls where that information may not be available.) */
3661 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3662 tree type, int named)
3664 cum->nargs_prototype--;
3666 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3668 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3671 cum->words += RS6000_ARG_SIZE (mode, type);
3673 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3675 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3677 else if (DEFAULT_ABI == ABI_V4)
3679 if (TARGET_HARD_FLOAT && TARGET_FPRS
3680 && (mode == SFmode || mode == DFmode))
3682 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3687 cum->words += cum->words & 1;
3688 cum->words += RS6000_ARG_SIZE (mode, type);
3694 int gregno = cum->sysv_gregno;
3696 /* Aggregates and IEEE quad get passed by reference. */
3697 if ((type && AGGREGATE_TYPE_P (type))
3701 n_words = RS6000_ARG_SIZE (mode, type);
3703 /* Long long and SPE vectors are put in odd registers. */
3704 if (n_words == 2 && (gregno & 1) == 0)
3707 /* Long long and SPE vectors are not split between registers
3709 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3711 /* Long long is aligned on the stack. */
3713 cum->words += cum->words & 1;
3714 cum->words += n_words;
3717 /* Note: continuing to accumulate gregno past when we've started
3718 spilling to the stack indicates the fact that we've started
3719 spilling to the stack to expand_builtin_saveregs. */
3720 cum->sysv_gregno = gregno + n_words;
3723 if (TARGET_DEBUG_ARG)
3725 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3726 cum->words, cum->fregno);
3727 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3728 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3729 fprintf (stderr, "mode = %4s, named = %d\n",
3730 GET_MODE_NAME (mode), named);
3735 int align = (TARGET_32BIT && (cum->words & 1) != 0
3736 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3738 cum->words += align + RS6000_ARG_SIZE (mode, type);
3740 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3741 && TARGET_HARD_FLOAT && TARGET_FPRS)
3742 cum->fregno += (mode == TFmode ? 2 : 1);
3744 if (TARGET_DEBUG_ARG)
3746 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3747 cum->words, cum->fregno);
3748 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3749 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3750 fprintf (stderr, "named = %d, align = %d\n", named, align);
3755 /* Determine where to put a SIMD argument on the SPE. */
3757 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3762 int gregno = cum->sysv_gregno;
3763 int n_words = RS6000_ARG_SIZE (mode, type);
3765 /* SPE vectors are put in odd registers. */
3766 if (n_words == 2 && (gregno & 1) == 0)
3769 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3772 enum machine_mode m = SImode;
3774 r1 = gen_rtx_REG (m, gregno);
3775 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3776 r2 = gen_rtx_REG (m, gregno + 1);
3777 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3778 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3785 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3786 return gen_rtx_REG (mode, cum->sysv_gregno);
3792 /* Determine where to put an argument to a function.
3793 Value is zero to push the argument on the stack,
3794 or a hard register in which to store the argument.
3796 MODE is the argument's machine mode.
3797 TYPE is the data type of the argument (as a tree).
3798 This is null for libcalls where that information may
3800 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3801 the preceding args and about the function being called.
3802 NAMED is nonzero if this argument is a named parameter
3803 (otherwise it is an extra parameter matching an ellipsis).
3805 On RS/6000 the first eight words of non-FP are normally in registers
3806 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3807 Under V.4, the first 8 FP args are in registers.
3809 If this is floating-point and no prototype is specified, we use
3810 both an FP and integer register (or possibly FP reg and stack). Library
3811 functions (when CALL_LIBCALL is set) always have the proper types for args,
3812 so we can pass the FP value just in one register. emit_library_function
3813 doesn't support PARALLEL anyway. */
3816 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3817 tree type, int named)
3819 enum rs6000_abi abi = DEFAULT_ABI;
3821 /* Return a marker to indicate whether CR1 needs to set or clear the
3822 bit that V.4 uses to say fp args were passed in registers.
3823 Assume that we don't need the marker for software floating point,
3824 or compiler generated library calls. */
3825 if (mode == VOIDmode)
3828 && cum->nargs_prototype < 0
3829 && (cum->call_cookie & CALL_LIBCALL) == 0
3830 && (cum->prototype || TARGET_NO_PROTOTYPE))
3832 /* For the SPE, we need to crxor CR6 always. */
3834 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3835 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3836 return GEN_INT (cum->call_cookie
3837 | ((cum->fregno == FP_ARG_MIN_REG)
3838 ? CALL_V4_SET_FP_ARGS
3839 : CALL_V4_CLEAR_FP_ARGS));
3842 return GEN_INT (cum->call_cookie);
3845 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3847 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3848 return gen_rtx_REG (mode, cum->vregno);
3852 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
3853 return rs6000_spe_function_arg (cum, mode, type);
3854 else if (abi == ABI_V4)
3856 if (TARGET_HARD_FLOAT && TARGET_FPRS
3857 && (mode == SFmode || mode == DFmode))
3859 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3860 return gen_rtx_REG (mode, cum->fregno);
3867 int gregno = cum->sysv_gregno;
3869 /* Aggregates and IEEE quad get passed by reference. */
3870 if ((type && AGGREGATE_TYPE_P (type))
3874 n_words = RS6000_ARG_SIZE (mode, type);
3876 /* Long long and SPE vectors are put in odd registers. */
3877 if (n_words == 2 && (gregno & 1) == 0)
3880 /* Long long do not split between registers and stack. */
3881 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3882 return gen_rtx_REG (mode, gregno);
3889 int align = (TARGET_32BIT && (cum->words & 1) != 0
3890 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3891 int align_words = cum->words + align;
3893 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3896 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3899 || ((cum->nargs_prototype > 0)
3900 /* IBM AIX extended its linkage convention definition always
3901 to require FP args after register save area hole on the
3903 && (DEFAULT_ABI != ABI_AIX
3905 || (align_words < GP_ARG_NUM_REG))))
3906 return gen_rtx_REG (mode, cum->fregno);
3908 return gen_rtx_PARALLEL (mode,
3910 gen_rtx_EXPR_LIST (VOIDmode,
3911 ((align_words >= GP_ARG_NUM_REG)
3914 + RS6000_ARG_SIZE (mode, type)
3916 /* If this is partially on the stack, then
3917 we only include the portion actually
3918 in registers here. */
3919 ? gen_rtx_REG (SImode,
3920 GP_ARG_MIN_REG + align_words)
3921 : gen_rtx_REG (mode,
3922 GP_ARG_MIN_REG + align_words))),
3924 gen_rtx_EXPR_LIST (VOIDmode,
3925 gen_rtx_REG (mode, cum->fregno),
3928 else if (align_words < GP_ARG_NUM_REG)
3929 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3935 /* For an arg passed partly in registers and partly in memory,
3936 this is the number of registers used.
3937 For args passed entirely in registers or entirely in memory, zero. */
3940 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3941 tree type, int named ATTRIBUTE_UNUSED)
3943 if (DEFAULT_ABI == ABI_V4)
3946 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3947 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3949 if (cum->nargs_prototype >= 0)
3953 if (cum->words < GP_ARG_NUM_REG
3954 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3956 int ret = GP_ARG_NUM_REG - cum->words;
3957 if (ret && TARGET_DEBUG_ARG)
3958 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3966 /* A C expression that indicates when an argument must be passed by
3967 reference. If nonzero for an argument, a copy of that argument is
3968 made in memory and a pointer to the argument is passed instead of
3969 the argument itself. The pointer is passed in whatever way is
3970 appropriate for passing a pointer to that type.
3972 Under V.4, structures and unions are passed by reference.
3974 As an extension to all ABIs, variable sized types are passed by
3978 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
3979 enum machine_mode mode ATTRIBUTE_UNUSED,
3980 tree type, int named ATTRIBUTE_UNUSED)
3982 if (DEFAULT_ABI == ABI_V4
3983 && ((type && AGGREGATE_TYPE_P (type))
3986 if (TARGET_DEBUG_ARG)
3987 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3991 return type && int_size_in_bytes (type) <= 0;
3994 /* Perform any needed actions needed for a function that is receiving a
3995 variable number of arguments.
3999 MODE and TYPE are the mode and type of the current parameter.
4001 PRETEND_SIZE is a variable that should be set to the amount of stack
4002 that must be pushed by the prolog to pretend that our caller pushed
4005 Normally, this macro will push all remaining incoming registers on the
4006 stack and set PRETEND_SIZE to the length of the registers pushed. */
4009 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4010 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4012 CUMULATIVE_ARGS next_cum;
4013 int reg_size = TARGET_32BIT ? 4 : 8;
4014 rtx save_area = NULL_RTX, mem;
4015 int first_reg_offset, set;
4019 fntype = TREE_TYPE (current_function_decl);
4020 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4021 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4022 != void_type_node));
4024 /* For varargs, we do not want to skip the dummy va_dcl argument.
4025 For stdargs, we do want to skip the last named argument. */
4028 function_arg_advance (&next_cum, mode, type, 1);
4030 if (DEFAULT_ABI == ABI_V4)
4032 /* Indicate to allocate space on the stack for varargs save area. */
4033 cfun->machine->sysv_varargs_p = 1;
4035 save_area = plus_constant (virtual_stack_vars_rtx,
4036 - RS6000_VARARGS_SIZE);
4038 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4042 first_reg_offset = next_cum.words;
4043 save_area = virtual_incoming_args_rtx;
4044 cfun->machine->sysv_varargs_p = 0;
4046 if (MUST_PASS_IN_STACK (mode, type))
4047 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4050 set = get_varargs_alias_set ();
4051 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4053 mem = gen_rtx_MEM (BLKmode,
4054 plus_constant (save_area,
4055 first_reg_offset * reg_size)),
4056 set_mem_alias_set (mem, set);
4057 set_mem_align (mem, BITS_PER_WORD);
4059 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4060 GP_ARG_NUM_REG - first_reg_offset);
4063 /* Save FP registers if needed. */
4064 if (DEFAULT_ABI == ABI_V4
4065 && TARGET_HARD_FLOAT && TARGET_FPRS
4067 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4069 int fregno = next_cum.fregno;
4070 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4071 rtx lab = gen_label_rtx ();
4072 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4074 emit_jump_insn (gen_rtx_SET (VOIDmode,
4076 gen_rtx_IF_THEN_ELSE (VOIDmode,
4077 gen_rtx_NE (VOIDmode, cr1,
4079 gen_rtx_LABEL_REF (VOIDmode, lab),
4082 while (fregno <= FP_ARG_V4_MAX_REG)
4084 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4085 set_mem_alias_set (mem, set);
4086 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4095 /* Create the va_list data type. */
4098 rs6000_build_va_list ()
4100 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4102 /* For AIX, prefer 'char *' because that's what the system
4103 header files like. */
4104 if (DEFAULT_ABI != ABI_V4)
4105 return build_pointer_type (char_type_node);
4107 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4108 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4110 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4111 unsigned_char_type_node);
4112 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4113 unsigned_char_type_node);
4114 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4116 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4117 short_unsigned_type_node);
4118 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4120 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4123 DECL_FIELD_CONTEXT (f_gpr) = record;
4124 DECL_FIELD_CONTEXT (f_fpr) = record;
4125 DECL_FIELD_CONTEXT (f_res) = record;
4126 DECL_FIELD_CONTEXT (f_ovf) = record;
4127 DECL_FIELD_CONTEXT (f_sav) = record;
4129 TREE_CHAIN (record) = type_decl;
4130 TYPE_NAME (record) = type_decl;
4131 TYPE_FIELDS (record) = f_gpr;
4132 TREE_CHAIN (f_gpr) = f_fpr;
4133 TREE_CHAIN (f_fpr) = f_res;
4134 TREE_CHAIN (f_res) = f_ovf;
4135 TREE_CHAIN (f_ovf) = f_sav;
4137 layout_type (record);
4139 /* The correct type is an array type of one element. */
4140 return build_array_type (record, build_index_type (size_zero_node));
4143 /* Implement va_start. */
4146 rs6000_va_start (tree valist, rtx nextarg)
4148 HOST_WIDE_INT words, n_gpr, n_fpr;
4149 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4150 tree gpr, fpr, ovf, sav, t;
4152 /* Only SVR4 needs something special. */
4153 if (DEFAULT_ABI != ABI_V4)
4155 std_expand_builtin_va_start (valist, nextarg);
4159 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4160 f_fpr = TREE_CHAIN (f_gpr);
4161 f_res = TREE_CHAIN (f_fpr);
4162 f_ovf = TREE_CHAIN (f_res);
4163 f_sav = TREE_CHAIN (f_ovf);
4165 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4166 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4167 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4168 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4169 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4171 /* Count number of gp and fp argument registers used. */
4172 words = current_function_args_info.words;
4173 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4174 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4176 if (TARGET_DEBUG_ARG)
4177 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4178 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4179 words, n_gpr, n_fpr);
4181 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4182 TREE_SIDE_EFFECTS (t) = 1;
4183 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4185 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4186 TREE_SIDE_EFFECTS (t) = 1;
4187 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4189 /* Find the overflow area. */
4190 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4192 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4193 build_int_2 (words * UNITS_PER_WORD, 0));
4194 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4195 TREE_SIDE_EFFECTS (t) = 1;
4196 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4198 /* Find the register save area. */
4199 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4200 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4201 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4202 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4203 TREE_SIDE_EFFECTS (t) = 1;
4204 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4207 /* Implement va_arg. */
4210 rs6000_va_arg (tree valist, tree type)
4212 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4213 tree gpr, fpr, ovf, sav, reg, t, u;
4214 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4215 rtx lab_false, lab_over, addr_rtx, r;
4217 if (DEFAULT_ABI != ABI_V4)
4219 /* Variable sized types are passed by reference. */
4220 if (int_size_in_bytes (type) <= 0)
4222 u = build_pointer_type (type);
4224 /* Args grow upward. */
4225 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4226 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4227 TREE_SIDE_EFFECTS (t) = 1;
4229 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4230 TREE_SIDE_EFFECTS (t) = 1;
4232 t = build1 (INDIRECT_REF, u, t);
4233 TREE_SIDE_EFFECTS (t) = 1;
4235 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4238 return std_expand_builtin_va_arg (valist, type);
4241 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4242 f_fpr = TREE_CHAIN (f_gpr);
4243 f_res = TREE_CHAIN (f_fpr);
4244 f_ovf = TREE_CHAIN (f_res);
4245 f_sav = TREE_CHAIN (f_ovf);
4247 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4248 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4249 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4250 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4251 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4253 size = int_size_in_bytes (type);
4254 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4256 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4258 /* Aggregates and long doubles are passed by reference. */
4264 size = UNITS_PER_WORD;
4267 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4269 /* FP args go in FP registers, if present. */
4278 /* Otherwise into GP registers. */
4286 /* Pull the value out of the saved registers ... */
4288 lab_false = gen_label_rtx ();
4289 lab_over = gen_label_rtx ();
4290 addr_rtx = gen_reg_rtx (Pmode);
4292 /* AltiVec vectors never go in registers. */
4293 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4295 TREE_THIS_VOLATILE (reg) = 1;
4296 emit_cmp_and_jump_insns
4297 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4298 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4301 /* Long long is aligned in the registers. */
4304 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4305 build_int_2 (n_reg - 1, 0));
4306 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4307 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4308 TREE_SIDE_EFFECTS (u) = 1;
4309 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4313 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4317 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4318 build_int_2 (n_reg, 0));
4319 TREE_SIDE_EFFECTS (u) = 1;
4321 u = build1 (CONVERT_EXPR, integer_type_node, u);
4322 TREE_SIDE_EFFECTS (u) = 1;
4324 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4325 TREE_SIDE_EFFECTS (u) = 1;
4327 t = build (PLUS_EXPR, ptr_type_node, t, u);
4328 TREE_SIDE_EFFECTS (t) = 1;
4330 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4332 emit_move_insn (addr_rtx, r);
4334 emit_jump_insn (gen_jump (lab_over));
4338 emit_label (lab_false);
4340 /* ... otherwise out of the overflow area. */
4342 /* Make sure we don't find reg 7 for the next int arg.
4344 All AltiVec vectors go in the overflow area. So in the AltiVec
4345 case we need to get the vectors from the overflow area, but
4346 remember where the GPRs and FPRs are. */
4347 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4348 || !TARGET_ALTIVEC))
4350 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4351 TREE_SIDE_EFFECTS (t) = 1;
4352 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4355 /* Care for on-stack alignment if needed. */
4362 /* AltiVec vectors are 16 byte aligned. */
4363 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4368 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4369 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4373 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4375 emit_move_insn (addr_rtx, r);
4377 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4378 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4379 TREE_SIDE_EFFECTS (t) = 1;
4380 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4382 emit_label (lab_over);
4386 r = gen_rtx_MEM (Pmode, addr_rtx);
4387 set_mem_alias_set (r, get_varargs_alias_set ());
4388 emit_move_insn (addr_rtx, r);
4396 #define def_builtin(MASK, NAME, TYPE, CODE) \
4398 if ((MASK) & target_flags) \
4399 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4403 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4405 static const struct builtin_description bdesc_3arg[] =
4407 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4408 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4409 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4410 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4411 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4412 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4413 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4414 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4415 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4416 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4417 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4418 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4419 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4420 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4421 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4422 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4423 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4424 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4425 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4426 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4427 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4428 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4429 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4432 /* DST operations: void foo (void *, const int, const char). */
4434 static const struct builtin_description bdesc_dst[] =
4436 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4437 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4438 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4439 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4442 /* Simple binary operations: VECc = foo (VECa, VECb). */
4444 static struct builtin_description bdesc_2arg[] =
4446 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4447 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4448 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4449 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4450 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4451 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4452 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4453 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4454 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4455 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4456 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4457 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4458 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4459 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4460 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4461 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4462 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4463 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4464 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4465 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4466 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4467 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4468 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4469 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4470 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4471 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4472 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4473 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4474 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4475 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4476 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4477 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4478 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4479 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4480 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4481 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4482 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4483 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4484 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4485 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4486 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4487 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4488 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4489 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4490 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4491 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4492 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4493 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4494 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4495 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4496 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4497 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4498 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4499 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4500 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4501 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4502 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4503 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4504 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4505 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4506 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4507 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4508 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4509 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4510 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4511 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4512 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4513 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4514 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4515 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4516 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4517 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4518 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4519 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4520 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4521 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4522 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4523 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4524 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4525 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4526 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4527 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4528 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4529 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4530 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4531 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4532 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4533 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4534 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4535 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4536 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4537 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4538 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4539 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4540 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4541 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4542 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4543 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4544 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4545 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4546 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4547 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4548 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4549 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4550 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4551 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4552 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4553 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4554 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4555 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4556 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4557 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4558 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4560 /* Place holder, leave as first spe builtin. */
4561 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4562 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4563 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4564 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4565 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4566 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4567 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4568 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4569 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4570 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4571 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4572 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4573 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4574 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4575 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4576 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4577 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4578 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4579 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4580 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4581 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4582 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4583 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4584 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4585 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4586 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4587 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4588 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4589 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4590 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4591 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4592 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4593 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4594 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4595 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4596 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4597 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4598 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4599 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4600 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4601 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4602 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4603 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4604 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4605 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4606 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4607 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4608 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4609 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4610 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4611 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4612 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4613 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4614 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4615 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4616 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4617 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4618 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4619 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4620 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4621 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4622 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4623 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4624 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4625 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4626 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4627 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4628 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4629 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4630 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4631 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4632 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4633 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4634 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4635 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4636 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4637 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4638 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4639 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4640 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4641 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4642 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4643 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4644 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4645 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4646 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4647 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4648 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4649 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4650 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4651 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4652 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4653 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4654 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4655 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4656 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4657 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4658 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4659 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4660 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4661 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4662 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4663 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4664 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4665 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4666 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4667 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4668 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4669 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4671 /* SPE binary operations expecting a 5-bit unsigned literal. */
4672 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4674 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4675 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4676 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4677 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4678 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4679 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4680 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4681 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4682 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4683 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4684 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4685 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4686 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4687 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4688 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4689 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4690 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4691 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4692 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4693 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4694 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4695 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4696 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4697 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4698 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4699 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4701 /* Place-holder. Leave as last binary SPE builtin. */
4702 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4705 /* AltiVec predicates. */
4707 struct builtin_description_predicates
4709 const unsigned int mask;
4710 const enum insn_code icode;
4712 const char *const name;
4713 const enum rs6000_builtins code;
4716 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4718 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4719 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4720 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4721 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4722 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4723 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4724 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4725 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4726 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4727 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4728 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4729 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4730 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4733 /* SPE predicates. */
4734 static struct builtin_description bdesc_spe_predicates[] =
4736 /* Place-holder. Leave as first. */
4737 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4738 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4739 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4740 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4741 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4742 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4743 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4744 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4745 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4746 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4747 /* Place-holder. Leave as last. */
4748 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4751 /* SPE evsel predicates. */
4752 static struct builtin_description bdesc_spe_evsel[] =
4754 /* Place-holder. Leave as first. */
4755 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4756 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4757 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4758 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4759 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4760 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4761 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4762 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4763 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4764 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4765 /* Place-holder. Leave as last. */
4766 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4769 /* ABS* operations. */
4771 static const struct builtin_description bdesc_abs[] =
4773 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4774 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4775 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4776 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4777 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4778 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4779 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4782 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4785 static struct builtin_description bdesc_1arg[] =
4787 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4788 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4789 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4790 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4791 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4792 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4793 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4794 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4795 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4796 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4797 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4798 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4799 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4800 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4801 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4802 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4803 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4805 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4806 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4807 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4808 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4809 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4810 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4811 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4812 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4813 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4814 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4815 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4816 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4817 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4818 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4819 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4820 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4821 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4822 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4823 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4824 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4825 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4826 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4827 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4828 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4829 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4830 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4831 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4832 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4833 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4834 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4836 /* Place-holder. Leave as last unary SPE builtin. */
4837 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4841 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
4844 tree arg0 = TREE_VALUE (arglist);
4845 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4846 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4847 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4849 if (icode == CODE_FOR_nothing)
4850 /* Builtin not supported on this processor. */
4853 /* If we got invalid arguments bail out before generating bad rtl. */
4854 if (arg0 == error_mark_node)
4857 if (icode == CODE_FOR_altivec_vspltisb
4858 || icode == CODE_FOR_altivec_vspltish
4859 || icode == CODE_FOR_altivec_vspltisw
4860 || icode == CODE_FOR_spe_evsplatfi
4861 || icode == CODE_FOR_spe_evsplati)
4863 /* Only allow 5-bit *signed* literals. */
4864 if (GET_CODE (op0) != CONST_INT
4865 || INTVAL (op0) > 0x1f
4866 || INTVAL (op0) < -0x1f)
4868 error ("argument 1 must be a 5-bit signed literal");
4874 || GET_MODE (target) != tmode
4875 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4876 target = gen_reg_rtx (tmode);
4878 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4879 op0 = copy_to_mode_reg (mode0, op0);
4881 pat = GEN_FCN (icode) (target, op0);
4890 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
4892 rtx pat, scratch1, scratch2;
4893 tree arg0 = TREE_VALUE (arglist);
4894 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4895 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4896 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4898 /* If we have invalid arguments, bail out before generating bad rtl. */
4899 if (arg0 == error_mark_node)
4903 || GET_MODE (target) != tmode
4904 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4905 target = gen_reg_rtx (tmode);
4907 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4908 op0 = copy_to_mode_reg (mode0, op0);
4910 scratch1 = gen_reg_rtx (mode0);
4911 scratch2 = gen_reg_rtx (mode0);
4913 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4922 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
4925 tree arg0 = TREE_VALUE (arglist);
4926 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4927 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4928 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4929 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4930 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4931 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4933 if (icode == CODE_FOR_nothing)
4934 /* Builtin not supported on this processor. */
4937 /* If we got invalid arguments bail out before generating bad rtl. */
4938 if (arg0 == error_mark_node || arg1 == error_mark_node)
4941 if (icode == CODE_FOR_altivec_vcfux
4942 || icode == CODE_FOR_altivec_vcfsx
4943 || icode == CODE_FOR_altivec_vctsxs
4944 || icode == CODE_FOR_altivec_vctuxs
4945 || icode == CODE_FOR_altivec_vspltb
4946 || icode == CODE_FOR_altivec_vsplth
4947 || icode == CODE_FOR_altivec_vspltw
4948 || icode == CODE_FOR_spe_evaddiw
4949 || icode == CODE_FOR_spe_evldd
4950 || icode == CODE_FOR_spe_evldh
4951 || icode == CODE_FOR_spe_evldw
4952 || icode == CODE_FOR_spe_evlhhesplat
4953 || icode == CODE_FOR_spe_evlhhossplat
4954 || icode == CODE_FOR_spe_evlhhousplat
4955 || icode == CODE_FOR_spe_evlwhe
4956 || icode == CODE_FOR_spe_evlwhos
4957 || icode == CODE_FOR_spe_evlwhou
4958 || icode == CODE_FOR_spe_evlwhsplat
4959 || icode == CODE_FOR_spe_evlwwsplat
4960 || icode == CODE_FOR_spe_evrlwi
4961 || icode == CODE_FOR_spe_evslwi
4962 || icode == CODE_FOR_spe_evsrwis
4963 || icode == CODE_FOR_spe_evsubifw
4964 || icode == CODE_FOR_spe_evsrwiu)
4966 /* Only allow 5-bit unsigned literals. */
4967 if (TREE_CODE (arg1) != INTEGER_CST
4968 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4970 error ("argument 2 must be a 5-bit unsigned literal");
4976 || GET_MODE (target) != tmode
4977 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4978 target = gen_reg_rtx (tmode);
4980 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4981 op0 = copy_to_mode_reg (mode0, op0);
4982 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4983 op1 = copy_to_mode_reg (mode1, op1);
4985 pat = GEN_FCN (icode) (target, op0, op1);
4994 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
4995 tree arglist, rtx target)
4998 tree cr6_form = TREE_VALUE (arglist);
4999 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5000 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5001 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5002 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5003 enum machine_mode tmode = SImode;
5004 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5005 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5008 if (TREE_CODE (cr6_form) != INTEGER_CST)
5010 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5014 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5019 /* If we have invalid arguments, bail out before generating bad rtl. */
5020 if (arg0 == error_mark_node || arg1 == error_mark_node)
5024 || GET_MODE (target) != tmode
5025 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5026 target = gen_reg_rtx (tmode);
5028 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5029 op0 = copy_to_mode_reg (mode0, op0);
5030 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5031 op1 = copy_to_mode_reg (mode1, op1);
5033 scratch = gen_reg_rtx (mode0);
5035 pat = GEN_FCN (icode) (scratch, op0, op1,
5036 gen_rtx (SYMBOL_REF, Pmode, opcode));
5041 /* The vec_any* and vec_all* predicates use the same opcodes for two
5042 different operations, but the bits in CR6 will be different
5043 depending on what information we want. So we have to play tricks
5044 with CR6 to get the right bits out.
5046 If you think this is disgusting, look at the specs for the
5047 AltiVec predicates. */
5049 switch (cr6_form_int)
5052 emit_insn (gen_cr6_test_for_zero (target));
5055 emit_insn (gen_cr6_test_for_zero_reverse (target));
5058 emit_insn (gen_cr6_test_for_lt (target));
5061 emit_insn (gen_cr6_test_for_lt_reverse (target));
5064 error ("argument 1 of __builtin_altivec_predicate is out of range");
5072 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5074 tree arg0 = TREE_VALUE (arglist);
5075 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5076 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5077 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5078 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5079 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5081 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5082 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5083 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5085 /* Invalid arguments. Bail before doing anything stoopid! */
5086 if (arg0 == error_mark_node
5087 || arg1 == error_mark_node
5088 || arg2 == error_mark_node)
5091 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5092 op0 = copy_to_mode_reg (mode2, op0);
5093 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5094 op1 = copy_to_mode_reg (mode0, op1);
5095 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5096 op2 = copy_to_mode_reg (mode1, op2);
5098 pat = GEN_FCN (icode) (op1, op2, op0);
5105 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5108 tree arg0 = TREE_VALUE (arglist);
5109 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5110 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5111 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5112 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5113 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5114 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5115 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5116 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5117 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5119 if (icode == CODE_FOR_nothing)
5120 /* Builtin not supported on this processor. */
5123 /* If we got invalid arguments bail out before generating bad rtl. */
5124 if (arg0 == error_mark_node
5125 || arg1 == error_mark_node
5126 || arg2 == error_mark_node)
5129 if (icode == CODE_FOR_altivec_vsldoi_4sf
5130 || icode == CODE_FOR_altivec_vsldoi_4si
5131 || icode == CODE_FOR_altivec_vsldoi_8hi
5132 || icode == CODE_FOR_altivec_vsldoi_16qi)
5134 /* Only allow 4-bit unsigned literals. */
5135 if (TREE_CODE (arg2) != INTEGER_CST
5136 || TREE_INT_CST_LOW (arg2) & ~0xf)
5138 error ("argument 3 must be a 4-bit unsigned literal");
5144 || GET_MODE (target) != tmode
5145 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5146 target = gen_reg_rtx (tmode);
5148 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5149 op0 = copy_to_mode_reg (mode0, op0);
5150 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5151 op1 = copy_to_mode_reg (mode1, op1);
5152 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5153 op2 = copy_to_mode_reg (mode2, op2);
5155 pat = GEN_FCN (icode) (target, op0, op1, op2);
5163 /* Expand the lvx builtins. */
5165 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5167 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5168 tree arglist = TREE_OPERAND (exp, 1);
5169 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5171 enum machine_mode tmode, mode0;
5173 enum insn_code icode;
5177 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5178 icode = CODE_FOR_altivec_lvx_16qi;
5180 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5181 icode = CODE_FOR_altivec_lvx_8hi;
5183 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5184 icode = CODE_FOR_altivec_lvx_4si;
5186 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5187 icode = CODE_FOR_altivec_lvx_4sf;
5196 arg0 = TREE_VALUE (arglist);
5197 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5198 tmode = insn_data[icode].operand[0].mode;
5199 mode0 = insn_data[icode].operand[1].mode;
5202 || GET_MODE (target) != tmode
5203 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5204 target = gen_reg_rtx (tmode);
5206 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5207 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5209 pat = GEN_FCN (icode) (target, op0);
5216 /* Expand the stvx builtins. */
5218 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5221 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5222 tree arglist = TREE_OPERAND (exp, 1);
5223 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5225 enum machine_mode mode0, mode1;
5227 enum insn_code icode;
5231 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5232 icode = CODE_FOR_altivec_stvx_16qi;
5234 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5235 icode = CODE_FOR_altivec_stvx_8hi;
5237 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5238 icode = CODE_FOR_altivec_stvx_4si;
5240 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5241 icode = CODE_FOR_altivec_stvx_4sf;
5248 arg0 = TREE_VALUE (arglist);
5249 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5250 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5251 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5252 mode0 = insn_data[icode].operand[0].mode;
5253 mode1 = insn_data[icode].operand[1].mode;
5255 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5256 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5257 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5258 op1 = copy_to_mode_reg (mode1, op1);
5260 pat = GEN_FCN (icode) (op0, op1);
5268 /* Expand the dst builtins. */
5270 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5273 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5274 tree arglist = TREE_OPERAND (exp, 1);
5275 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5276 tree arg0, arg1, arg2;
5277 enum machine_mode mode0, mode1, mode2;
5278 rtx pat, op0, op1, op2;
5279 struct builtin_description *d;
5284 /* Handle DST variants. */
5285 d = (struct builtin_description *) bdesc_dst;
5286 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5287 if (d->code == fcode)
5289 arg0 = TREE_VALUE (arglist);
5290 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5291 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5292 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5293 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5294 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5295 mode0 = insn_data[d->icode].operand[0].mode;
5296 mode1 = insn_data[d->icode].operand[1].mode;
5297 mode2 = insn_data[d->icode].operand[2].mode;
5299 /* Invalid arguments, bail out before generating bad rtl. */
5300 if (arg0 == error_mark_node
5301 || arg1 == error_mark_node
5302 || arg2 == error_mark_node)
5305 if (TREE_CODE (arg2) != INTEGER_CST
5306 || TREE_INT_CST_LOW (arg2) & ~0x3)
5308 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5312 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5313 op0 = copy_to_mode_reg (mode0, op0);
5314 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5315 op1 = copy_to_mode_reg (mode1, op1);
5317 pat = GEN_FCN (d->icode) (op0, op1, op2);
5328 /* Expand the builtin in EXP and store the result in TARGET. Store
5329 true in *EXPANDEDP if we found a builtin to expand. */
5331 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5333 struct builtin_description *d;
5334 struct builtin_description_predicates *dp;
5336 enum insn_code icode;
5337 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5338 tree arglist = TREE_OPERAND (exp, 1);
5341 enum machine_mode tmode, mode0;
5342 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5344 target = altivec_expand_ld_builtin (exp, target, expandedp);
5348 target = altivec_expand_st_builtin (exp, target, expandedp);
5352 target = altivec_expand_dst_builtin (exp, target, expandedp);
5360 case ALTIVEC_BUILTIN_STVX:
5361 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5362 case ALTIVEC_BUILTIN_STVEBX:
5363 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5364 case ALTIVEC_BUILTIN_STVEHX:
5365 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5366 case ALTIVEC_BUILTIN_STVEWX:
5367 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5368 case ALTIVEC_BUILTIN_STVXL:
5369 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5371 case ALTIVEC_BUILTIN_MFVSCR:
5372 icode = CODE_FOR_altivec_mfvscr;
5373 tmode = insn_data[icode].operand[0].mode;
5376 || GET_MODE (target) != tmode
5377 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5378 target = gen_reg_rtx (tmode);
5380 pat = GEN_FCN (icode) (target);
5386 case ALTIVEC_BUILTIN_MTVSCR:
5387 icode = CODE_FOR_altivec_mtvscr;
5388 arg0 = TREE_VALUE (arglist);
5389 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5390 mode0 = insn_data[icode].operand[0].mode;
5392 /* If we got invalid arguments bail out before generating bad rtl. */
5393 if (arg0 == error_mark_node)
5396 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5397 op0 = copy_to_mode_reg (mode0, op0);
5399 pat = GEN_FCN (icode) (op0);
5404 case ALTIVEC_BUILTIN_DSSALL:
5405 emit_insn (gen_altivec_dssall ());
5408 case ALTIVEC_BUILTIN_DSS:
5409 icode = CODE_FOR_altivec_dss;
5410 arg0 = TREE_VALUE (arglist);
5411 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5412 mode0 = insn_data[icode].operand[0].mode;
5414 /* If we got invalid arguments bail out before generating bad rtl. */
5415 if (arg0 == error_mark_node)
5418 if (TREE_CODE (arg0) != INTEGER_CST
5419 || TREE_INT_CST_LOW (arg0) & ~0x3)
5421 error ("argument to dss must be a 2-bit unsigned literal");
5425 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5426 op0 = copy_to_mode_reg (mode0, op0);
5428 emit_insn (gen_altivec_dss (op0));
5432 /* Expand abs* operations. */
5433 d = (struct builtin_description *) bdesc_abs;
5434 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5435 if (d->code == fcode)
5436 return altivec_expand_abs_builtin (d->icode, arglist, target);
5438 /* Expand the AltiVec predicates. */
5439 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5440 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5441 if (dp->code == fcode)
5442 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5444 /* LV* are funky. We initialized them differently. */
5447 case ALTIVEC_BUILTIN_LVSL:
5448 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5450 case ALTIVEC_BUILTIN_LVSR:
5451 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5453 case ALTIVEC_BUILTIN_LVEBX:
5454 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5456 case ALTIVEC_BUILTIN_LVEHX:
5457 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5459 case ALTIVEC_BUILTIN_LVEWX:
5460 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5462 case ALTIVEC_BUILTIN_LVXL:
5463 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5465 case ALTIVEC_BUILTIN_LVX:
5466 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5477 /* Binops that need to be initialized manually, but can be expanded
5478 automagically by rs6000_expand_binop_builtin. */
5479 static struct builtin_description bdesc_2arg_spe[] =
5481 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5482 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5483 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5484 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5485 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5486 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5487 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5488 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5489 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5490 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5491 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5492 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5493 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5494 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5495 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5496 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5497 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5498 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5499 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5500 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5501 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5502 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5505 /* Expand the builtin in EXP and store the result in TARGET. Store
5506 true in *EXPANDEDP if we found a builtin to expand.
5508 This expands the SPE builtins that are not simple unary and binary
5511 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
5513 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5514 tree arglist = TREE_OPERAND (exp, 1);
5516 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5517 enum insn_code icode;
5518 enum machine_mode tmode, mode0;
5520 struct builtin_description *d;
5525 /* Syntax check for a 5-bit unsigned immediate. */
5528 case SPE_BUILTIN_EVSTDD:
5529 case SPE_BUILTIN_EVSTDH:
5530 case SPE_BUILTIN_EVSTDW:
5531 case SPE_BUILTIN_EVSTWHE:
5532 case SPE_BUILTIN_EVSTWHO:
5533 case SPE_BUILTIN_EVSTWWE:
5534 case SPE_BUILTIN_EVSTWWO:
5535 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5536 if (TREE_CODE (arg1) != INTEGER_CST
5537 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5539 error ("argument 2 must be a 5-bit unsigned literal");
5547 /* The evsplat*i instructions are not quite generic. */
5550 case SPE_BUILTIN_EVSPLATFI:
5551 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5553 case SPE_BUILTIN_EVSPLATI:
5554 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5560 d = (struct builtin_description *) bdesc_2arg_spe;
5561 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5562 if (d->code == fcode)
5563 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5565 d = (struct builtin_description *) bdesc_spe_predicates;
5566 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5567 if (d->code == fcode)
5568 return spe_expand_predicate_builtin (d->icode, arglist, target);
5570 d = (struct builtin_description *) bdesc_spe_evsel;
5571 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5572 if (d->code == fcode)
5573 return spe_expand_evsel_builtin (d->icode, arglist, target);
5577 case SPE_BUILTIN_EVSTDDX:
5578 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5579 case SPE_BUILTIN_EVSTDHX:
5580 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5581 case SPE_BUILTIN_EVSTDWX:
5582 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5583 case SPE_BUILTIN_EVSTWHEX:
5584 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5585 case SPE_BUILTIN_EVSTWHOX:
5586 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5587 case SPE_BUILTIN_EVSTWWEX:
5588 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5589 case SPE_BUILTIN_EVSTWWOX:
5590 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5591 case SPE_BUILTIN_EVSTDD:
5592 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5593 case SPE_BUILTIN_EVSTDH:
5594 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5595 case SPE_BUILTIN_EVSTDW:
5596 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5597 case SPE_BUILTIN_EVSTWHE:
5598 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5599 case SPE_BUILTIN_EVSTWHO:
5600 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5601 case SPE_BUILTIN_EVSTWWE:
5602 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5603 case SPE_BUILTIN_EVSTWWO:
5604 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5605 case SPE_BUILTIN_MFSPEFSCR:
5606 icode = CODE_FOR_spe_mfspefscr;
5607 tmode = insn_data[icode].operand[0].mode;
5610 || GET_MODE (target) != tmode
5611 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5612 target = gen_reg_rtx (tmode);
5614 pat = GEN_FCN (icode) (target);
5619 case SPE_BUILTIN_MTSPEFSCR:
5620 icode = CODE_FOR_spe_mtspefscr;
5621 arg0 = TREE_VALUE (arglist);
5622 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5623 mode0 = insn_data[icode].operand[0].mode;
5625 if (arg0 == error_mark_node)
5628 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5629 op0 = copy_to_mode_reg (mode0, op0);
5631 pat = GEN_FCN (icode) (op0);
5644 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
5646 rtx pat, scratch, tmp;
5647 tree form = TREE_VALUE (arglist);
5648 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5649 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5650 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5651 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5652 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5653 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5657 if (TREE_CODE (form) != INTEGER_CST)
5659 error ("argument 1 of __builtin_spe_predicate must be a constant");
5663 form_int = TREE_INT_CST_LOW (form);
5668 if (arg0 == error_mark_node || arg1 == error_mark_node)
5672 || GET_MODE (target) != SImode
5673 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5674 target = gen_reg_rtx (SImode);
5676 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5677 op0 = copy_to_mode_reg (mode0, op0);
5678 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5679 op1 = copy_to_mode_reg (mode1, op1);
5681 scratch = gen_reg_rtx (CCmode);
5683 pat = GEN_FCN (icode) (scratch, op0, op1);
5688 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5689 _lower_. We use one compare, but look in different bits of the
5690 CR for each variant.
5692 There are 2 elements in each SPE simd type (upper/lower). The CR
5693 bits are set as follows:
5695 BIT0 | BIT 1 | BIT 2 | BIT 3
5696 U | L | (U | L) | (U & L)
5698 So, for an "all" relationship, BIT 3 would be set.
5699 For an "any" relationship, BIT 2 would be set. Etc.
5701 Following traditional nomenclature, these bits map to:
5703 BIT0 | BIT 1 | BIT 2 | BIT 3
5706 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5711 /* All variant. OV bit. */
5713 /* We need to get to the OV bit, which is the ORDERED bit. We
5714 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5715 that's ugly and will trigger a validate_condition_mode abort.
5716 So let's just use another pattern. */
5717 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5719 /* Any variant. EQ bit. */
5723 /* Upper variant. LT bit. */
5727 /* Lower variant. GT bit. */
5732 error ("argument 1 of __builtin_spe_predicate is out of range");
5736 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5737 emit_move_insn (target, tmp);
5742 /* The evsel builtins look like this:
5744 e = __builtin_spe_evsel_OP (a, b, c, d);
5748 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5749 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5753 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
5756 tree arg0 = TREE_VALUE (arglist);
5757 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5758 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5759 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5760 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5761 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5762 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5763 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5764 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5765 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5770 if (arg0 == error_mark_node || arg1 == error_mark_node
5771 || arg2 == error_mark_node || arg3 == error_mark_node)
5775 || GET_MODE (target) != mode0
5776 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5777 target = gen_reg_rtx (mode0);
5779 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5780 op0 = copy_to_mode_reg (mode0, op0);
5781 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5782 op1 = copy_to_mode_reg (mode0, op1);
5783 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5784 op2 = copy_to_mode_reg (mode0, op2);
5785 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5786 op3 = copy_to_mode_reg (mode0, op3);
5788 /* Generate the compare. */
5789 scratch = gen_reg_rtx (CCmode);
5790 pat = GEN_FCN (icode) (scratch, op0, op1);
5795 if (mode0 == V2SImode)
5796 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5798 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5803 /* Expand an expression EXP that calls a built-in function,
5804 with result going to TARGET if that's convenient
5805 (and in mode MODE if that's convenient).
5806 SUBTARGET may be used as the target for computing one of EXP's operands.
5807 IGNORE is nonzero if the value is to be ignored. */
5810 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
5811 enum machine_mode mode ATTRIBUTE_UNUSED,
5812 int ignore ATTRIBUTE_UNUSED)
5814 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5815 tree arglist = TREE_OPERAND (exp, 1);
5816 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5817 struct builtin_description *d;
5824 ret = altivec_expand_builtin (exp, target, &success);
5831 ret = spe_expand_builtin (exp, target, &success);
5837 if (TARGET_ALTIVEC || TARGET_SPE)
5839 /* Handle simple unary operations. */
5840 d = (struct builtin_description *) bdesc_1arg;
5841 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5842 if (d->code == fcode)
5843 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5845 /* Handle simple binary operations. */
5846 d = (struct builtin_description *) bdesc_2arg;
5847 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5848 if (d->code == fcode)
5849 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5851 /* Handle simple ternary operations. */
5852 d = (struct builtin_description *) bdesc_3arg;
5853 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5854 if (d->code == fcode)
5855 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5863 rs6000_init_builtins ()
5865 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5866 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5867 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5870 spe_init_builtins ();
5872 altivec_init_builtins ();
5873 if (TARGET_ALTIVEC || TARGET_SPE)
5874 rs6000_common_init_builtins ();
5877 /* Search through a set of builtins and enable the mask bits.
5878 DESC is an array of builtins.
5879 SIZE is the total number of builtins.
5880 START is the builtin enum at which to start.
5881 END is the builtin enum at which to end. */
5883 enable_mask_for_builtins (struct builtin_description *desc, int size,
5884 enum rs6000_builtins start,
5885 enum rs6000_builtins end)
5889 for (i = 0; i < size; ++i)
5890 if (desc[i].code == start)
5896 for (; i < size; ++i)
5898 /* Flip all the bits on. */
5899 desc[i].mask = target_flags;
5900 if (desc[i].code == end)
5906 spe_init_builtins ()
5908 tree endlink = void_list_node;
5909 tree puint_type_node = build_pointer_type (unsigned_type_node);
5910 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5911 struct builtin_description *d;
5914 tree v2si_ftype_4_v2si
5915 = build_function_type
5916 (opaque_V2SI_type_node,
5917 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5918 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5919 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5920 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5923 tree v2sf_ftype_4_v2sf
5924 = build_function_type
5925 (opaque_V2SF_type_node,
5926 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5927 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5928 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5929 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5932 tree int_ftype_int_v2si_v2si
5933 = build_function_type
5935 tree_cons (NULL_TREE, integer_type_node,
5936 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5937 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5940 tree int_ftype_int_v2sf_v2sf
5941 = build_function_type
5943 tree_cons (NULL_TREE, integer_type_node,
5944 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5945 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5948 tree void_ftype_v2si_puint_int
5949 = build_function_type (void_type_node,
5950 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5951 tree_cons (NULL_TREE, puint_type_node,
5952 tree_cons (NULL_TREE,
5956 tree void_ftype_v2si_puint_char
5957 = build_function_type (void_type_node,
5958 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5959 tree_cons (NULL_TREE, puint_type_node,
5960 tree_cons (NULL_TREE,
5964 tree void_ftype_v2si_pv2si_int
5965 = build_function_type (void_type_node,
5966 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5967 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5968 tree_cons (NULL_TREE,
5972 tree void_ftype_v2si_pv2si_char
5973 = build_function_type (void_type_node,
5974 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5975 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5976 tree_cons (NULL_TREE,
5981 = build_function_type (void_type_node,
5982 tree_cons (NULL_TREE, integer_type_node, endlink));
5985 = build_function_type (integer_type_node, endlink);
5987 tree v2si_ftype_pv2si_int
5988 = build_function_type (opaque_V2SI_type_node,
5989 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5990 tree_cons (NULL_TREE, integer_type_node,
5993 tree v2si_ftype_puint_int
5994 = build_function_type (opaque_V2SI_type_node,
5995 tree_cons (NULL_TREE, puint_type_node,
5996 tree_cons (NULL_TREE, integer_type_node,
5999 tree v2si_ftype_pushort_int
6000 = build_function_type (opaque_V2SI_type_node,
6001 tree_cons (NULL_TREE, pushort_type_node,
6002 tree_cons (NULL_TREE, integer_type_node,
6005 tree v2si_ftype_signed_char
6006 = build_function_type (opaque_V2SI_type_node,
6007 tree_cons (NULL_TREE, signed_char_type_node,
6010 /* The initialization of the simple binary and unary builtins is
6011 done in rs6000_common_init_builtins, but we have to enable the
6012 mask bits here manually because we have run out of `target_flags'
6013 bits. We really need to redesign this mask business. */
6015 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6016 ARRAY_SIZE (bdesc_2arg),
6019 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6020 ARRAY_SIZE (bdesc_1arg),
6022 SPE_BUILTIN_EVSUBFUSIAAW);
6023 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6024 ARRAY_SIZE (bdesc_spe_predicates),
6025 SPE_BUILTIN_EVCMPEQ,
6026 SPE_BUILTIN_EVFSTSTLT);
6027 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6028 ARRAY_SIZE (bdesc_spe_evsel),
6029 SPE_BUILTIN_EVSEL_CMPGTS,
6030 SPE_BUILTIN_EVSEL_FSTSTEQ);
6032 (*lang_hooks.decls.pushdecl)
6033 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6034 opaque_V2SI_type_node));
6036 /* Initialize irregular SPE builtins. */
6038 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6039 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6040 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6041 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6042 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6043 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6044 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6045 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6046 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6047 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6048 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6049 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6050 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6051 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6052 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6053 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6054 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6055 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6058 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6059 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6060 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6061 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6062 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6063 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6064 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6065 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6066 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6067 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6068 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6069 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6070 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6071 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6072 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6073 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6074 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6075 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6076 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6077 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6078 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6079 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6082 d = (struct builtin_description *) bdesc_spe_predicates;
6083 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6087 switch (insn_data[d->icode].operand[1].mode)
6090 type = int_ftype_int_v2si_v2si;
6093 type = int_ftype_int_v2sf_v2sf;
6099 def_builtin (d->mask, d->name, type, d->code);
6102 /* Evsel predicates. */
6103 d = (struct builtin_description *) bdesc_spe_evsel;
6104 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6108 switch (insn_data[d->icode].operand[1].mode)
6111 type = v2si_ftype_4_v2si;
6114 type = v2sf_ftype_4_v2sf;
6120 def_builtin (d->mask, d->name, type, d->code);
6125 altivec_init_builtins ()
6127 struct builtin_description *d;
6128 struct builtin_description_predicates *dp;
6130 tree pfloat_type_node = build_pointer_type (float_type_node);
6131 tree pint_type_node = build_pointer_type (integer_type_node);
6132 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6133 tree pchar_type_node = build_pointer_type (char_type_node);
6135 tree pvoid_type_node = build_pointer_type (void_type_node);
6137 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6138 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6139 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6140 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6142 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6144 tree int_ftype_int_v4si_v4si
6145 = build_function_type_list (integer_type_node,
6146 integer_type_node, V4SI_type_node,
6147 V4SI_type_node, NULL_TREE);
6148 tree v4sf_ftype_pcfloat
6149 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6150 tree void_ftype_pfloat_v4sf
6151 = build_function_type_list (void_type_node,
6152 pfloat_type_node, V4SF_type_node, NULL_TREE);
6153 tree v4si_ftype_pcint
6154 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6155 tree void_ftype_pint_v4si
6156 = build_function_type_list (void_type_node,
6157 pint_type_node, V4SI_type_node, NULL_TREE);
6158 tree v8hi_ftype_pcshort
6159 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6160 tree void_ftype_pshort_v8hi
6161 = build_function_type_list (void_type_node,
6162 pshort_type_node, V8HI_type_node, NULL_TREE);
6163 tree v16qi_ftype_pcchar
6164 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6165 tree void_ftype_pchar_v16qi
6166 = build_function_type_list (void_type_node,
6167 pchar_type_node, V16QI_type_node, NULL_TREE);
6168 tree void_ftype_v4si
6169 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6170 tree v8hi_ftype_void
6171 = build_function_type (V8HI_type_node, void_list_node);
6172 tree void_ftype_void
6173 = build_function_type (void_type_node, void_list_node);
6175 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6177 tree v16qi_ftype_int_pcvoid
6178 = build_function_type_list (V16QI_type_node,
6179 integer_type_node, pcvoid_type_node, NULL_TREE);
6180 tree v8hi_ftype_int_pcvoid
6181 = build_function_type_list (V8HI_type_node,
6182 integer_type_node, pcvoid_type_node, NULL_TREE);
6183 tree v4si_ftype_int_pcvoid
6184 = build_function_type_list (V4SI_type_node,
6185 integer_type_node, pcvoid_type_node, NULL_TREE);
6187 tree void_ftype_v4si_int_pvoid
6188 = build_function_type_list (void_type_node,
6189 V4SI_type_node, integer_type_node,
6190 pvoid_type_node, NULL_TREE);
6191 tree void_ftype_v16qi_int_pvoid
6192 = build_function_type_list (void_type_node,
6193 V16QI_type_node, integer_type_node,
6194 pvoid_type_node, NULL_TREE);
6195 tree void_ftype_v8hi_int_pvoid
6196 = build_function_type_list (void_type_node,
6197 V8HI_type_node, integer_type_node,
6198 pvoid_type_node, NULL_TREE);
6199 tree int_ftype_int_v8hi_v8hi
6200 = build_function_type_list (integer_type_node,
6201 integer_type_node, V8HI_type_node,
6202 V8HI_type_node, NULL_TREE);
6203 tree int_ftype_int_v16qi_v16qi
6204 = build_function_type_list (integer_type_node,
6205 integer_type_node, V16QI_type_node,
6206 V16QI_type_node, NULL_TREE);
6207 tree int_ftype_int_v4sf_v4sf
6208 = build_function_type_list (integer_type_node,
6209 integer_type_node, V4SF_type_node,
6210 V4SF_type_node, NULL_TREE);
6211 tree v4si_ftype_v4si
6212 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6213 tree v8hi_ftype_v8hi
6214 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6215 tree v16qi_ftype_v16qi
6216 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6217 tree v4sf_ftype_v4sf
6218 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6219 tree void_ftype_pcvoid_int_char
6220 = build_function_type_list (void_type_node,
6221 pcvoid_type_node, integer_type_node,
6222 char_type_node, NULL_TREE);
6224 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6225 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6226 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6227 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6228 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6229 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6230 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6231 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6232 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6233 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6234 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6235 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6236 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6237 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6238 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6239 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6240 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6241 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6242 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6243 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6244 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6245 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6246 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6247 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6248 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6249 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6250 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6251 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6252 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6253 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6254 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6255 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6257 /* Add the DST variants. */
6258 d = (struct builtin_description *) bdesc_dst;
6259 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6260 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6262 /* Initialize the predicates. */
6263 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6264 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6266 enum machine_mode mode1;
6269 mode1 = insn_data[dp->icode].operand[1].mode;
6274 type = int_ftype_int_v4si_v4si;
6277 type = int_ftype_int_v8hi_v8hi;
6280 type = int_ftype_int_v16qi_v16qi;
6283 type = int_ftype_int_v4sf_v4sf;
6289 def_builtin (dp->mask, dp->name, type, dp->code);
6292 /* Initialize the abs* operators. */
6293 d = (struct builtin_description *) bdesc_abs;
6294 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6296 enum machine_mode mode0;
6299 mode0 = insn_data[d->icode].operand[0].mode;
6304 type = v4si_ftype_v4si;
6307 type = v8hi_ftype_v8hi;
6310 type = v16qi_ftype_v16qi;
6313 type = v4sf_ftype_v4sf;
6319 def_builtin (d->mask, d->name, type, d->code);
6324 rs6000_common_init_builtins ()
6326 struct builtin_description *d;
6329 tree v4sf_ftype_v4sf_v4sf_v16qi
6330 = build_function_type_list (V4SF_type_node,
6331 V4SF_type_node, V4SF_type_node,
6332 V16QI_type_node, NULL_TREE);
6333 tree v4si_ftype_v4si_v4si_v16qi
6334 = build_function_type_list (V4SI_type_node,
6335 V4SI_type_node, V4SI_type_node,
6336 V16QI_type_node, NULL_TREE);
6337 tree v8hi_ftype_v8hi_v8hi_v16qi
6338 = build_function_type_list (V8HI_type_node,
6339 V8HI_type_node, V8HI_type_node,
6340 V16QI_type_node, NULL_TREE);
6341 tree v16qi_ftype_v16qi_v16qi_v16qi
6342 = build_function_type_list (V16QI_type_node,
6343 V16QI_type_node, V16QI_type_node,
6344 V16QI_type_node, NULL_TREE);
6345 tree v4si_ftype_char
6346 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6347 tree v8hi_ftype_char
6348 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6349 tree v16qi_ftype_char
6350 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6351 tree v8hi_ftype_v16qi
6352 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6353 tree v4sf_ftype_v4sf
6354 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6356 tree v2si_ftype_v2si_v2si
6357 = build_function_type_list (opaque_V2SI_type_node,
6358 opaque_V2SI_type_node,
6359 opaque_V2SI_type_node, NULL_TREE);
6361 tree v2sf_ftype_v2sf_v2sf
6362 = build_function_type_list (opaque_V2SF_type_node,
6363 opaque_V2SF_type_node,
6364 opaque_V2SF_type_node, NULL_TREE);
6366 tree v2si_ftype_int_int
6367 = build_function_type_list (opaque_V2SI_type_node,
6368 integer_type_node, integer_type_node,
6371 tree v2si_ftype_v2si
6372 = build_function_type_list (opaque_V2SI_type_node,
6373 opaque_V2SI_type_node, NULL_TREE);
6375 tree v2sf_ftype_v2sf
6376 = build_function_type_list (opaque_V2SF_type_node,
6377 opaque_V2SF_type_node, NULL_TREE);
6379 tree v2sf_ftype_v2si
6380 = build_function_type_list (opaque_V2SF_type_node,
6381 opaque_V2SI_type_node, NULL_TREE);
6383 tree v2si_ftype_v2sf
6384 = build_function_type_list (opaque_V2SI_type_node,
6385 opaque_V2SF_type_node, NULL_TREE);
6387 tree v2si_ftype_v2si_char
6388 = build_function_type_list (opaque_V2SI_type_node,
6389 opaque_V2SI_type_node,
6390 char_type_node, NULL_TREE);
6392 tree v2si_ftype_int_char
6393 = build_function_type_list (opaque_V2SI_type_node,
6394 integer_type_node, char_type_node, NULL_TREE);
6396 tree v2si_ftype_char
6397 = build_function_type_list (opaque_V2SI_type_node,
6398 char_type_node, NULL_TREE);
6400 tree int_ftype_int_int
6401 = build_function_type_list (integer_type_node,
6402 integer_type_node, integer_type_node,
6405 tree v4si_ftype_v4si_v4si
6406 = build_function_type_list (V4SI_type_node,
6407 V4SI_type_node, V4SI_type_node, NULL_TREE);
6408 tree v4sf_ftype_v4si_char
6409 = build_function_type_list (V4SF_type_node,
6410 V4SI_type_node, char_type_node, NULL_TREE);
6411 tree v4si_ftype_v4sf_char
6412 = build_function_type_list (V4SI_type_node,
6413 V4SF_type_node, char_type_node, NULL_TREE);
6414 tree v4si_ftype_v4si_char
6415 = build_function_type_list (V4SI_type_node,
6416 V4SI_type_node, char_type_node, NULL_TREE);
6417 tree v8hi_ftype_v8hi_char
6418 = build_function_type_list (V8HI_type_node,
6419 V8HI_type_node, char_type_node, NULL_TREE);
6420 tree v16qi_ftype_v16qi_char
6421 = build_function_type_list (V16QI_type_node,
6422 V16QI_type_node, char_type_node, NULL_TREE);
6423 tree v16qi_ftype_v16qi_v16qi_char
6424 = build_function_type_list (V16QI_type_node,
6425 V16QI_type_node, V16QI_type_node,
6426 char_type_node, NULL_TREE);
6427 tree v8hi_ftype_v8hi_v8hi_char
6428 = build_function_type_list (V8HI_type_node,
6429 V8HI_type_node, V8HI_type_node,
6430 char_type_node, NULL_TREE);
6431 tree v4si_ftype_v4si_v4si_char
6432 = build_function_type_list (V4SI_type_node,
6433 V4SI_type_node, V4SI_type_node,
6434 char_type_node, NULL_TREE);
6435 tree v4sf_ftype_v4sf_v4sf_char
6436 = build_function_type_list (V4SF_type_node,
6437 V4SF_type_node, V4SF_type_node,
6438 char_type_node, NULL_TREE);
6439 tree v4sf_ftype_v4sf_v4sf
6440 = build_function_type_list (V4SF_type_node,
6441 V4SF_type_node, V4SF_type_node, NULL_TREE);
6442 tree v4sf_ftype_v4sf_v4sf_v4si
6443 = build_function_type_list (V4SF_type_node,
6444 V4SF_type_node, V4SF_type_node,
6445 V4SI_type_node, NULL_TREE);
6446 tree v4sf_ftype_v4sf_v4sf_v4sf
6447 = build_function_type_list (V4SF_type_node,
6448 V4SF_type_node, V4SF_type_node,
6449 V4SF_type_node, NULL_TREE);
6450 tree v4si_ftype_v4si_v4si_v4si
6451 = build_function_type_list (V4SI_type_node,
6452 V4SI_type_node, V4SI_type_node,
6453 V4SI_type_node, NULL_TREE);
6454 tree v8hi_ftype_v8hi_v8hi
6455 = build_function_type_list (V8HI_type_node,
6456 V8HI_type_node, V8HI_type_node, NULL_TREE);
6457 tree v8hi_ftype_v8hi_v8hi_v8hi
6458 = build_function_type_list (V8HI_type_node,
6459 V8HI_type_node, V8HI_type_node,
6460 V8HI_type_node, NULL_TREE);
6461 tree v4si_ftype_v8hi_v8hi_v4si
6462 = build_function_type_list (V4SI_type_node,
6463 V8HI_type_node, V8HI_type_node,
6464 V4SI_type_node, NULL_TREE);
6465 tree v4si_ftype_v16qi_v16qi_v4si
6466 = build_function_type_list (V4SI_type_node,
6467 V16QI_type_node, V16QI_type_node,
6468 V4SI_type_node, NULL_TREE);
6469 tree v16qi_ftype_v16qi_v16qi
6470 = build_function_type_list (V16QI_type_node,
6471 V16QI_type_node, V16QI_type_node, NULL_TREE);
6472 tree v4si_ftype_v4sf_v4sf
6473 = build_function_type_list (V4SI_type_node,
6474 V4SF_type_node, V4SF_type_node, NULL_TREE);
6475 tree v8hi_ftype_v16qi_v16qi
6476 = build_function_type_list (V8HI_type_node,
6477 V16QI_type_node, V16QI_type_node, NULL_TREE);
6478 tree v4si_ftype_v8hi_v8hi
6479 = build_function_type_list (V4SI_type_node,
6480 V8HI_type_node, V8HI_type_node, NULL_TREE);
6481 tree v8hi_ftype_v4si_v4si
6482 = build_function_type_list (V8HI_type_node,
6483 V4SI_type_node, V4SI_type_node, NULL_TREE);
6484 tree v16qi_ftype_v8hi_v8hi
6485 = build_function_type_list (V16QI_type_node,
6486 V8HI_type_node, V8HI_type_node, NULL_TREE);
6487 tree v4si_ftype_v16qi_v4si
6488 = build_function_type_list (V4SI_type_node,
6489 V16QI_type_node, V4SI_type_node, NULL_TREE);
6490 tree v4si_ftype_v16qi_v16qi
6491 = build_function_type_list (V4SI_type_node,
6492 V16QI_type_node, V16QI_type_node, NULL_TREE);
6493 tree v4si_ftype_v8hi_v4si
6494 = build_function_type_list (V4SI_type_node,
6495 V8HI_type_node, V4SI_type_node, NULL_TREE);
6496 tree v4si_ftype_v8hi
6497 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6498 tree int_ftype_v4si_v4si
6499 = build_function_type_list (integer_type_node,
6500 V4SI_type_node, V4SI_type_node, NULL_TREE);
6501 tree int_ftype_v4sf_v4sf
6502 = build_function_type_list (integer_type_node,
6503 V4SF_type_node, V4SF_type_node, NULL_TREE);
6504 tree int_ftype_v16qi_v16qi
6505 = build_function_type_list (integer_type_node,
6506 V16QI_type_node, V16QI_type_node, NULL_TREE);
6507 tree int_ftype_v8hi_v8hi
6508 = build_function_type_list (integer_type_node,
6509 V8HI_type_node, V8HI_type_node, NULL_TREE);
6511 /* Add the simple ternary operators. */
6512 d = (struct builtin_description *) bdesc_3arg;
6513 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6516 enum machine_mode mode0, mode1, mode2, mode3;
6519 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6522 mode0 = insn_data[d->icode].operand[0].mode;
6523 mode1 = insn_data[d->icode].operand[1].mode;
6524 mode2 = insn_data[d->icode].operand[2].mode;
6525 mode3 = insn_data[d->icode].operand[3].mode;
6527 /* When all four are of the same mode. */
6528 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6533 type = v4si_ftype_v4si_v4si_v4si;
6536 type = v4sf_ftype_v4sf_v4sf_v4sf;
6539 type = v8hi_ftype_v8hi_v8hi_v8hi;
6542 type = v16qi_ftype_v16qi_v16qi_v16qi;
6548 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6553 type = v4si_ftype_v4si_v4si_v16qi;
6556 type = v4sf_ftype_v4sf_v4sf_v16qi;
6559 type = v8hi_ftype_v8hi_v8hi_v16qi;
6562 type = v16qi_ftype_v16qi_v16qi_v16qi;
6568 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6569 && mode3 == V4SImode)
6570 type = v4si_ftype_v16qi_v16qi_v4si;
6571 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6572 && mode3 == V4SImode)
6573 type = v4si_ftype_v8hi_v8hi_v4si;
6574 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6575 && mode3 == V4SImode)
6576 type = v4sf_ftype_v4sf_v4sf_v4si;
6578 /* vchar, vchar, vchar, 4 bit literal. */
6579 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6581 type = v16qi_ftype_v16qi_v16qi_char;
6583 /* vshort, vshort, vshort, 4 bit literal. */
6584 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6586 type = v8hi_ftype_v8hi_v8hi_char;
6588 /* vint, vint, vint, 4 bit literal. */
6589 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6591 type = v4si_ftype_v4si_v4si_char;
6593 /* vfloat, vfloat, vfloat, 4 bit literal. */
6594 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6596 type = v4sf_ftype_v4sf_v4sf_char;
6601 def_builtin (d->mask, d->name, type, d->code);
6604 /* Add the simple binary operators. */
6605 d = (struct builtin_description *) bdesc_2arg;
6606 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6608 enum machine_mode mode0, mode1, mode2;
6611 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6614 mode0 = insn_data[d->icode].operand[0].mode;
6615 mode1 = insn_data[d->icode].operand[1].mode;
6616 mode2 = insn_data[d->icode].operand[2].mode;
6618 /* When all three operands are of the same mode. */
6619 if (mode0 == mode1 && mode1 == mode2)
6624 type = v4sf_ftype_v4sf_v4sf;
6627 type = v4si_ftype_v4si_v4si;
6630 type = v16qi_ftype_v16qi_v16qi;
6633 type = v8hi_ftype_v8hi_v8hi;
6636 type = v2si_ftype_v2si_v2si;
6639 type = v2sf_ftype_v2sf_v2sf;
6642 type = int_ftype_int_int;
6649 /* A few other combos we really don't want to do manually. */
6651 /* vint, vfloat, vfloat. */
6652 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6653 type = v4si_ftype_v4sf_v4sf;
6655 /* vshort, vchar, vchar. */
6656 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6657 type = v8hi_ftype_v16qi_v16qi;
6659 /* vint, vshort, vshort. */
6660 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6661 type = v4si_ftype_v8hi_v8hi;
6663 /* vshort, vint, vint. */
6664 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6665 type = v8hi_ftype_v4si_v4si;
6667 /* vchar, vshort, vshort. */
6668 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6669 type = v16qi_ftype_v8hi_v8hi;
6671 /* vint, vchar, vint. */
6672 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6673 type = v4si_ftype_v16qi_v4si;
6675 /* vint, vchar, vchar. */
6676 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6677 type = v4si_ftype_v16qi_v16qi;
6679 /* vint, vshort, vint. */
6680 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6681 type = v4si_ftype_v8hi_v4si;
6683 /* vint, vint, 5 bit literal. */
6684 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6685 type = v4si_ftype_v4si_char;
6687 /* vshort, vshort, 5 bit literal. */
6688 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6689 type = v8hi_ftype_v8hi_char;
6691 /* vchar, vchar, 5 bit literal. */
6692 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6693 type = v16qi_ftype_v16qi_char;
6695 /* vfloat, vint, 5 bit literal. */
6696 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6697 type = v4sf_ftype_v4si_char;
6699 /* vint, vfloat, 5 bit literal. */
6700 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6701 type = v4si_ftype_v4sf_char;
6703 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6704 type = v2si_ftype_int_int;
6706 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6707 type = v2si_ftype_v2si_char;
6709 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6710 type = v2si_ftype_int_char;
6713 else if (mode0 == SImode)
6718 type = int_ftype_v4si_v4si;
6721 type = int_ftype_v4sf_v4sf;
6724 type = int_ftype_v16qi_v16qi;
6727 type = int_ftype_v8hi_v8hi;
6737 def_builtin (d->mask, d->name, type, d->code);
6740 /* Add the simple unary operators. */
6741 d = (struct builtin_description *) bdesc_1arg;
6742 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6744 enum machine_mode mode0, mode1;
6747 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6750 mode0 = insn_data[d->icode].operand[0].mode;
6751 mode1 = insn_data[d->icode].operand[1].mode;
6753 if (mode0 == V4SImode && mode1 == QImode)
6754 type = v4si_ftype_char;
6755 else if (mode0 == V8HImode && mode1 == QImode)
6756 type = v8hi_ftype_char;
6757 else if (mode0 == V16QImode && mode1 == QImode)
6758 type = v16qi_ftype_char;
6759 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6760 type = v4sf_ftype_v4sf;
6761 else if (mode0 == V8HImode && mode1 == V16QImode)
6762 type = v8hi_ftype_v16qi;
6763 else if (mode0 == V4SImode && mode1 == V8HImode)
6764 type = v4si_ftype_v8hi;
6765 else if (mode0 == V2SImode && mode1 == V2SImode)
6766 type = v2si_ftype_v2si;
6767 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6768 type = v2sf_ftype_v2sf;
6769 else if (mode0 == V2SFmode && mode1 == V2SImode)
6770 type = v2sf_ftype_v2si;
6771 else if (mode0 == V2SImode && mode1 == V2SFmode)
6772 type = v2si_ftype_v2sf;
6773 else if (mode0 == V2SImode && mode1 == QImode)
6774 type = v2si_ftype_char;
6778 def_builtin (d->mask, d->name, type, d->code);
6783 /* Expand a block move operation, and return 1 if successful. Return 0
6784 if we should let the compiler generate normal code.
6786 operands[0] is the destination
6787 operands[1] is the source
6788 operands[2] is the length
6789 operands[3] is the alignment */
6791 #define MAX_MOVE_REG 4
6794 expand_block_move (rtx operands[])
6796 rtx orig_dest = operands[0];
6797 rtx orig_src = operands[1];
6798 rtx bytes_rtx = operands[2];
6799 rtx align_rtx = operands[3];
6800 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6805 rtx stores[MAX_MOVE_REG];
6808 /* If this is not a fixed size move, just call memcpy */
6812 /* If this is not a fixed size alignment, abort */
6813 if (GET_CODE (align_rtx) != CONST_INT)
6815 align = INTVAL (align_rtx);
6817 /* Anything to move? */
6818 bytes = INTVAL (bytes_rtx);
6822 /* store_one_arg depends on expand_block_move to handle at least the size of
6823 reg_parm_stack_space. */
6824 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6827 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6830 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
6831 rtx (*mov) (rtx, rtx);
6833 enum machine_mode mode = BLKmode;
6837 && bytes > 24 /* move up to 32 bytes at a time */
6845 && ! fixed_regs[12])
6847 move_bytes = (bytes > 32) ? 32 : bytes;
6848 gen_func.movstrsi = gen_movstrsi_8reg;
6850 else if (TARGET_STRING
6851 && bytes > 16 /* move up to 24 bytes at a time */
6857 && ! fixed_regs[10])
6859 move_bytes = (bytes > 24) ? 24 : bytes;
6860 gen_func.movstrsi = gen_movstrsi_6reg;
6862 else if (TARGET_STRING
6863 && bytes > 8 /* move up to 16 bytes at a time */
6869 move_bytes = (bytes > 16) ? 16 : bytes;
6870 gen_func.movstrsi = gen_movstrsi_4reg;
6872 else if (bytes >= 8 && TARGET_POWERPC64
6873 /* 64-bit loads and stores require word-aligned
6875 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6879 gen_func.mov = gen_movdi;
6881 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
6882 { /* move up to 8 bytes at a time */
6883 move_bytes = (bytes > 8) ? 8 : bytes;
6884 gen_func.movstrsi = gen_movstrsi_2reg;
6886 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6887 { /* move 4 bytes */
6890 gen_func.mov = gen_movsi;
6892 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6893 { /* move 2 bytes */
6896 gen_func.mov = gen_movhi;
6898 else if (TARGET_STRING && bytes > 1)
6899 { /* move up to 4 bytes at a time */
6900 move_bytes = (bytes > 4) ? 4 : bytes;
6901 gen_func.movstrsi = gen_movstrsi_1reg;
6903 else /* move 1 byte at a time */
6907 gen_func.mov = gen_movqi;
6910 src = adjust_address (orig_src, mode, offset);
6911 dest = adjust_address (orig_dest, mode, offset);
6913 if (mode != BLKmode)
6915 rtx tmp_reg = gen_reg_rtx (mode);
6917 emit_insn ((*gen_func.mov) (tmp_reg, src));
6918 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
6921 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
6924 for (i = 0; i < num_reg; i++)
6925 emit_insn (stores[i]);
6929 if (mode == BLKmode)
6931 /* Move the address into scratch registers. The movstrsi
6932 patterns require zero offset. */
6933 if (!REG_P (XEXP (src, 0)))
6935 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6936 src = replace_equiv_address (src, src_reg);
6938 set_mem_size (src, GEN_INT (move_bytes));
6940 if (!REG_P (XEXP (dest, 0)))
6942 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6943 dest = replace_equiv_address (dest, dest_reg);
6945 set_mem_size (dest, GEN_INT (move_bytes));
6947 emit_insn ((*gen_func.movstrsi) (dest, src,
6948 GEN_INT (move_bytes & 31),
6957 /* Return 1 if OP is a load multiple operation. It is known to be a
6958 PARALLEL and the first section will be tested. */
6961 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
6963 int count = XVECLEN (op, 0);
6964 unsigned int dest_regno;
6968 /* Perform a quick check so we don't blow up below. */
6970 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6971 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6972 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6975 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6976 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6978 for (i = 1; i < count; i++)
6980 rtx elt = XVECEXP (op, 0, i);
6982 if (GET_CODE (elt) != SET
6983 || GET_CODE (SET_DEST (elt)) != REG
6984 || GET_MODE (SET_DEST (elt)) != SImode
6985 || REGNO (SET_DEST (elt)) != dest_regno + i
6986 || GET_CODE (SET_SRC (elt)) != MEM
6987 || GET_MODE (SET_SRC (elt)) != SImode
6988 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6989 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6990 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6991 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6998 /* Similar, but tests for store multiple. Here, the second vector element
6999 is a CLOBBER. It will be tested later. */
7002 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7004 int count = XVECLEN (op, 0) - 1;
7005 unsigned int src_regno;
7009 /* Perform a quick check so we don't blow up below. */
7011 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7012 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7013 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7016 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7017 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7019 for (i = 1; i < count; i++)
7021 rtx elt = XVECEXP (op, 0, i + 1);
7023 if (GET_CODE (elt) != SET
7024 || GET_CODE (SET_SRC (elt)) != REG
7025 || GET_MODE (SET_SRC (elt)) != SImode
7026 || REGNO (SET_SRC (elt)) != src_regno + i
7027 || GET_CODE (SET_DEST (elt)) != MEM
7028 || GET_MODE (SET_DEST (elt)) != SImode
7029 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7030 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7031 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7032 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7039 /* Return a string to perform a load_multiple operation.
7040 operands[0] is the vector.
7041 operands[1] is the source address.
7042 operands[2] is the first destination register. */
7045 rs6000_output_load_multiple (rtx operands[3])
7047 /* We have to handle the case where the pseudo used to contain the address
7048 is assigned to one of the output registers. */
7050 int words = XVECLEN (operands[0], 0);
7053 if (XVECLEN (operands[0], 0) == 1)
7054 return "{l|lwz} %2,0(%1)";
7056 for (i = 0; i < words; i++)
7057 if (refers_to_regno_p (REGNO (operands[2]) + i,
7058 REGNO (operands[2]) + i + 1, operands[1], 0))
7062 xop[0] = GEN_INT (4 * (words-1));
7063 xop[1] = operands[1];
7064 xop[2] = operands[2];
7065 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7070 xop[0] = GEN_INT (4 * (words-1));
7071 xop[1] = operands[1];
7072 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7073 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7078 for (j = 0; j < words; j++)
7081 xop[0] = GEN_INT (j * 4);
7082 xop[1] = operands[1];
7083 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7084 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7086 xop[0] = GEN_INT (i * 4);
7087 xop[1] = operands[1];
7088 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7093 return "{lsi|lswi} %2,%1,%N0";
7096 /* Return 1 for a parallel vrsave operation. */
7099 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7101 int count = XVECLEN (op, 0);
7102 unsigned int dest_regno, src_regno;
7106 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7107 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7108 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7111 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7112 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7114 if (dest_regno != VRSAVE_REGNO
7115 && src_regno != VRSAVE_REGNO)
7118 for (i = 1; i < count; i++)
7120 rtx elt = XVECEXP (op, 0, i);
7122 if (GET_CODE (elt) != CLOBBER
7123 && GET_CODE (elt) != SET)
7130 /* Return 1 for an PARALLEL suitable for mfcr. */
7133 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7135 int count = XVECLEN (op, 0);
7138 /* Perform a quick check so we don't blow up below. */
7140 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7141 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7142 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7145 for (i = 0; i < count; i++)
7147 rtx exp = XVECEXP (op, 0, i);
7152 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7154 if (GET_CODE (src_reg) != REG
7155 || GET_MODE (src_reg) != CCmode
7156 || ! CR_REGNO_P (REGNO (src_reg)))
7159 if (GET_CODE (exp) != SET
7160 || GET_CODE (SET_DEST (exp)) != REG
7161 || GET_MODE (SET_DEST (exp)) != SImode
7162 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7164 unspec = SET_SRC (exp);
7165 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7167 if (GET_CODE (unspec) != UNSPEC
7168 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7169 || XVECLEN (unspec, 0) != 2
7170 || XVECEXP (unspec, 0, 0) != src_reg
7171 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7172 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7178 /* Return 1 for an PARALLEL suitable for mtcrf. */
7181 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7183 int count = XVECLEN (op, 0);
7187 /* Perform a quick check so we don't blow up below. */
7189 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7190 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7191 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7193 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7195 if (GET_CODE (src_reg) != REG
7196 || GET_MODE (src_reg) != SImode
7197 || ! INT_REGNO_P (REGNO (src_reg)))
7200 for (i = 0; i < count; i++)
7202 rtx exp = XVECEXP (op, 0, i);
7206 if (GET_CODE (exp) != SET
7207 || GET_CODE (SET_DEST (exp)) != REG
7208 || GET_MODE (SET_DEST (exp)) != CCmode
7209 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7211 unspec = SET_SRC (exp);
7212 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7214 if (GET_CODE (unspec) != UNSPEC
7215 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7216 || XVECLEN (unspec, 0) != 2
7217 || XVECEXP (unspec, 0, 0) != src_reg
7218 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7219 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7225 /* Return 1 for an PARALLEL suitable for lmw. */
7228 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7230 int count = XVECLEN (op, 0);
7231 unsigned int dest_regno;
7233 unsigned int base_regno;
7234 HOST_WIDE_INT offset;
7237 /* Perform a quick check so we don't blow up below. */
7239 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7240 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7241 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7244 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7245 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7248 || count != 32 - (int) dest_regno)
7251 if (legitimate_indirect_address_p (src_addr, 0))
7254 base_regno = REGNO (src_addr);
7255 if (base_regno == 0)
7258 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7260 offset = INTVAL (XEXP (src_addr, 1));
7261 base_regno = REGNO (XEXP (src_addr, 0));
7266 for (i = 0; i < count; i++)
7268 rtx elt = XVECEXP (op, 0, i);
7271 HOST_WIDE_INT newoffset;
7273 if (GET_CODE (elt) != SET
7274 || GET_CODE (SET_DEST (elt)) != REG
7275 || GET_MODE (SET_DEST (elt)) != SImode
7276 || REGNO (SET_DEST (elt)) != dest_regno + i
7277 || GET_CODE (SET_SRC (elt)) != MEM
7278 || GET_MODE (SET_SRC (elt)) != SImode)
7280 newaddr = XEXP (SET_SRC (elt), 0);
7281 if (legitimate_indirect_address_p (newaddr, 0))
7286 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7288 addr_reg = XEXP (newaddr, 0);
7289 newoffset = INTVAL (XEXP (newaddr, 1));
7293 if (REGNO (addr_reg) != base_regno
7294 || newoffset != offset + 4 * i)
7301 /* Return 1 for an PARALLEL suitable for stmw. */
7304 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7306 int count = XVECLEN (op, 0);
7307 unsigned int src_regno;
7309 unsigned int base_regno;
7310 HOST_WIDE_INT offset;
7313 /* Perform a quick check so we don't blow up below. */
7315 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7316 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7317 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7320 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7321 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7324 || count != 32 - (int) src_regno)
7327 if (legitimate_indirect_address_p (dest_addr, 0))
7330 base_regno = REGNO (dest_addr);
7331 if (base_regno == 0)
7334 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7336 offset = INTVAL (XEXP (dest_addr, 1));
7337 base_regno = REGNO (XEXP (dest_addr, 0));
7342 for (i = 0; i < count; i++)
7344 rtx elt = XVECEXP (op, 0, i);
7347 HOST_WIDE_INT newoffset;
7349 if (GET_CODE (elt) != SET
7350 || GET_CODE (SET_SRC (elt)) != REG
7351 || GET_MODE (SET_SRC (elt)) != SImode
7352 || REGNO (SET_SRC (elt)) != src_regno + i
7353 || GET_CODE (SET_DEST (elt)) != MEM
7354 || GET_MODE (SET_DEST (elt)) != SImode)
7356 newaddr = XEXP (SET_DEST (elt), 0);
7357 if (legitimate_indirect_address_p (newaddr, 0))
7362 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7364 addr_reg = XEXP (newaddr, 0);
7365 newoffset = INTVAL (XEXP (newaddr, 1));
7369 if (REGNO (addr_reg) != base_regno
7370 || newoffset != offset + 4 * i)
7377 /* A validation routine: say whether CODE, a condition code, and MODE
7378 match. The other alternatives either don't make sense or should
7379 never be generated. */
7382 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
7384 if (GET_RTX_CLASS (code) != '<'
7385 || GET_MODE_CLASS (mode) != MODE_CC)
7388 /* These don't make sense. */
7389 if ((code == GT || code == LT || code == GE || code == LE)
7390 && mode == CCUNSmode)
7393 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7394 && mode != CCUNSmode)
7397 if (mode != CCFPmode
7398 && (code == ORDERED || code == UNORDERED
7399 || code == UNEQ || code == LTGT
7400 || code == UNGT || code == UNLT
7401 || code == UNGE || code == UNLE))
7404 /* These should never be generated except for
7405 flag_finite_math_only. */
7406 if (mode == CCFPmode
7407 && ! flag_finite_math_only
7408 && (code == LE || code == GE
7409 || code == UNEQ || code == LTGT
7410 || code == UNGT || code == UNLT))
7413 /* These are invalid; the information is not there. */
7414 if (mode == CCEQmode
7415 && code != EQ && code != NE)
7419 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7420 We only check the opcode against the mode of the CC value here. */
7423 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7425 enum rtx_code code = GET_CODE (op);
7426 enum machine_mode cc_mode;
7428 if (GET_RTX_CLASS (code) != '<')
7431 cc_mode = GET_MODE (XEXP (op, 0));
7432 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7435 validate_condition_mode (code, cc_mode);
7440 /* Return 1 if OP is a comparison operation that is valid for a branch
7441 insn and which is true if the corresponding bit in the CC register
7445 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
7449 if (! branch_comparison_operator (op, mode))
7452 code = GET_CODE (op);
7453 return (code == EQ || code == LT || code == GT
7454 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7455 || code == LTU || code == GTU
7456 || code == UNORDERED);
7459 /* Return 1 if OP is a comparison operation that is valid for an scc
7460 insn: it must be a positive comparison. */
7463 scc_comparison_operator (rtx op, enum machine_mode mode)
7465 return branch_positive_comparison_operator (op, mode);
7469 trap_comparison_operator (rtx op, enum machine_mode mode)
7471 if (mode != VOIDmode && mode != GET_MODE (op))
7473 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7477 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7479 enum rtx_code code = GET_CODE (op);
7480 return (code == AND || code == IOR || code == XOR);
7484 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7486 enum rtx_code code = GET_CODE (op);
7487 return (code == IOR || code == XOR);
7491 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7493 enum rtx_code code = GET_CODE (op);
7494 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7497 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7498 mask required to convert the result of a rotate insn into a shift
7499 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7502 includes_lshift_p (rtx shiftop, rtx andop)
7504 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7506 shift_mask <<= INTVAL (shiftop);
7508 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7511 /* Similar, but for right shift. */
7514 includes_rshift_p (rtx shiftop, rtx andop)
7516 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7518 shift_mask >>= INTVAL (shiftop);
7520 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7523 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7524 to perform a left shift. It must have exactly SHIFTOP least
7525 significant 0's, then one or more 1's, then zero or more 0's. */
7528 includes_rldic_lshift_p (rtx shiftop, rtx andop)
7530 if (GET_CODE (andop) == CONST_INT)
7532 HOST_WIDE_INT c, lsb, shift_mask;
7535 if (c == 0 || c == ~0)
7539 shift_mask <<= INTVAL (shiftop);
7541 /* Find the least significant one bit. */
7544 /* It must coincide with the LSB of the shift mask. */
7545 if (-lsb != shift_mask)
7548 /* Invert to look for the next transition (if any). */
7551 /* Remove the low group of ones (originally low group of zeros). */
7554 /* Again find the lsb, and check we have all 1's above. */
7558 else if (GET_CODE (andop) == CONST_DOUBLE
7559 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7561 HOST_WIDE_INT low, high, lsb;
7562 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7564 low = CONST_DOUBLE_LOW (andop);
7565 if (HOST_BITS_PER_WIDE_INT < 64)
7566 high = CONST_DOUBLE_HIGH (andop);
7568 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7569 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7572 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7574 shift_mask_high = ~0;
7575 if (INTVAL (shiftop) > 32)
7576 shift_mask_high <<= INTVAL (shiftop) - 32;
7580 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7587 return high == -lsb;
7590 shift_mask_low = ~0;
7591 shift_mask_low <<= INTVAL (shiftop);
7595 if (-lsb != shift_mask_low)
7598 if (HOST_BITS_PER_WIDE_INT < 64)
7603 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7606 return high == -lsb;
7610 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7616 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7617 to perform a left shift. It must have SHIFTOP or more least
7618 significant 0's, with the remainder of the word 1's. */
7621 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
7623 if (GET_CODE (andop) == CONST_INT)
7625 HOST_WIDE_INT c, lsb, shift_mask;
7628 shift_mask <<= INTVAL (shiftop);
7631 /* Find the least significant one bit. */
7634 /* It must be covered by the shift mask.
7635 This test also rejects c == 0. */
7636 if ((lsb & shift_mask) == 0)
7639 /* Check we have all 1's above the transition, and reject all 1's. */
7640 return c == -lsb && lsb != 1;
7642 else if (GET_CODE (andop) == CONST_DOUBLE
7643 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7645 HOST_WIDE_INT low, lsb, shift_mask_low;
7647 low = CONST_DOUBLE_LOW (andop);
7649 if (HOST_BITS_PER_WIDE_INT < 64)
7651 HOST_WIDE_INT high, shift_mask_high;
7653 high = CONST_DOUBLE_HIGH (andop);
7657 shift_mask_high = ~0;
7658 if (INTVAL (shiftop) > 32)
7659 shift_mask_high <<= INTVAL (shiftop) - 32;
7663 if ((lsb & shift_mask_high) == 0)
7666 return high == -lsb;
7672 shift_mask_low = ~0;
7673 shift_mask_low <<= INTVAL (shiftop);
7677 if ((lsb & shift_mask_low) == 0)
7680 return low == -lsb && lsb != 1;
7686 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7687 for lfq and stfq insns.
7689 Note reg1 and reg2 *must* be hard registers. To be sure we will
7690 abort if we are passed pseudo registers. */
7693 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
7695 /* We might have been passed a SUBREG. */
7696 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7699 return (REGNO (reg1) == REGNO (reg2) - 1);
7702 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7703 addr1 and addr2 must be in consecutive memory locations
7704 (addr2 == addr1 + 8). */
7707 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
7712 /* Extract an offset (if used) from the first addr. */
7713 if (GET_CODE (addr1) == PLUS)
7715 /* If not a REG, return zero. */
7716 if (GET_CODE (XEXP (addr1, 0)) != REG)
7720 reg1 = REGNO (XEXP (addr1, 0));
7721 /* The offset must be constant! */
7722 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7724 offset1 = INTVAL (XEXP (addr1, 1));
7727 else if (GET_CODE (addr1) != REG)
7731 reg1 = REGNO (addr1);
7732 /* This was a simple (mem (reg)) expression. Offset is 0. */
7736 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7737 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7738 register as addr1. */
7739 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7741 if (GET_CODE (addr2) != PLUS)
7744 if (GET_CODE (XEXP (addr2, 0)) != REG
7745 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7748 if (reg1 != REGNO (XEXP (addr2, 0)))
7751 /* The offset for the second addr must be 8 more than the first addr. */
7752 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7755 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7760 /* Return the register class of a scratch register needed to copy IN into
7761 or out of a register in CLASS in MODE. If it can be done directly,
7762 NO_REGS is returned. */
7765 secondary_reload_class (enum reg_class class,
7766 enum machine_mode mode ATTRIBUTE_UNUSED, rtx in)
7770 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7772 && MACHOPIC_INDIRECT
7776 /* We cannot copy a symbolic operand directly into anything
7777 other than BASE_REGS for TARGET_ELF. So indicate that a
7778 register from BASE_REGS is needed as an intermediate
7781 On Darwin, pic addresses require a load from memory, which
7782 needs a base register. */
7783 if (class != BASE_REGS
7784 && (GET_CODE (in) == SYMBOL_REF
7785 || GET_CODE (in) == HIGH
7786 || GET_CODE (in) == LABEL_REF
7787 || GET_CODE (in) == CONST))
7791 if (GET_CODE (in) == REG)
7794 if (regno >= FIRST_PSEUDO_REGISTER)
7796 regno = true_regnum (in);
7797 if (regno >= FIRST_PSEUDO_REGISTER)
7801 else if (GET_CODE (in) == SUBREG)
7803 regno = true_regnum (in);
7804 if (regno >= FIRST_PSEUDO_REGISTER)
7810 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7812 if (class == GENERAL_REGS || class == BASE_REGS
7813 || (regno >= 0 && INT_REGNO_P (regno)))
7816 /* Constants, memory, and FP registers can go into FP registers. */
7817 if ((regno == -1 || FP_REGNO_P (regno))
7818 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7821 /* Memory, and AltiVec registers can go into AltiVec registers. */
7822 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7823 && class == ALTIVEC_REGS)
7826 /* We can copy among the CR registers. */
7827 if ((class == CR_REGS || class == CR0_REGS)
7828 && regno >= 0 && CR_REGNO_P (regno))
7831 /* Otherwise, we need GENERAL_REGS. */
7832 return GENERAL_REGS;
7835 /* Given a comparison operation, return the bit number in CCR to test. We
7836 know this is a valid comparison.
7838 SCC_P is 1 if this is for an scc. That means that %D will have been
7839 used instead of %C, so the bits will be in different places.
7841 Return -1 if OP isn't a valid comparison for some reason. */
7844 ccr_bit (rtx op, int scc_p)
7846 enum rtx_code code = GET_CODE (op);
7847 enum machine_mode cc_mode;
7852 if (GET_RTX_CLASS (code) != '<')
7857 if (GET_CODE (reg) != REG
7858 || ! CR_REGNO_P (REGNO (reg)))
7861 cc_mode = GET_MODE (reg);
7862 cc_regnum = REGNO (reg);
7863 base_bit = 4 * (cc_regnum - CR0_REGNO);
7865 validate_condition_mode (code, cc_mode);
7867 /* When generating a sCOND operation, only positive conditions are
7869 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7870 && code != GTU && code != LTU)
7876 if (TARGET_E500 && !TARGET_FPRS
7877 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7878 return base_bit + 1;
7879 return scc_p ? base_bit + 3 : base_bit + 2;
7881 if (TARGET_E500 && !TARGET_FPRS
7882 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7883 return base_bit + 1;
7884 return base_bit + 2;
7885 case GT: case GTU: case UNLE:
7886 return base_bit + 1;
7887 case LT: case LTU: case UNGE:
7889 case ORDERED: case UNORDERED:
7890 return base_bit + 3;
7893 /* If scc, we will have done a cror to put the bit in the
7894 unordered position. So test that bit. For integer, this is ! LT
7895 unless this is an scc insn. */
7896 return scc_p ? base_bit + 3 : base_bit;
7899 return scc_p ? base_bit + 3 : base_bit + 1;
7906 /* Return the GOT register. */
7909 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
7911 /* The second flow pass currently (June 1999) can't update
7912 regs_ever_live without disturbing other parts of the compiler, so
7913 update it here to make the prolog/epilogue code happy. */
7914 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7915 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7917 current_function_uses_pic_offset_table = 1;
7919 return pic_offset_table_rtx;
7922 /* Function to init struct machine_function.
7923 This will be called, via a pointer variable,
7924 from push_function_context. */
7926 static struct machine_function *
7927 rs6000_init_machine_status ()
7929 return ggc_alloc_cleared (sizeof (machine_function));
7932 /* These macros test for integers and extract the low-order bits. */
7934 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7935 && GET_MODE (X) == VOIDmode)
7937 #define INT_LOWPART(X) \
7938 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7944 unsigned long val = INT_LOWPART (op);
7946 /* If the high bit is zero, the value is the first 1 bit we find
7948 if ((val & 0x80000000) == 0)
7950 if ((val & 0xffffffff) == 0)
7954 while (((val <<= 1) & 0x80000000) == 0)
7959 /* If the high bit is set and the low bit is not, or the mask is all
7960 1's, the value is zero. */
7961 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7964 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7967 while (((val >>= 1) & 1) != 0)
7977 unsigned long val = INT_LOWPART (op);
7979 /* If the low bit is zero, the value is the first 1 bit we find from
7983 if ((val & 0xffffffff) == 0)
7987 while (((val >>= 1) & 1) == 0)
7993 /* If the low bit is set and the high bit is not, or the mask is all
7994 1's, the value is 31. */
7995 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7998 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8001 while (((val <<= 1) & 0x80000000) != 0)
8007 /* Locate some local-dynamic symbol still in use by this function
8008 so that we can print its name in some tls_ld pattern. */
8011 rs6000_get_some_local_dynamic_name ()
8015 if (cfun->machine->some_ld_name)
8016 return cfun->machine->some_ld_name;
8018 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8020 && for_each_rtx (&PATTERN (insn),
8021 rs6000_get_some_local_dynamic_name_1, 0))
8022 return cfun->machine->some_ld_name;
8027 /* Helper function for rs6000_get_some_local_dynamic_name. */
8030 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8034 if (GET_CODE (x) == SYMBOL_REF)
8036 const char *str = XSTR (x, 0);
8037 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8039 cfun->machine->some_ld_name = str;
8047 /* Print an operand. Recognize special options, documented below. */
8050 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8051 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8053 #define SMALL_DATA_RELOC "sda21"
8054 #define SMALL_DATA_REG 0
8058 print_operand (FILE *file, rtx x, int code)
8062 unsigned HOST_WIDE_INT uval;
8067 /* Write out an instruction after the call which may be replaced
8068 with glue code by the loader. This depends on the AIX version. */
8069 asm_fprintf (file, RS6000_CALL_GLUE);
8072 /* %a is output_address. */
8075 /* If X is a constant integer whose low-order 5 bits are zero,
8076 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8077 in the AIX assembler where "sri" with a zero shift count
8078 writes a trash instruction. */
8079 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8086 /* If constant, low-order 16 bits of constant, unsigned.
8087 Otherwise, write normally. */
8089 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8091 print_operand (file, x, 0);
8095 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8096 for 64-bit mask direction. */
8097 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8100 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8104 /* X is a CR register. Print the number of the EQ bit of the CR */
8105 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8106 output_operand_lossage ("invalid %%E value");
8108 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8112 /* X is a CR register. Print the shift count needed to move it
8113 to the high-order four bits. */
8114 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8115 output_operand_lossage ("invalid %%f value");
8117 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8121 /* Similar, but print the count for the rotate in the opposite
8123 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8124 output_operand_lossage ("invalid %%F value");
8126 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8130 /* X is a constant integer. If it is negative, print "m",
8131 otherwise print "z". This is to make an aze or ame insn. */
8132 if (GET_CODE (x) != CONST_INT)
8133 output_operand_lossage ("invalid %%G value");
8134 else if (INTVAL (x) >= 0)
8141 /* If constant, output low-order five bits. Otherwise, write
8144 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8146 print_operand (file, x, 0);
8150 /* If constant, output low-order six bits. Otherwise, write
8153 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8155 print_operand (file, x, 0);
8159 /* Print `i' if this is a constant, else nothing. */
8165 /* Write the bit number in CCR for jump. */
8168 output_operand_lossage ("invalid %%j code");
8170 fprintf (file, "%d", i);
8174 /* Similar, but add one for shift count in rlinm for scc and pass
8175 scc flag to `ccr_bit'. */
8178 output_operand_lossage ("invalid %%J code");
8180 /* If we want bit 31, write a shift count of zero, not 32. */
8181 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8185 /* X must be a constant. Write the 1's complement of the
8188 output_operand_lossage ("invalid %%k value");
8190 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8194 /* X must be a symbolic constant on ELF. Write an
8195 expression suitable for an 'addi' that adds in the low 16
8197 if (GET_CODE (x) != CONST)
8199 print_operand_address (file, x);
8204 if (GET_CODE (XEXP (x, 0)) != PLUS
8205 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8206 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8207 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8208 output_operand_lossage ("invalid %%K value");
8209 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8211 /* For GNU as, there must be a non-alphanumeric character
8212 between 'l' and the number. The '-' is added by
8213 print_operand() already. */
8214 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8216 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8220 /* %l is output_asm_label. */
8223 /* Write second word of DImode or DFmode reference. Works on register
8224 or non-indexed memory only. */
8225 if (GET_CODE (x) == REG)
8226 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8227 else if (GET_CODE (x) == MEM)
8229 /* Handle possible auto-increment. Since it is pre-increment and
8230 we have already done it, we can just use an offset of word. */
8231 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8232 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8233 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8236 output_address (XEXP (adjust_address_nv (x, SImode,
8240 if (small_data_operand (x, GET_MODE (x)))
8241 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8242 reg_names[SMALL_DATA_REG]);
8247 /* MB value for a mask operand. */
8248 if (! mask_operand (x, SImode))
8249 output_operand_lossage ("invalid %%m value");
8251 fprintf (file, "%d", extract_MB (x));
8255 /* ME value for a mask operand. */
8256 if (! mask_operand (x, SImode))
8257 output_operand_lossage ("invalid %%M value");
8259 fprintf (file, "%d", extract_ME (x));
8262 /* %n outputs the negative of its operand. */
8265 /* Write the number of elements in the vector times 4. */
8266 if (GET_CODE (x) != PARALLEL)
8267 output_operand_lossage ("invalid %%N value");
8269 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8273 /* Similar, but subtract 1 first. */
8274 if (GET_CODE (x) != PARALLEL)
8275 output_operand_lossage ("invalid %%O value");
8277 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8281 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8283 || INT_LOWPART (x) < 0
8284 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8285 output_operand_lossage ("invalid %%p value");
8287 fprintf (file, "%d", i);
8291 /* The operand must be an indirect memory reference. The result
8292 is the register number. */
8293 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8294 || REGNO (XEXP (x, 0)) >= 32)
8295 output_operand_lossage ("invalid %%P value");
8297 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8301 /* This outputs the logical code corresponding to a boolean
8302 expression. The expression may have one or both operands
8303 negated (if one, only the first one). For condition register
8304 logical operations, it will also treat the negated
8305 CR codes as NOTs, but not handle NOTs of them. */
8307 const char *const *t = 0;
8309 enum rtx_code code = GET_CODE (x);
8310 static const char * const tbl[3][3] = {
8311 { "and", "andc", "nor" },
8312 { "or", "orc", "nand" },
8313 { "xor", "eqv", "xor" } };
8317 else if (code == IOR)
8319 else if (code == XOR)
8322 output_operand_lossage ("invalid %%q value");
8324 if (GET_CODE (XEXP (x, 0)) != NOT)
8328 if (GET_CODE (XEXP (x, 1)) == NOT)
8346 /* X is a CR register. Print the mask for `mtcrf'. */
8347 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8348 output_operand_lossage ("invalid %%R value");
8350 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8354 /* Low 5 bits of 32 - value */
8356 output_operand_lossage ("invalid %%s value");
8358 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8362 /* PowerPC64 mask position. All 0's is excluded.
8363 CONST_INT 32-bit mask is considered sign-extended so any
8364 transition must occur within the CONST_INT, not on the boundary. */
8365 if (! mask64_operand (x, DImode))
8366 output_operand_lossage ("invalid %%S value");
8368 uval = INT_LOWPART (x);
8370 if (uval & 1) /* Clear Left */
8372 #if HOST_BITS_PER_WIDE_INT > 64
8373 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8377 else /* Clear Right */
8380 #if HOST_BITS_PER_WIDE_INT > 64
8381 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8389 fprintf (file, "%d", i);
8393 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8394 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8397 /* Bit 3 is OV bit. */
8398 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8400 /* If we want bit 31, write a shift count of zero, not 32. */
8401 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8405 /* Print the symbolic name of a branch target register. */
8406 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8407 && REGNO (x) != COUNT_REGISTER_REGNUM))
8408 output_operand_lossage ("invalid %%T value");
8409 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8410 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8412 fputs ("ctr", file);
8416 /* High-order 16 bits of constant for use in unsigned operand. */
8418 output_operand_lossage ("invalid %%u value");
8420 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8421 (INT_LOWPART (x) >> 16) & 0xffff);
8425 /* High-order 16 bits of constant for use in signed operand. */
8427 output_operand_lossage ("invalid %%v value");
8429 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8430 (INT_LOWPART (x) >> 16) & 0xffff);
8434 /* Print `u' if this has an auto-increment or auto-decrement. */
8435 if (GET_CODE (x) == MEM
8436 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8437 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8442 /* Print the trap code for this operand. */
8443 switch (GET_CODE (x))
8446 fputs ("eq", file); /* 4 */
8449 fputs ("ne", file); /* 24 */
8452 fputs ("lt", file); /* 16 */
8455 fputs ("le", file); /* 20 */
8458 fputs ("gt", file); /* 8 */
8461 fputs ("ge", file); /* 12 */
8464 fputs ("llt", file); /* 2 */
8467 fputs ("lle", file); /* 6 */
8470 fputs ("lgt", file); /* 1 */
8473 fputs ("lge", file); /* 5 */
8481 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8484 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8485 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8487 print_operand (file, x, 0);
8491 /* MB value for a PowerPC64 rldic operand. */
8492 val = (GET_CODE (x) == CONST_INT
8493 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8498 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8499 if ((val <<= 1) < 0)
8502 #if HOST_BITS_PER_WIDE_INT == 32
8503 if (GET_CODE (x) == CONST_INT && i >= 0)
8504 i += 32; /* zero-extend high-part was all 0's */
8505 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8507 val = CONST_DOUBLE_LOW (x);
8514 for ( ; i < 64; i++)
8515 if ((val <<= 1) < 0)
8520 fprintf (file, "%d", i + 1);
8524 if (GET_CODE (x) == MEM
8525 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8530 /* Like 'L', for third word of TImode */
8531 if (GET_CODE (x) == REG)
8532 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8533 else if (GET_CODE (x) == MEM)
8535 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8536 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8537 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8539 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8540 if (small_data_operand (x, GET_MODE (x)))
8541 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8542 reg_names[SMALL_DATA_REG]);
8547 /* X is a SYMBOL_REF. Write out the name preceded by a
8548 period and without any trailing data in brackets. Used for function
8549 names. If we are configured for System V (or the embedded ABI) on
8550 the PowerPC, do not emit the period, since those systems do not use
8551 TOCs and the like. */
8552 if (GET_CODE (x) != SYMBOL_REF)
8555 if (XSTR (x, 0)[0] != '.')
8557 switch (DEFAULT_ABI)
8572 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8574 assemble_name (file, XSTR (x, 0));
8578 /* Like 'L', for last word of TImode. */
8579 if (GET_CODE (x) == REG)
8580 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8581 else if (GET_CODE (x) == MEM)
8583 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8584 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8585 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8587 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8588 if (small_data_operand (x, GET_MODE (x)))
8589 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8590 reg_names[SMALL_DATA_REG]);
8594 /* Print AltiVec or SPE memory operand. */
8599 if (GET_CODE (x) != MEM)
8607 if (GET_CODE (tmp) == REG)
8609 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8612 /* Handle [reg+UIMM]. */
8613 else if (GET_CODE (tmp) == PLUS &&
8614 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8618 if (GET_CODE (XEXP (tmp, 0)) != REG)
8621 x = INTVAL (XEXP (tmp, 1));
8622 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8626 /* Fall through. Must be [reg+reg]. */
8628 if (GET_CODE (tmp) == REG)
8629 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8630 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8632 if (REGNO (XEXP (tmp, 0)) == 0)
8633 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8634 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8636 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8637 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8645 if (GET_CODE (x) == REG)
8646 fprintf (file, "%s", reg_names[REGNO (x)]);
8647 else if (GET_CODE (x) == MEM)
8649 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8650 know the width from the mode. */
8651 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8652 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8653 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8654 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8655 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8656 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8658 output_address (XEXP (x, 0));
8661 output_addr_const (file, x);
8665 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8669 output_operand_lossage ("invalid %%xn code");
8673 /* Print the address of an operand. */
8676 print_operand_address (FILE *file, rtx x)
8678 if (GET_CODE (x) == REG)
8679 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8680 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8681 || GET_CODE (x) == LABEL_REF)
8683 output_addr_const (file, x);
8684 if (small_data_operand (x, GET_MODE (x)))
8685 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8686 reg_names[SMALL_DATA_REG]);
8687 else if (TARGET_TOC)
8690 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8692 if (REGNO (XEXP (x, 0)) == 0)
8693 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8694 reg_names[ REGNO (XEXP (x, 0)) ]);
8696 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8697 reg_names[ REGNO (XEXP (x, 1)) ]);
8699 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8700 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8701 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8703 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8704 && CONSTANT_P (XEXP (x, 1)))
8706 output_addr_const (file, XEXP (x, 1));
8707 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8711 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8712 && CONSTANT_P (XEXP (x, 1)))
8714 fprintf (file, "lo16(");
8715 output_addr_const (file, XEXP (x, 1));
8716 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8719 else if (legitimate_constant_pool_address_p (x))
8721 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8723 rtx contains_minus = XEXP (x, 1);
8727 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8728 turn it into (sym) for output_addr_const. */
8729 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8730 contains_minus = XEXP (contains_minus, 0);
8732 minus = XEXP (contains_minus, 0);
8733 symref = XEXP (minus, 0);
8734 XEXP (contains_minus, 0) = symref;
8739 name = XSTR (symref, 0);
8740 newname = alloca (strlen (name) + sizeof ("@toc"));
8741 strcpy (newname, name);
8742 strcat (newname, "@toc");
8743 XSTR (symref, 0) = newname;
8745 output_addr_const (file, XEXP (x, 1));
8747 XSTR (symref, 0) = name;
8748 XEXP (contains_minus, 0) = minus;
8751 output_addr_const (file, XEXP (x, 1));
8753 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8759 /* Target hook for assembling integer objects. The PowerPC version has
8760 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8761 is defined. It also needs to handle DI-mode objects on 64-bit
8765 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
8767 #ifdef RELOCATABLE_NEEDS_FIXUP
8768 /* Special handling for SI values. */
8769 if (size == 4 && aligned_p)
8771 extern int in_toc_section (void);
8772 static int recurse = 0;
8774 /* For -mrelocatable, we mark all addresses that need to be fixed up
8775 in the .fixup section. */
8776 if (TARGET_RELOCATABLE
8777 && !in_toc_section ()
8778 && !in_text_section ()
8780 && GET_CODE (x) != CONST_INT
8781 && GET_CODE (x) != CONST_DOUBLE
8787 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8789 ASM_OUTPUT_LABEL (asm_out_file, buf);
8790 fprintf (asm_out_file, "\t.long\t(");
8791 output_addr_const (asm_out_file, x);
8792 fprintf (asm_out_file, ")@fixup\n");
8793 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8794 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8795 fprintf (asm_out_file, "\t.long\t");
8796 assemble_name (asm_out_file, buf);
8797 fprintf (asm_out_file, "\n\t.previous\n");
8801 /* Remove initial .'s to turn a -mcall-aixdesc function
8802 address into the address of the descriptor, not the function
8804 else if (GET_CODE (x) == SYMBOL_REF
8805 && XSTR (x, 0)[0] == '.'
8806 && DEFAULT_ABI == ABI_AIX)
8808 const char *name = XSTR (x, 0);
8809 while (*name == '.')
8812 fprintf (asm_out_file, "\t.long\t%s\n", name);
8816 #endif /* RELOCATABLE_NEEDS_FIXUP */
8817 return default_assemble_integer (x, size, aligned_p);
8820 #ifdef HAVE_GAS_HIDDEN
8821 /* Emit an assembler directive to set symbol visibility for DECL to
8825 rs6000_assemble_visibility (tree decl, int vis)
8827 /* Functions need to have their entry point symbol visibility set as
8828 well as their descriptor symbol visibility. */
8829 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8831 static const char * const visibility_types[] = {
8832 NULL, "internal", "hidden", "protected"
8835 const char *name, *type;
8837 name = ((* targetm.strip_name_encoding)
8838 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8839 type = visibility_types[vis];
8841 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8842 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8845 default_assemble_visibility (decl, vis);
8850 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
8852 /* Reversal of FP compares takes care -- an ordered compare
8853 becomes an unordered compare and vice versa. */
8854 if (mode == CCFPmode
8855 && (!flag_finite_math_only
8856 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8857 || code == UNEQ || code == LTGT))
8858 return reverse_condition_maybe_unordered (code);
8860 return reverse_condition (code);
8863 /* Generate a compare for CODE. Return a brand-new rtx that
8864 represents the result of the compare. */
8867 rs6000_generate_compare (enum rtx_code code)
8869 enum machine_mode comp_mode;
8872 if (rs6000_compare_fp_p)
8873 comp_mode = CCFPmode;
8874 else if (code == GTU || code == LTU
8875 || code == GEU || code == LEU)
8876 comp_mode = CCUNSmode;
8880 /* First, the compare. */
8881 compare_result = gen_reg_rtx (comp_mode);
8883 /* SPE FP compare instructions on the GPRs. Yuck! */
8884 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8885 && rs6000_compare_fp_p)
8887 rtx cmp, or1, or2, or_result, compare_result2;
8895 cmp = flag_finite_math_only
8896 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8898 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8899 rs6000_compare_op1);
8907 cmp = flag_finite_math_only
8908 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8910 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8911 rs6000_compare_op1);
8919 cmp = flag_finite_math_only
8920 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8922 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8923 rs6000_compare_op1);
8929 /* Synthesize LE and GE from LT/GT || EQ. */
8930 if (code == LE || code == GE || code == LEU || code == GEU)
8932 /* Synthesize GE/LE frome GT/LT || EQ. */
8938 case LE: code = LT; break;
8939 case GE: code = GT; break;
8940 case LEU: code = LT; break;
8941 case GEU: code = GT; break;
8945 or1 = gen_reg_rtx (SImode);
8946 or2 = gen_reg_rtx (SImode);
8947 or_result = gen_reg_rtx (CCEQmode);
8948 compare_result2 = gen_reg_rtx (CCFPmode);
8951 cmp = flag_finite_math_only
8952 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8954 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8955 rs6000_compare_op1);
8958 /* The MC8540 FP compare instructions set the CR bits
8959 differently than other PPC compare instructions. For
8960 that matter, there is no generic test instruction, but a
8961 testgt, testlt, and testeq. For a true condition, bit 2
8962 is set (x1xx) in the CR. Following the traditional CR
8968 ... bit 2 would be a GT CR alias, so later on we
8969 look in the GT bits for the branch instructions.
8970 However, we must be careful to emit correct RTL in
8971 the meantime, so optimizations don't get confused. */
8973 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8974 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8976 /* OR them together. */
8977 cmp = gen_rtx_SET (VOIDmode, or_result,
8978 gen_rtx_COMPARE (CCEQmode,
8979 gen_rtx_IOR (SImode, or1, or2),
8981 compare_result = or_result;
8986 /* We only care about 1 bit (x1xx), so map everything to NE to
8987 maintain rtl sanity. We'll get to the right bit (x1xx) at
8988 code output time. */
8989 if (code == NE || code == LTGT)
8990 /* Do the inverse here because we have no cmpne
8991 instruction. We use the cmpeq instruction and expect
8992 to get a 0 instead. */
9001 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9002 gen_rtx_COMPARE (comp_mode,
9004 rs6000_compare_op1)));
9006 /* Some kinds of FP comparisons need an OR operation;
9007 under flag_finite_math_only we don't bother. */
9008 if (rs6000_compare_fp_p
9009 && ! flag_finite_math_only
9010 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9011 && (code == LE || code == GE
9012 || code == UNEQ || code == LTGT
9013 || code == UNGT || code == UNLT))
9015 enum rtx_code or1, or2;
9016 rtx or1_rtx, or2_rtx, compare2_rtx;
9017 rtx or_result = gen_reg_rtx (CCEQmode);
9021 case LE: or1 = LT; or2 = EQ; break;
9022 case GE: or1 = GT; or2 = EQ; break;
9023 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9024 case LTGT: or1 = LT; or2 = GT; break;
9025 case UNGT: or1 = UNORDERED; or2 = GT; break;
9026 case UNLT: or1 = UNORDERED; or2 = LT; break;
9029 validate_condition_mode (or1, comp_mode);
9030 validate_condition_mode (or2, comp_mode);
9031 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9032 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9033 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9034 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9036 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9038 compare_result = or_result;
9042 validate_condition_mode (code, GET_MODE (compare_result));
9044 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9048 /* Emit the RTL for an sCOND pattern. */
9051 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9054 enum machine_mode op_mode;
9055 enum rtx_code cond_code;
9057 condition_rtx = rs6000_generate_compare (code);
9058 cond_code = GET_CODE (condition_rtx);
9061 || cond_code == GE || cond_code == LE
9062 || cond_code == GEU || cond_code == LEU
9063 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9065 rtx not_result = gen_reg_rtx (CCEQmode);
9066 rtx not_op, rev_cond_rtx;
9067 enum machine_mode cc_mode;
9069 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9071 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9072 SImode, XEXP (condition_rtx, 0), const0_rtx);
9073 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9074 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9075 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9078 op_mode = GET_MODE (rs6000_compare_op0);
9079 if (op_mode == VOIDmode)
9080 op_mode = GET_MODE (rs6000_compare_op1);
9082 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9084 PUT_MODE (condition_rtx, DImode);
9085 convert_move (result, condition_rtx, 0);
9089 PUT_MODE (condition_rtx, SImode);
9090 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9094 /* Emit a branch of kind CODE to location LOC. */
9097 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9099 rtx condition_rtx, loc_ref;
9101 condition_rtx = rs6000_generate_compare (code);
9102 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9103 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9104 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9108 /* Return the string to output a conditional branch to LABEL, which is
9109 the operand number of the label, or -1 if the branch is really a
9112 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9113 condition code register and its mode specifies what kind of
9116 REVERSED is nonzero if we should reverse the sense of the comparison.
9118 INSN is the insn. */
9121 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9123 static char string[64];
9124 enum rtx_code code = GET_CODE (op);
9125 rtx cc_reg = XEXP (op, 0);
9126 enum machine_mode mode = GET_MODE (cc_reg);
9127 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9128 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9129 int really_reversed = reversed ^ need_longbranch;
9135 validate_condition_mode (code, mode);
9137 /* Work out which way this really branches. We could use
9138 reverse_condition_maybe_unordered here always but this
9139 makes the resulting assembler clearer. */
9140 if (really_reversed)
9142 /* Reversal of FP compares takes care -- an ordered compare
9143 becomes an unordered compare and vice versa. */
9144 if (mode == CCFPmode)
9145 code = reverse_condition_maybe_unordered (code);
9147 code = reverse_condition (code);
9150 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9152 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9155 /* Opposite of GT. */
9157 else if (code == NE)
9165 /* Not all of these are actually distinct opcodes, but
9166 we distinguish them for clarity of the resulting assembler. */
9168 ccode = "ne"; break;
9170 ccode = "eq"; break;
9172 ccode = "ge"; break;
9173 case GT: case GTU: case UNGT:
9174 ccode = "gt"; break;
9176 ccode = "le"; break;
9177 case LT: case LTU: case UNLT:
9178 ccode = "lt"; break;
9179 case UNORDERED: ccode = "un"; break;
9180 case ORDERED: ccode = "nu"; break;
9181 case UNGE: ccode = "nl"; break;
9182 case UNLE: ccode = "ng"; break;
9187 /* Maybe we have a guess as to how likely the branch is.
9188 The old mnemonics don't have a way to specify this information. */
9190 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9191 if (note != NULL_RTX)
9193 /* PROB is the difference from 50%. */
9194 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9195 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9197 /* Only hint for highly probable/improbable branches on newer
9198 cpus as static prediction overrides processor dynamic
9199 prediction. For older cpus we may as well always hint, but
9200 assume not taken for branches that are very close to 50% as a
9201 mispredicted taken branch is more expensive than a
9202 mispredicted not-taken branch. */
9204 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9206 if (abs (prob) > REG_BR_PROB_BASE / 20
9207 && ((prob > 0) ^ need_longbranch))
9215 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9217 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9219 /* We need to escape any '%' characters in the reg_names string.
9220 Assume they'd only be the first character... */
9221 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9223 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9227 /* If the branch distance was too far, we may have to use an
9228 unconditional branch to go the distance. */
9229 if (need_longbranch)
9230 s += sprintf (s, ",$+8\n\tb %s", label);
9232 s += sprintf (s, ",%s", label);
9238 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9239 operands of the last comparison is nonzero/true, FALSE_COND if it
9240 is zero/false. Return 0 if the hardware has no such operation. */
9243 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9245 enum rtx_code code = GET_CODE (op);
9246 rtx op0 = rs6000_compare_op0;
9247 rtx op1 = rs6000_compare_op1;
9249 enum machine_mode compare_mode = GET_MODE (op0);
9250 enum machine_mode result_mode = GET_MODE (dest);
9253 /* These modes should always match. */
9254 if (GET_MODE (op1) != compare_mode
9255 /* In the isel case however, we can use a compare immediate, so
9256 op1 may be a small constant. */
9257 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9259 if (GET_MODE (true_cond) != result_mode)
9261 if (GET_MODE (false_cond) != result_mode)
9264 /* First, work out if the hardware can do this at all, or
9265 if it's too slow... */
9266 if (! rs6000_compare_fp_p)
9269 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9273 /* Eliminate half of the comparisons by switching operands, this
9274 makes the remaining code simpler. */
9275 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9276 || code == LTGT || code == LT || code == UNLE)
9278 code = reverse_condition_maybe_unordered (code);
9280 true_cond = false_cond;
9284 /* UNEQ and LTGT take four instructions for a comparison with zero,
9285 it'll probably be faster to use a branch here too. */
9286 if (code == UNEQ && HONOR_NANS (compare_mode))
9289 if (GET_CODE (op1) == CONST_DOUBLE)
9290 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9292 /* We're going to try to implement comparisons by performing
9293 a subtract, then comparing against zero. Unfortunately,
9294 Inf - Inf is NaN which is not zero, and so if we don't
9295 know that the operand is finite and the comparison
9296 would treat EQ different to UNORDERED, we can't do it. */
9297 if (HONOR_INFINITIES (compare_mode)
9298 && code != GT && code != UNGE
9299 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9300 /* Constructs of the form (a OP b ? a : b) are safe. */
9301 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9302 || (! rtx_equal_p (op0, true_cond)
9303 && ! rtx_equal_p (op1, true_cond))))
9305 /* At this point we know we can use fsel. */
9307 /* Reduce the comparison to a comparison against zero. */
9308 temp = gen_reg_rtx (compare_mode);
9309 emit_insn (gen_rtx_SET (VOIDmode, temp,
9310 gen_rtx_MINUS (compare_mode, op0, op1)));
9312 op1 = CONST0_RTX (compare_mode);
9314 /* If we don't care about NaNs we can reduce some of the comparisons
9315 down to faster ones. */
9316 if (! HONOR_NANS (compare_mode))
9322 true_cond = false_cond;
9335 /* Now, reduce everything down to a GE. */
9342 temp = gen_reg_rtx (compare_mode);
9343 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9348 temp = gen_reg_rtx (compare_mode);
9349 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9354 temp = gen_reg_rtx (compare_mode);
9355 emit_insn (gen_rtx_SET (VOIDmode, temp,
9356 gen_rtx_NEG (compare_mode,
9357 gen_rtx_ABS (compare_mode, op0))));
9362 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9363 temp = gen_reg_rtx (result_mode);
9364 emit_insn (gen_rtx_SET (VOIDmode, temp,
9365 gen_rtx_IF_THEN_ELSE (result_mode,
9366 gen_rtx_GE (VOIDmode,
9368 true_cond, false_cond)));
9369 false_cond = true_cond;
9372 temp = gen_reg_rtx (compare_mode);
9373 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9378 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9379 temp = gen_reg_rtx (result_mode);
9380 emit_insn (gen_rtx_SET (VOIDmode, temp,
9381 gen_rtx_IF_THEN_ELSE (result_mode,
9382 gen_rtx_GE (VOIDmode,
9384 true_cond, false_cond)));
9385 true_cond = false_cond;
9388 temp = gen_reg_rtx (compare_mode);
9389 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9397 emit_insn (gen_rtx_SET (VOIDmode, dest,
9398 gen_rtx_IF_THEN_ELSE (result_mode,
9399 gen_rtx_GE (VOIDmode,
9401 true_cond, false_cond)));
9405 /* Same as above, but for ints (isel). */
9408 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9410 rtx condition_rtx, cr;
9412 /* All isel implementations thus far are 32-bits. */
9413 if (GET_MODE (rs6000_compare_op0) != SImode)
9416 /* We still have to do the compare, because isel doesn't do a
9417 compare, it just looks at the CRx bits set by a previous compare
9419 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9420 cr = XEXP (condition_rtx, 0);
9422 if (GET_MODE (cr) == CCmode)
9423 emit_insn (gen_isel_signed (dest, condition_rtx,
9424 true_cond, false_cond, cr));
9426 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9427 true_cond, false_cond, cr));
9433 output_isel (rtx *operands)
9437 code = GET_CODE (operands[1]);
9438 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9440 PUT_CODE (operands[1], reverse_condition (code));
9441 return "isel %0,%3,%2,%j1";
9444 return "isel %0,%2,%3,%j1";
9448 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
9450 enum machine_mode mode = GET_MODE (op0);
9454 if (code == SMAX || code == SMIN)
9459 if (code == SMAX || code == UMAX)
9460 target = emit_conditional_move (dest, c, op0, op1, mode,
9463 target = emit_conditional_move (dest, c, op0, op1, mode,
9465 if (target == NULL_RTX)
9468 emit_move_insn (dest, target);
9471 /* Called by altivec splitter.
9473 operands[0] : Destination of move
9474 operands[1] : Source of move
9475 noperands : Size of operands vector
9477 operands[2-5] ([2-3] in 64 bit) : Destination slots
9478 operands[6-9] ([4-5] in 64 bit) : Source slots
9480 Splits the move of operands[1] to operands[0].
9481 This is done, if GPRs are one of the operands. In this case
9482 a sequence of simple move insns has to be issued. The sequence of these
9483 move insns has to be done in correct order to avoid early clobber of the
9484 base register or destructive overlap of registers.
9488 rs6000_split_altivec_in_gprs (rtx *operands)
9490 int nregs, reg, i, j;
9491 enum machine_mode mode;
9493 /* Calculate number to move (2/4 for 32/64 bit mode). */
9495 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9496 mode = GET_MODE (operands[0]);
9497 nregs = HARD_REGNO_NREGS (reg, mode);
9499 if (REG_P (operands[1])
9500 && REG_P (operands[0])
9501 && (REGNO (operands[1]) < REGNO (operands[0])))
9503 /* Move register range backwards, if we have destructive overlap. */
9506 for (i = 0; i < nregs; i++)
9509 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9510 operands[i + 2 + nregs] =
9511 operand_subword (operands[1], j, 0, mode);
9518 if (GET_CODE (operands[1]) == MEM)
9521 /* We have offsettable addresses only. If we use one of the
9522 registers to address memory, we have change that register last. */
9523 breg = GET_CODE (XEXP (operands[1], 0)) == PLUS ?
9524 XEXP (XEXP (operands[1], 0), 0) :
9525 XEXP (operands[1], 0);
9527 if (REGNO (breg) >= REGNO (operands[0])
9528 && REGNO (breg) < REGNO (operands[0]) + nregs)
9529 j = REGNO (breg) - REGNO (operands[0]);
9532 for (i = 0; i < nregs; i++)
9534 /* Calculate index to next subword. */
9539 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9540 operands[i + 2 + nregs] =
9541 operand_subword (operands[1], j, 0, mode);
9548 /* This page contains routines that are used to determine what the
9549 function prologue and epilogue code will do and write them out. */
9551 /* Return the first fixed-point register that is required to be
9552 saved. 32 if none. */
9555 first_reg_to_save ()
9559 /* Find lowest numbered live register. */
9560 for (first_reg = 13; first_reg <= 31; first_reg++)
9561 if (regs_ever_live[first_reg]
9562 && (! call_used_regs[first_reg]
9563 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9564 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9565 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9570 && current_function_uses_pic_offset_table
9571 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9572 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9578 /* Similar, for FP regs. */
9581 first_fp_reg_to_save ()
9585 /* Find lowest numbered live register. */
9586 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9587 if (regs_ever_live[first_reg])
9593 /* Similar, for AltiVec regs. */
9596 first_altivec_reg_to_save ()
9600 /* Stack frame remains as is unless we are in AltiVec ABI. */
9601 if (! TARGET_ALTIVEC_ABI)
9602 return LAST_ALTIVEC_REGNO + 1;
9604 /* Find lowest numbered live register. */
9605 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9606 if (regs_ever_live[i])
9612 /* Return a 32-bit mask of the AltiVec registers we need to set in
9613 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9614 the 32-bit word is 0. */
9617 compute_vrsave_mask ()
9619 unsigned int i, mask = 0;
9621 /* First, find out if we use _any_ altivec registers. */
9622 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9623 if (regs_ever_live[i])
9624 mask |= ALTIVEC_REG_BIT (i);
9629 /* Next, remove the argument registers from the set. These must
9630 be in the VRSAVE mask set by the caller, so we don't need to add
9631 them in again. More importantly, the mask we compute here is
9632 used to generate CLOBBERs in the set_vrsave insn, and we do not
9633 wish the argument registers to die. */
9634 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9635 mask &= ~ALTIVEC_REG_BIT (i);
9637 /* Similarly, remove the return value from the set. */
9640 diddle_return_value (is_altivec_return_reg, &yes);
9642 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9649 is_altivec_return_reg (rtx reg, void *xyes)
9651 bool *yes = (bool *) xyes;
9652 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9657 /* Calculate the stack information for the current function. This is
9658 complicated by having two separate calling sequences, the AIX calling
9659 sequence and the V.4 calling sequence.
9661 AIX (and Darwin/Mac OS X) stack frames look like:
9663 SP----> +---------------------------------------+
9664 | back chain to caller | 0 0
9665 +---------------------------------------+
9666 | saved CR | 4 8 (8-11)
9667 +---------------------------------------+
9669 +---------------------------------------+
9670 | reserved for compilers | 12 24
9671 +---------------------------------------+
9672 | reserved for binders | 16 32
9673 +---------------------------------------+
9674 | saved TOC pointer | 20 40
9675 +---------------------------------------+
9676 | Parameter save area (P) | 24 48
9677 +---------------------------------------+
9678 | Alloca space (A) | 24+P etc.
9679 +---------------------------------------+
9680 | Local variable space (L) | 24+P+A
9681 +---------------------------------------+
9682 | Float/int conversion temporary (X) | 24+P+A+L
9683 +---------------------------------------+
9684 | Save area for AltiVec registers (W) | 24+P+A+L+X
9685 +---------------------------------------+
9686 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9687 +---------------------------------------+
9688 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9689 +---------------------------------------+
9690 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9691 +---------------------------------------+
9692 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9693 +---------------------------------------+
9694 old SP->| back chain to caller's caller |
9695 +---------------------------------------+
9697 The required alignment for AIX configurations is two words (i.e., 8
9701 V.4 stack frames look like:
9703 SP----> +---------------------------------------+
9704 | back chain to caller | 0
9705 +---------------------------------------+
9706 | caller's saved LR | 4
9707 +---------------------------------------+
9708 | Parameter save area (P) | 8
9709 +---------------------------------------+
9710 | Alloca space (A) | 8+P
9711 +---------------------------------------+
9712 | Varargs save area (V) | 8+P+A
9713 +---------------------------------------+
9714 | Local variable space (L) | 8+P+A+V
9715 +---------------------------------------+
9716 | Float/int conversion temporary (X) | 8+P+A+V+L
9717 +---------------------------------------+
9718 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9719 +---------------------------------------+
9720 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9721 +---------------------------------------+
9722 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9723 +---------------------------------------+
9724 | SPE: area for 64-bit GP registers |
9725 +---------------------------------------+
9726 | SPE alignment padding |
9727 +---------------------------------------+
9728 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9729 +---------------------------------------+
9730 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9731 +---------------------------------------+
9732 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9733 +---------------------------------------+
9734 old SP->| back chain to caller's caller |
9735 +---------------------------------------+
9737 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9738 given. (But note below and in sysv4.h that we require only 8 and
9739 may round up the size of our stack frame anyways. The historical
9740 reason is early versions of powerpc-linux which didn't properly
9741 align the stack at program startup. A happy side-effect is that
9742 -mno-eabi libraries can be used with -meabi programs.)
9744 The EABI configuration defaults to the V.4 layout. However,
9745 the stack alignment requirements may differ. If -mno-eabi is not
9746 given, the required stack alignment is 8 bytes; if -mno-eabi is
9747 given, the required alignment is 16 bytes. (But see V.4 comment
9750 #ifndef ABI_STACK_BOUNDARY
9751 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9755 rs6000_stack_info ()
9757 static rs6000_stack_t info, zero_info;
9758 rs6000_stack_t *info_ptr = &info;
9759 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9763 /* Zero all fields portably. */
9768 /* Cache value so we don't rescan instruction chain over and over. */
9769 if (cfun->machine->insn_chain_scanned_p == 0)
9771 cfun->machine->insn_chain_scanned_p = 1;
9772 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9776 /* Select which calling sequence. */
9777 info_ptr->abi = DEFAULT_ABI;
9779 /* Calculate which registers need to be saved & save area size. */
9780 info_ptr->first_gp_reg_save = first_reg_to_save ();
9781 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9782 even if it currently looks like we won't. */
9783 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9784 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9785 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9786 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9787 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9789 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9791 /* For the SPE, we have an additional upper 32-bits on each GPR.
9792 Ideally we should save the entire 64-bits only when the upper
9793 half is used in SIMD instructions. Since we only record
9794 registers live (not the size they are used in), this proves
9795 difficult because we'd have to traverse the instruction chain at
9796 the right time, taking reload into account. This is a real pain,
9797 so we opt to save the GPRs in 64-bits always if but one register
9798 gets used in 64-bits. Otherwise, all the registers in the frame
9799 get saved in 32-bits.
9801 So... since when we save all GPRs (except the SP) in 64-bits, the
9802 traditional GP save area will be empty. */
9803 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9804 info_ptr->gp_size = 0;
9806 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9807 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9809 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9810 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9811 - info_ptr->first_altivec_reg_save);
9813 /* Does this function call anything? */
9814 info_ptr->calls_p = (! current_function_is_leaf
9815 || cfun->machine->ra_needs_full_frame);
9817 /* Determine if we need to save the link register. */
9818 if (rs6000_ra_ever_killed ()
9819 || (DEFAULT_ABI == ABI_AIX
9820 && current_function_profile
9821 && !TARGET_PROFILE_KERNEL)
9822 #ifdef TARGET_RELOCATABLE
9823 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9825 || (info_ptr->first_fp_reg_save != 64
9826 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9827 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9828 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9829 || (DEFAULT_ABI == ABI_DARWIN
9831 && current_function_uses_pic_offset_table)
9832 || info_ptr->calls_p)
9834 info_ptr->lr_save_p = 1;
9835 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9838 /* Determine if we need to save the condition code registers. */
9839 if (regs_ever_live[CR2_REGNO]
9840 || regs_ever_live[CR3_REGNO]
9841 || regs_ever_live[CR4_REGNO])
9843 info_ptr->cr_save_p = 1;
9844 if (DEFAULT_ABI == ABI_V4)
9845 info_ptr->cr_size = reg_size;
9848 /* If the current function calls __builtin_eh_return, then we need
9849 to allocate stack space for registers that will hold data for
9850 the exception handler. */
9851 if (current_function_calls_eh_return)
9854 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9857 /* SPE saves EH registers in 64-bits. */
9858 ehrd_size = i * (TARGET_SPE_ABI
9859 && info_ptr->spe_64bit_regs_used != 0
9860 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9865 /* Determine various sizes. */
9866 info_ptr->reg_size = reg_size;
9867 info_ptr->fixed_size = RS6000_SAVE_AREA;
9868 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9869 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9870 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9873 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9874 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9876 info_ptr->spe_gp_size = 0;
9878 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9880 info_ptr->vrsave_mask = compute_vrsave_mask ();
9881 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9885 info_ptr->vrsave_mask = 0;
9886 info_ptr->vrsave_size = 0;
9889 /* Calculate the offsets. */
9890 switch (DEFAULT_ABI)
9898 info_ptr->fp_save_offset = - info_ptr->fp_size;
9899 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9901 if (TARGET_ALTIVEC_ABI)
9903 info_ptr->vrsave_save_offset
9904 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9906 /* Align stack so vector save area is on a quadword boundary. */
9907 if (info_ptr->altivec_size != 0)
9908 info_ptr->altivec_padding_size
9909 = 16 - (-info_ptr->vrsave_save_offset % 16);
9911 info_ptr->altivec_padding_size = 0;
9913 info_ptr->altivec_save_offset
9914 = info_ptr->vrsave_save_offset
9915 - info_ptr->altivec_padding_size
9916 - info_ptr->altivec_size;
9918 /* Adjust for AltiVec case. */
9919 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9922 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9923 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9924 info_ptr->lr_save_offset = 2*reg_size;
9928 info_ptr->fp_save_offset = - info_ptr->fp_size;
9929 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9930 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9932 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9934 /* Align stack so SPE GPR save area is aligned on a
9935 double-word boundary. */
9936 if (info_ptr->spe_gp_size != 0)
9937 info_ptr->spe_padding_size
9938 = 8 - (-info_ptr->cr_save_offset % 8);
9940 info_ptr->spe_padding_size = 0;
9942 info_ptr->spe_gp_save_offset
9943 = info_ptr->cr_save_offset
9944 - info_ptr->spe_padding_size
9945 - info_ptr->spe_gp_size;
9947 /* Adjust for SPE case. */
9948 info_ptr->toc_save_offset
9949 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9951 else if (TARGET_ALTIVEC_ABI)
9953 info_ptr->vrsave_save_offset
9954 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9956 /* Align stack so vector save area is on a quadword boundary. */
9957 if (info_ptr->altivec_size != 0)
9958 info_ptr->altivec_padding_size
9959 = 16 - (-info_ptr->vrsave_save_offset % 16);
9961 info_ptr->altivec_padding_size = 0;
9963 info_ptr->altivec_save_offset
9964 = info_ptr->vrsave_save_offset
9965 - info_ptr->altivec_padding_size
9966 - info_ptr->altivec_size;
9968 /* Adjust for AltiVec case. */
9969 info_ptr->toc_save_offset
9970 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9973 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9974 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9975 info_ptr->lr_save_offset = reg_size;
9979 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9981 + info_ptr->altivec_size
9982 + info_ptr->altivec_padding_size
9983 + info_ptr->spe_gp_size
9984 + info_ptr->spe_padding_size
9988 + info_ptr->vrsave_size
9989 + info_ptr->toc_size,
9990 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9993 total_raw_size = (info_ptr->vars_size
9994 + info_ptr->parm_size
9995 + info_ptr->save_size
9996 + info_ptr->varargs_size
9997 + info_ptr->fixed_size);
9999 info_ptr->total_size =
10000 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10002 /* Determine if we need to allocate any stack frame:
10004 For AIX we need to push the stack if a frame pointer is needed
10005 (because the stack might be dynamically adjusted), if we are
10006 debugging, if we make calls, or if the sum of fp_save, gp_save,
10007 and local variables are more than the space needed to save all
10008 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10009 + 18*8 = 288 (GPR13 reserved).
10011 For V.4 we don't have the stack cushion that AIX uses, but assume
10012 that the debugger can handle stackless frames. */
10014 if (info_ptr->calls_p)
10015 info_ptr->push_p = 1;
10017 else if (DEFAULT_ABI == ABI_V4)
10018 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10020 else if (frame_pointer_needed)
10021 info_ptr->push_p = 1;
10023 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10024 info_ptr->push_p = 1;
10028 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10030 /* Zero offsets if we're not saving those registers. */
10031 if (info_ptr->fp_size == 0)
10032 info_ptr->fp_save_offset = 0;
10034 if (info_ptr->gp_size == 0)
10035 info_ptr->gp_save_offset = 0;
10037 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10038 info_ptr->altivec_save_offset = 0;
10040 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10041 info_ptr->vrsave_save_offset = 0;
10043 if (! TARGET_SPE_ABI
10044 || info_ptr->spe_64bit_regs_used == 0
10045 || info_ptr->spe_gp_size == 0)
10046 info_ptr->spe_gp_save_offset = 0;
10048 if (! info_ptr->lr_save_p)
10049 info_ptr->lr_save_offset = 0;
10051 if (! info_ptr->cr_save_p)
10052 info_ptr->cr_save_offset = 0;
10054 if (! info_ptr->toc_save_p)
10055 info_ptr->toc_save_offset = 0;
10060 /* Return true if the current function uses any GPRs in 64-bit SIMD
10064 spe_func_has_64bit_regs_p ()
10068 /* Functions that save and restore all the call-saved registers will
10069 need to save/restore the registers in 64-bits. */
10070 if (current_function_calls_eh_return
10071 || current_function_calls_setjmp
10072 || current_function_has_nonlocal_goto)
10075 insns = get_insns ();
10077 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10083 i = PATTERN (insn);
10084 if (GET_CODE (i) == SET
10085 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10094 debug_stack_info (rs6000_stack_t *info)
10096 const char *abi_string;
10099 info = rs6000_stack_info ();
10101 fprintf (stderr, "\nStack information for function %s:\n",
10102 ((current_function_decl && DECL_NAME (current_function_decl))
10103 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10108 default: abi_string = "Unknown"; break;
10109 case ABI_NONE: abi_string = "NONE"; break;
10110 case ABI_AIX: abi_string = "AIX"; break;
10111 case ABI_DARWIN: abi_string = "Darwin"; break;
10112 case ABI_V4: abi_string = "V.4"; break;
10115 fprintf (stderr, "\tABI = %5s\n", abi_string);
10117 if (TARGET_ALTIVEC_ABI)
10118 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10120 if (TARGET_SPE_ABI)
10121 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10123 if (info->first_gp_reg_save != 32)
10124 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10126 if (info->first_fp_reg_save != 64)
10127 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10129 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10130 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10131 info->first_altivec_reg_save);
10133 if (info->lr_save_p)
10134 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10136 if (info->cr_save_p)
10137 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10139 if (info->toc_save_p)
10140 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10142 if (info->vrsave_mask)
10143 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10146 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10149 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10151 if (info->gp_save_offset)
10152 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10154 if (info->fp_save_offset)
10155 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10157 if (info->altivec_save_offset)
10158 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10159 info->altivec_save_offset);
10161 if (info->spe_gp_save_offset)
10162 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10163 info->spe_gp_save_offset);
10165 if (info->vrsave_save_offset)
10166 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10167 info->vrsave_save_offset);
10169 if (info->lr_save_offset)
10170 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10172 if (info->cr_save_offset)
10173 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10175 if (info->toc_save_offset)
10176 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10178 if (info->varargs_save_offset)
10179 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10181 if (info->total_size)
10182 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10184 if (info->varargs_size)
10185 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10187 if (info->vars_size)
10188 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10190 if (info->parm_size)
10191 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10193 if (info->fixed_size)
10194 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10197 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10199 if (info->spe_gp_size)
10200 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10203 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10205 if (info->altivec_size)
10206 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10208 if (info->vrsave_size)
10209 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10211 if (info->altivec_padding_size)
10212 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10213 info->altivec_padding_size);
10215 if (info->spe_padding_size)
10216 fprintf (stderr, "\tspe_padding_size = %5d\n",
10217 info->spe_padding_size);
10220 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10223 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10225 if (info->toc_size)
10226 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10228 if (info->save_size)
10229 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10231 if (info->reg_size != 4)
10232 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10234 fprintf (stderr, "\n");
10238 rs6000_return_addr (int count, rtx frame)
10240 /* Currently we don't optimize very well between prolog and body
10241 code and for PIC code the code can be actually quite bad, so
10242 don't try to be too clever here. */
10243 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10245 cfun->machine->ra_needs_full_frame = 1;
10252 plus_constant (copy_to_reg
10253 (gen_rtx_MEM (Pmode,
10254 memory_address (Pmode, frame))),
10255 RETURN_ADDRESS_OFFSET)));
10258 cfun->machine->ra_need_lr = 1;
10259 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10262 /* Say whether a function is a candidate for sibcall handling or not.
10263 We do not allow indirect calls to be optimized into sibling calls.
10264 Also, we can't do it if there are any vector parameters; there's
10265 nowhere to put the VRsave code so it works; note that functions with
10266 vector parameters are required to have a prototype, so the argument
10267 type info must be available here. (The tail recursion case can work
10268 with vector parameters, but there's no way to distinguish here.) */
10270 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10275 if (TARGET_ALTIVEC_VRSAVE)
10277 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10278 type; type = TREE_CHAIN (type))
10280 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10284 if (DEFAULT_ABI == ABI_DARWIN
10285 || (*targetm.binds_local_p) (decl))
10287 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10289 if (!lookup_attribute ("longcall", attr_list)
10290 || lookup_attribute ("shortcall", attr_list))
10298 rs6000_ra_ever_killed ()
10304 /* Irritatingly, there are two kinds of thunks -- those created with
10305 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10306 through the regular part of the compiler. This is a very hacky
10307 way to tell them apart. */
10308 if (current_function_is_thunk && !no_new_pseudos)
10311 /* regs_ever_live has LR marked as used if any sibcalls are present,
10312 but this should not force saving and restoring in the
10313 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10314 clobbers LR, so that is inappropriate. */
10316 /* Also, the prologue can generate a store into LR that
10317 doesn't really count, like this:
10320 bcl to set PIC register
10324 When we're called from the epilogue, we need to avoid counting
10325 this as a store. */
10327 push_topmost_sequence ();
10328 top = get_insns ();
10329 pop_topmost_sequence ();
10330 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10332 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10336 if (FIND_REG_INC_NOTE (insn, reg))
10338 else if (GET_CODE (insn) == CALL_INSN
10339 && !SIBLING_CALL_P (insn))
10341 else if (set_of (reg, insn) != NULL_RTX
10342 && !prologue_epilogue_contains (insn))
10349 /* Add a REG_MAYBE_DEAD note to the insn. */
10351 rs6000_maybe_dead (rtx insn)
10353 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10358 /* Emit instructions needed to load the TOC register.
10359 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10360 a constant pool; or for SVR4 -fpic. */
10363 rs6000_emit_load_toc_table (int fromprolog)
10366 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10368 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10370 rtx temp = (fromprolog
10371 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10372 : gen_reg_rtx (Pmode));
10373 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10375 rs6000_maybe_dead (insn);
10376 insn = emit_move_insn (dest, temp);
10378 rs6000_maybe_dead (insn);
10380 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10383 rtx tempLR = (fromprolog
10384 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10385 : gen_reg_rtx (Pmode));
10386 rtx temp0 = (fromprolog
10387 ? gen_rtx_REG (Pmode, 0)
10388 : gen_reg_rtx (Pmode));
10391 /* possibly create the toc section */
10392 if (! toc_initialized)
10395 function_section (current_function_decl);
10402 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10403 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10405 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10406 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10408 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10410 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10411 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10418 static int reload_toc_labelno = 0;
10420 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10422 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10423 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10425 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10426 emit_move_insn (dest, tempLR);
10427 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10429 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10431 rs6000_maybe_dead (insn);
10433 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10435 /* This is for AIX code running in non-PIC ELF32. */
10438 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10439 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10441 insn = emit_insn (gen_elf_high (dest, realsym));
10443 rs6000_maybe_dead (insn);
10444 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10446 rs6000_maybe_dead (insn);
10448 else if (DEFAULT_ABI == ABI_AIX)
10451 insn = emit_insn (gen_load_toc_aix_si (dest));
10453 insn = emit_insn (gen_load_toc_aix_di (dest));
10455 rs6000_maybe_dead (insn);
10462 get_TOC_alias_set ()
10464 static int set = -1;
10466 set = new_alias_set ();
10470 /* This returns nonzero if the current function uses the TOC. This is
10471 determined by the presence of (unspec ... UNSPEC_TOC) or
10472 use (unspec ... UNSPEC_TOC), which are generated by the various
10473 load_toc_* patterns. */
10480 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10483 rtx pat = PATTERN (insn);
10486 if (GET_CODE (pat) == PARALLEL)
10487 for (i = 0; i < XVECLEN (pat, 0); i++)
10489 rtx sub = XVECEXP (pat, 0, i);
10490 if (GET_CODE (sub) == USE)
10492 sub = XEXP (sub, 0);
10493 if (GET_CODE (sub) == UNSPEC
10494 && XINT (sub, 1) == UNSPEC_TOC)
10503 create_TOC_reference (rtx symbol)
10505 return gen_rtx_PLUS (Pmode,
10506 gen_rtx_REG (Pmode, TOC_REGISTER),
10507 gen_rtx_CONST (Pmode,
10508 gen_rtx_MINUS (Pmode, symbol,
10509 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10512 /* If _Unwind_* has been called from within the same module,
10513 toc register is not guaranteed to be saved to 40(1) on function
10514 entry. Save it there in that case. */
10517 rs6000_aix_emit_builtin_unwind_init ()
10520 rtx stack_top = gen_reg_rtx (Pmode);
10521 rtx opcode_addr = gen_reg_rtx (Pmode);
10522 rtx opcode = gen_reg_rtx (SImode);
10523 rtx tocompare = gen_reg_rtx (SImode);
10524 rtx no_toc_save_needed = gen_label_rtx ();
10526 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10527 emit_move_insn (stack_top, mem);
10529 mem = gen_rtx_MEM (Pmode,
10530 gen_rtx_PLUS (Pmode, stack_top,
10531 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10532 emit_move_insn (opcode_addr, mem);
10533 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10534 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10535 : 0xE8410028, SImode));
10537 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
10538 SImode, NULL_RTX, NULL_RTX,
10539 no_toc_save_needed);
10541 mem = gen_rtx_MEM (Pmode,
10542 gen_rtx_PLUS (Pmode, stack_top,
10543 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10544 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
10545 emit_label (no_toc_save_needed);
10548 /* This ties together stack memory (MEM with an alias set of
10549 rs6000_sr_alias_set) and the change to the stack pointer. */
10552 rs6000_emit_stack_tie ()
10554 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10556 set_mem_alias_set (mem, rs6000_sr_alias_set);
10557 emit_insn (gen_stack_tie (mem));
10560 /* Emit the correct code for allocating stack space, as insns.
10561 If COPY_R12, make sure a copy of the old frame is left in r12.
10562 The generated code may use hard register 0 as a temporary. */
10565 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
10568 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10569 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10570 rtx todec = GEN_INT (-size);
10572 if (current_function_limit_stack)
10574 if (REG_P (stack_limit_rtx)
10575 && REGNO (stack_limit_rtx) > 1
10576 && REGNO (stack_limit_rtx) <= 31)
10578 emit_insn (TARGET_32BIT
10579 ? gen_addsi3 (tmp_reg,
10582 : gen_adddi3 (tmp_reg,
10586 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10589 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10591 && DEFAULT_ABI == ABI_V4)
10593 rtx toload = gen_rtx_CONST (VOIDmode,
10594 gen_rtx_PLUS (Pmode,
10598 emit_insn (gen_elf_high (tmp_reg, toload));
10599 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10600 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10604 warning ("stack limit expression is not supported");
10607 if (copy_r12 || ! TARGET_UPDATE)
10608 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10614 /* Need a note here so that try_split doesn't get confused. */
10615 if (get_last_insn() == NULL_RTX)
10616 emit_note (NOTE_INSN_DELETED);
10617 insn = emit_move_insn (tmp_reg, todec);
10618 try_split (PATTERN (insn), insn, 0);
10622 insn = emit_insn (TARGET_32BIT
10623 ? gen_movsi_update (stack_reg, stack_reg,
10625 : gen_movdi_update (stack_reg, stack_reg,
10626 todec, stack_reg));
10630 insn = emit_insn (TARGET_32BIT
10631 ? gen_addsi3 (stack_reg, stack_reg, todec)
10632 : gen_adddi3 (stack_reg, stack_reg, todec));
10633 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10634 gen_rtx_REG (Pmode, 12));
10637 RTX_FRAME_RELATED_P (insn) = 1;
10639 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10640 gen_rtx_SET (VOIDmode, stack_reg,
10641 gen_rtx_PLUS (Pmode, stack_reg,
10646 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10647 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10648 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10649 deduce these equivalences by itself so it wasn't necessary to hold
10650 its hand so much. */
10653 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
10654 rtx reg2, rtx rreg)
10658 /* copy_rtx will not make unique copies of registers, so we need to
10659 ensure we don't have unwanted sharing here. */
10661 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10664 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10666 real = copy_rtx (PATTERN (insn));
10668 if (reg2 != NULL_RTX)
10669 real = replace_rtx (real, reg2, rreg);
10671 real = replace_rtx (real, reg,
10672 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10673 STACK_POINTER_REGNUM),
10676 /* We expect that 'real' is either a SET or a PARALLEL containing
10677 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10678 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10680 if (GET_CODE (real) == SET)
10684 temp = simplify_rtx (SET_SRC (set));
10686 SET_SRC (set) = temp;
10687 temp = simplify_rtx (SET_DEST (set));
10689 SET_DEST (set) = temp;
10690 if (GET_CODE (SET_DEST (set)) == MEM)
10692 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10694 XEXP (SET_DEST (set), 0) = temp;
10697 else if (GET_CODE (real) == PARALLEL)
10700 for (i = 0; i < XVECLEN (real, 0); i++)
10701 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10703 rtx set = XVECEXP (real, 0, i);
10705 temp = simplify_rtx (SET_SRC (set));
10707 SET_SRC (set) = temp;
10708 temp = simplify_rtx (SET_DEST (set));
10710 SET_DEST (set) = temp;
10711 if (GET_CODE (SET_DEST (set)) == MEM)
10713 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10715 XEXP (SET_DEST (set), 0) = temp;
10717 RTX_FRAME_RELATED_P (set) = 1;
10724 real = spe_synthesize_frame_save (real);
10726 RTX_FRAME_RELATED_P (insn) = 1;
10727 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10732 /* Given an SPE frame note, return a PARALLEL of SETs with the
10733 original note, plus a synthetic register save. */
10736 spe_synthesize_frame_save (rtx real)
10738 rtx synth, offset, reg, real2;
10740 if (GET_CODE (real) != SET
10741 || GET_MODE (SET_SRC (real)) != V2SImode)
10744 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10745 frame related note. The parallel contains a set of the register
10746 being saved, and another set to a synthetic register (n+1200).
10747 This is so we can differentiate between 64-bit and 32-bit saves.
10748 Words cannot describe this nastiness. */
10750 if (GET_CODE (SET_DEST (real)) != MEM
10751 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10752 || GET_CODE (SET_SRC (real)) != REG)
10756 (set (mem (plus (reg x) (const y)))
10759 (set (mem (plus (reg x) (const y+4)))
10763 real2 = copy_rtx (real);
10764 PUT_MODE (SET_DEST (real2), SImode);
10765 reg = SET_SRC (real2);
10766 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10767 synth = copy_rtx (real2);
10769 if (BYTES_BIG_ENDIAN)
10771 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10772 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10775 reg = SET_SRC (synth);
10777 synth = replace_rtx (synth, reg,
10778 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10780 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10781 synth = replace_rtx (synth, offset,
10782 GEN_INT (INTVAL (offset)
10783 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10785 RTX_FRAME_RELATED_P (synth) = 1;
10786 RTX_FRAME_RELATED_P (real2) = 1;
10787 if (BYTES_BIG_ENDIAN)
10788 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10790 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10795 /* Returns an insn that has a vrsave set operation with the
10796 appropriate CLOBBERs. */
10799 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
10802 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10803 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10806 = gen_rtx_SET (VOIDmode,
10808 gen_rtx_UNSPEC_VOLATILE (SImode,
10809 gen_rtvec (2, reg, vrsave),
10814 /* We need to clobber the registers in the mask so the scheduler
10815 does not move sets to VRSAVE before sets of AltiVec registers.
10817 However, if the function receives nonlocal gotos, reload will set
10818 all call saved registers live. We will end up with:
10820 (set (reg 999) (mem))
10821 (parallel [ (set (reg vrsave) (unspec blah))
10822 (clobber (reg 999))])
10824 The clobber will cause the store into reg 999 to be dead, and
10825 flow will attempt to delete an epilogue insn. In this case, we
10826 need an unspec use/set of the register. */
10828 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10829 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10831 if (!epiloguep || call_used_regs [i])
10832 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10833 gen_rtx_REG (V4SImode, i));
10836 rtx reg = gen_rtx_REG (V4SImode, i);
10839 = gen_rtx_SET (VOIDmode,
10841 gen_rtx_UNSPEC (V4SImode,
10842 gen_rtvec (1, reg), 27));
10846 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10848 for (i = 0; i < nclobs; ++i)
10849 XVECEXP (insn, 0, i) = clobs[i];
10854 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10855 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10858 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
10859 unsigned int regno, int offset, int total_size)
10861 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10862 rtx replacea, replaceb;
10864 int_rtx = GEN_INT (offset);
10866 /* Some cases that need register indexed addressing. */
10867 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10869 && SPE_VECTOR_MODE (mode)
10870 && !SPE_CONST_OFFSET_OK (offset)))
10872 /* Whomever calls us must make sure r11 is available in the
10873 flow path of instructions in the prologue. */
10874 offset_rtx = gen_rtx_REG (Pmode, 11);
10875 emit_move_insn (offset_rtx, int_rtx);
10877 replacea = offset_rtx;
10878 replaceb = int_rtx;
10882 offset_rtx = int_rtx;
10883 replacea = NULL_RTX;
10884 replaceb = NULL_RTX;
10887 reg = gen_rtx_REG (mode, regno);
10888 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10889 mem = gen_rtx_MEM (mode, addr);
10890 set_mem_alias_set (mem, rs6000_sr_alias_set);
10892 insn = emit_move_insn (mem, reg);
10894 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10897 /* Emit an offset memory reference suitable for a frame store, while
10898 converting to a valid addressing mode. */
10901 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
10903 rtx int_rtx, offset_rtx;
10905 int_rtx = GEN_INT (offset);
10907 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10909 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10910 emit_move_insn (offset_rtx, int_rtx);
10913 offset_rtx = int_rtx;
10915 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10918 /* Emit function prologue as insns. */
10921 rs6000_emit_prologue ()
10923 rs6000_stack_t *info = rs6000_stack_info ();
10924 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10925 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10926 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10927 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10928 rtx frame_reg_rtx = sp_reg_rtx;
10929 rtx cr_save_rtx = NULL;
10931 int saving_FPRs_inline;
10932 int using_store_multiple;
10933 HOST_WIDE_INT sp_offset = 0;
10935 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10937 reg_mode = V2SImode;
10941 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10942 && (!TARGET_SPE_ABI
10943 || info->spe_64bit_regs_used == 0)
10944 && info->first_gp_reg_save < 31);
10945 saving_FPRs_inline = (info->first_fp_reg_save == 64
10946 || FP_SAVE_INLINE (info->first_fp_reg_save)
10947 || current_function_calls_eh_return
10948 || cfun->machine->ra_need_lr);
10950 /* For V.4, update stack before we do any saving and set back pointer. */
10952 && (DEFAULT_ABI == ABI_V4
10953 || current_function_calls_eh_return))
10955 if (info->total_size < 32767)
10956 sp_offset = info->total_size;
10958 frame_reg_rtx = frame_ptr_rtx;
10959 rs6000_emit_allocate_stack (info->total_size,
10960 (frame_reg_rtx != sp_reg_rtx
10961 && (info->cr_save_p
10963 || info->first_fp_reg_save < 64
10964 || info->first_gp_reg_save < 32
10966 if (frame_reg_rtx != sp_reg_rtx)
10967 rs6000_emit_stack_tie ();
10970 /* Save AltiVec registers if needed. */
10971 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10975 /* There should be a non inline version of this, for when we
10976 are saving lots of vector registers. */
10977 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10978 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10980 rtx areg, savereg, mem;
10983 offset = info->altivec_save_offset + sp_offset
10984 + 16 * (i - info->first_altivec_reg_save);
10986 savereg = gen_rtx_REG (V4SImode, i);
10988 areg = gen_rtx_REG (Pmode, 0);
10989 emit_move_insn (areg, GEN_INT (offset));
10991 /* AltiVec addressing mode is [reg+reg]. */
10992 mem = gen_rtx_MEM (V4SImode,
10993 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10995 set_mem_alias_set (mem, rs6000_sr_alias_set);
10997 insn = emit_move_insn (mem, savereg);
10999 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11000 areg, GEN_INT (offset));
11004 /* VRSAVE is a bit vector representing which AltiVec registers
11005 are used. The OS uses this to determine which vector
11006 registers to save on a context switch. We need to save
11007 VRSAVE on the stack frame, add whatever AltiVec registers we
11008 used in this function, and do the corresponding magic in the
11011 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11013 rtx reg, mem, vrsave;
11016 /* Get VRSAVE onto a GPR. */
11017 reg = gen_rtx_REG (SImode, 12);
11018 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11020 emit_insn (gen_get_vrsave_internal (reg));
11022 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11025 offset = info->vrsave_save_offset + sp_offset;
11027 = gen_rtx_MEM (SImode,
11028 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11029 set_mem_alias_set (mem, rs6000_sr_alias_set);
11030 insn = emit_move_insn (mem, reg);
11032 /* Include the registers in the mask. */
11033 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11035 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11038 /* If we use the link register, get it into r0. */
11039 if (info->lr_save_p)
11040 emit_move_insn (gen_rtx_REG (Pmode, 0),
11041 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11043 /* If we need to save CR, put it into r12. */
11044 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11046 cr_save_rtx = gen_rtx_REG (SImode, 12);
11047 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11050 /* Do any required saving of fpr's. If only one or two to save, do
11051 it ourselves. Otherwise, call function. */
11052 if (saving_FPRs_inline)
11055 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11056 if ((regs_ever_live[info->first_fp_reg_save+i]
11057 && ! call_used_regs[info->first_fp_reg_save+i]))
11058 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11059 info->first_fp_reg_save + i,
11060 info->fp_save_offset + sp_offset + 8 * i,
11063 else if (info->first_fp_reg_save != 64)
11067 const char *alloc_rname;
11069 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11071 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11072 gen_rtx_REG (Pmode,
11073 LINK_REGISTER_REGNUM));
11074 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11075 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11076 alloc_rname = ggc_strdup (rname);
11077 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11078 gen_rtx_SYMBOL_REF (Pmode,
11080 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11082 rtx addr, reg, mem;
11083 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11084 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11085 GEN_INT (info->fp_save_offset
11086 + sp_offset + 8*i));
11087 mem = gen_rtx_MEM (DFmode, addr);
11088 set_mem_alias_set (mem, rs6000_sr_alias_set);
11090 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11092 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11093 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11094 NULL_RTX, NULL_RTX);
11097 /* Save GPRs. This is done as a PARALLEL if we are using
11098 the store-multiple instructions. */
11099 if (using_store_multiple)
11103 p = rtvec_alloc (32 - info->first_gp_reg_save);
11104 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11106 rtx addr, reg, mem;
11107 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11108 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11109 GEN_INT (info->gp_save_offset
11112 mem = gen_rtx_MEM (reg_mode, addr);
11113 set_mem_alias_set (mem, rs6000_sr_alias_set);
11115 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11117 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11118 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11119 NULL_RTX, NULL_RTX);
11124 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11125 if ((regs_ever_live[info->first_gp_reg_save+i]
11126 && ! call_used_regs[info->first_gp_reg_save+i])
11127 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11128 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11129 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11131 rtx addr, reg, mem;
11132 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11134 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11136 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11139 if (!SPE_CONST_OFFSET_OK (offset))
11141 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11142 emit_move_insn (b, GEN_INT (offset));
11145 b = GEN_INT (offset);
11147 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11148 mem = gen_rtx_MEM (V2SImode, addr);
11149 set_mem_alias_set (mem, rs6000_sr_alias_set);
11150 insn = emit_move_insn (mem, reg);
11152 if (GET_CODE (b) == CONST_INT)
11153 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11154 NULL_RTX, NULL_RTX);
11156 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11157 b, GEN_INT (offset));
11161 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11162 GEN_INT (info->gp_save_offset
11165 mem = gen_rtx_MEM (reg_mode, addr);
11166 set_mem_alias_set (mem, rs6000_sr_alias_set);
11168 insn = emit_move_insn (mem, reg);
11169 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11170 NULL_RTX, NULL_RTX);
11175 /* ??? There's no need to emit actual instructions here, but it's the
11176 easiest way to get the frame unwind information emitted. */
11177 if (current_function_calls_eh_return)
11179 unsigned int i, regno;
11181 /* In AIX ABI we need to pretend we save r2 here. */
11184 rtx addr, reg, mem;
11186 reg = gen_rtx_REG (reg_mode, 2);
11187 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11188 GEN_INT (sp_offset + 5 * reg_size));
11189 mem = gen_rtx_MEM (reg_mode, addr);
11190 set_mem_alias_set (mem, rs6000_sr_alias_set);
11192 insn = emit_move_insn (mem, reg);
11193 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11194 NULL_RTX, NULL_RTX);
11195 PATTERN (insn) = gen_blockage ();
11200 regno = EH_RETURN_DATA_REGNO (i);
11201 if (regno == INVALID_REGNUM)
11204 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11205 info->ehrd_offset + sp_offset
11206 + reg_size * (int) i,
11211 /* Save lr if we used it. */
11212 if (info->lr_save_p)
11214 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11215 GEN_INT (info->lr_save_offset + sp_offset));
11216 rtx reg = gen_rtx_REG (Pmode, 0);
11217 rtx mem = gen_rtx_MEM (Pmode, addr);
11218 /* This should not be of rs6000_sr_alias_set, because of
11219 __builtin_return_address. */
11221 insn = emit_move_insn (mem, reg);
11222 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11223 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11226 /* Save CR if we use any that must be preserved. */
11227 if (info->cr_save_p)
11229 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11230 GEN_INT (info->cr_save_offset + sp_offset));
11231 rtx mem = gen_rtx_MEM (SImode, addr);
11233 set_mem_alias_set (mem, rs6000_sr_alias_set);
11235 /* If r12 was used to hold the original sp, copy cr into r0 now
11237 if (REGNO (frame_reg_rtx) == 12)
11239 cr_save_rtx = gen_rtx_REG (SImode, 0);
11240 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11242 insn = emit_move_insn (mem, cr_save_rtx);
11244 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11245 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11246 But that's OK. All we have to do is specify that _one_ condition
11247 code register is saved in this stack slot. The thrower's epilogue
11248 will then restore all the call-saved registers.
11249 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11250 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11251 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11254 /* Update stack and set back pointer unless this is V.4,
11255 for which it was done previously. */
11257 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11258 rs6000_emit_allocate_stack (info->total_size, FALSE);
11260 /* Set frame pointer, if needed. */
11261 if (frame_pointer_needed)
11263 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11265 RTX_FRAME_RELATED_P (insn) = 1;
11268 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11269 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11270 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11271 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11273 /* If emit_load_toc_table will use the link register, we need to save
11274 it. We use R12 for this purpose because emit_load_toc_table
11275 can use register 0. This allows us to use a plain 'blr' to return
11276 from the procedure more often. */
11277 int save_LR_around_toc_setup = (TARGET_ELF
11278 && DEFAULT_ABI != ABI_AIX
11280 && ! info->lr_save_p
11281 && EXIT_BLOCK_PTR->pred != NULL);
11282 if (save_LR_around_toc_setup)
11284 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11285 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11286 rs6000_emit_load_toc_table (TRUE);
11287 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11290 rs6000_emit_load_toc_table (TRUE);
11294 if (DEFAULT_ABI == ABI_DARWIN
11295 && flag_pic && current_function_uses_pic_offset_table)
11297 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11298 const char *picbase = machopic_function_base_name ();
11299 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11301 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11303 rs6000_maybe_dead (
11304 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11305 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11310 /* Write function prologue. */
11313 rs6000_output_function_prologue (FILE *file,
11314 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11316 rs6000_stack_t *info = rs6000_stack_info ();
11318 if (TARGET_DEBUG_STACK)
11319 debug_stack_info (info);
11321 /* Write .extern for any function we will call to save and restore
11323 if (info->first_fp_reg_save < 64
11324 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11325 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11326 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11327 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11328 RESTORE_FP_SUFFIX);
11330 /* Write .extern for AIX common mode routines, if needed. */
11331 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11333 fputs ("\t.extern __mulh\n", file);
11334 fputs ("\t.extern __mull\n", file);
11335 fputs ("\t.extern __divss\n", file);
11336 fputs ("\t.extern __divus\n", file);
11337 fputs ("\t.extern __quoss\n", file);
11338 fputs ("\t.extern __quous\n", file);
11339 common_mode_defined = 1;
11342 if (! HAVE_prologue)
11346 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11347 the "toplevel" insn chain. */
11348 emit_note (NOTE_INSN_DELETED);
11349 rs6000_emit_prologue ();
11350 emit_note (NOTE_INSN_DELETED);
11352 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11356 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11358 INSN_ADDRESSES_NEW (insn, addr);
11363 if (TARGET_DEBUG_STACK)
11364 debug_rtx_list (get_insns (), 100);
11365 final (get_insns (), file, FALSE, FALSE);
11369 rs6000_pic_labelno++;
11372 /* Emit function epilogue as insns.
11374 At present, dwarf2out_frame_debug_expr doesn't understand
11375 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11376 anywhere in the epilogue. Most of the insns below would in any case
11377 need special notes to explain where r11 is in relation to the stack. */
11380 rs6000_emit_epilogue (int sibcall)
11382 rs6000_stack_t *info;
11383 int restoring_FPRs_inline;
11384 int using_load_multiple;
11385 int using_mfcr_multiple;
11386 int use_backchain_to_restore_sp;
11388 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11389 rtx frame_reg_rtx = sp_reg_rtx;
11390 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11391 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11394 info = rs6000_stack_info ();
11396 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11398 reg_mode = V2SImode;
11402 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11403 && (!TARGET_SPE_ABI
11404 || info->spe_64bit_regs_used == 0)
11405 && info->first_gp_reg_save < 31);
11406 restoring_FPRs_inline = (sibcall
11407 || current_function_calls_eh_return
11408 || info->first_fp_reg_save == 64
11409 || FP_SAVE_INLINE (info->first_fp_reg_save));
11410 use_backchain_to_restore_sp = (frame_pointer_needed
11411 || current_function_calls_alloca
11412 || info->total_size > 32767);
11413 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11414 || rs6000_cpu == PROCESSOR_PPC603
11415 || rs6000_cpu == PROCESSOR_PPC750
11418 /* If we have a frame pointer, a call to alloca, or a large stack
11419 frame, restore the old stack pointer using the backchain. Otherwise,
11420 we know what size to update it with. */
11421 if (use_backchain_to_restore_sp)
11423 /* Under V.4, don't reset the stack pointer until after we're done
11424 loading the saved registers. */
11425 if (DEFAULT_ABI == ABI_V4)
11426 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11428 emit_move_insn (frame_reg_rtx,
11429 gen_rtx_MEM (Pmode, sp_reg_rtx));
11432 else if (info->push_p)
11434 if (DEFAULT_ABI == ABI_V4
11435 || current_function_calls_eh_return)
11436 sp_offset = info->total_size;
11439 emit_insn (TARGET_32BIT
11440 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11441 GEN_INT (info->total_size))
11442 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11443 GEN_INT (info->total_size)));
11447 /* Restore AltiVec registers if needed. */
11448 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11452 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11453 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11455 rtx addr, areg, mem;
11457 areg = gen_rtx_REG (Pmode, 0);
11459 (areg, GEN_INT (info->altivec_save_offset
11461 + 16 * (i - info->first_altivec_reg_save)));
11463 /* AltiVec addressing mode is [reg+reg]. */
11464 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11465 mem = gen_rtx_MEM (V4SImode, addr);
11466 set_mem_alias_set (mem, rs6000_sr_alias_set);
11468 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11472 /* Restore VRSAVE if needed. */
11473 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11475 rtx addr, mem, reg;
11477 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11478 GEN_INT (info->vrsave_save_offset + sp_offset));
11479 mem = gen_rtx_MEM (SImode, addr);
11480 set_mem_alias_set (mem, rs6000_sr_alias_set);
11481 reg = gen_rtx_REG (SImode, 12);
11482 emit_move_insn (reg, mem);
11484 emit_insn (generate_set_vrsave (reg, info, 1));
11487 /* Get the old lr if we saved it. */
11488 if (info->lr_save_p)
11490 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11491 info->lr_save_offset + sp_offset);
11493 set_mem_alias_set (mem, rs6000_sr_alias_set);
11495 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11498 /* Get the old cr if we saved it. */
11499 if (info->cr_save_p)
11501 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11502 GEN_INT (info->cr_save_offset + sp_offset));
11503 rtx mem = gen_rtx_MEM (SImode, addr);
11505 set_mem_alias_set (mem, rs6000_sr_alias_set);
11507 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11510 /* Set LR here to try to overlap restores below. */
11511 if (info->lr_save_p)
11512 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11513 gen_rtx_REG (Pmode, 0));
11515 /* Load exception handler data registers, if needed. */
11516 if (current_function_calls_eh_return)
11518 unsigned int i, regno;
11522 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11523 GEN_INT (sp_offset + 5 * reg_size));
11524 rtx mem = gen_rtx_MEM (reg_mode, addr);
11526 set_mem_alias_set (mem, rs6000_sr_alias_set);
11528 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
11535 regno = EH_RETURN_DATA_REGNO (i);
11536 if (regno == INVALID_REGNUM)
11539 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11540 info->ehrd_offset + sp_offset
11541 + reg_size * (int) i);
11542 set_mem_alias_set (mem, rs6000_sr_alias_set);
11544 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11548 /* Restore GPRs. This is done as a PARALLEL if we are using
11549 the load-multiple instructions. */
11550 if (using_load_multiple)
11553 p = rtvec_alloc (32 - info->first_gp_reg_save);
11554 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11556 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11557 GEN_INT (info->gp_save_offset
11560 rtx mem = gen_rtx_MEM (reg_mode, addr);
11562 set_mem_alias_set (mem, rs6000_sr_alias_set);
11565 gen_rtx_SET (VOIDmode,
11566 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11569 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11572 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11573 if ((regs_ever_live[info->first_gp_reg_save+i]
11574 && ! call_used_regs[info->first_gp_reg_save+i])
11575 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11576 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11577 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11579 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11580 GEN_INT (info->gp_save_offset
11583 rtx mem = gen_rtx_MEM (reg_mode, addr);
11585 /* Restore 64-bit quantities for SPE. */
11586 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11588 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11591 if (!SPE_CONST_OFFSET_OK (offset))
11593 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11594 emit_move_insn (b, GEN_INT (offset));
11597 b = GEN_INT (offset);
11599 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11600 mem = gen_rtx_MEM (V2SImode, addr);
11603 set_mem_alias_set (mem, rs6000_sr_alias_set);
11605 emit_move_insn (gen_rtx_REG (reg_mode,
11606 info->first_gp_reg_save + i), mem);
11609 /* Restore fpr's if we need to do it without calling a function. */
11610 if (restoring_FPRs_inline)
11611 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11612 if ((regs_ever_live[info->first_fp_reg_save+i]
11613 && ! call_used_regs[info->first_fp_reg_save+i]))
11616 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11617 GEN_INT (info->fp_save_offset
11620 mem = gen_rtx_MEM (DFmode, addr);
11621 set_mem_alias_set (mem, rs6000_sr_alias_set);
11623 emit_move_insn (gen_rtx_REG (DFmode,
11624 info->first_fp_reg_save + i),
11628 /* If we saved cr, restore it here. Just those that were used. */
11629 if (info->cr_save_p)
11631 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11634 if (using_mfcr_multiple)
11636 for (i = 0; i < 8; i++)
11637 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11643 if (using_mfcr_multiple && count > 1)
11648 p = rtvec_alloc (count);
11651 for (i = 0; i < 8; i++)
11652 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11654 rtvec r = rtvec_alloc (2);
11655 RTVEC_ELT (r, 0) = r12_rtx;
11656 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11657 RTVEC_ELT (p, ndx) =
11658 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11659 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11662 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11667 for (i = 0; i < 8; i++)
11668 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11670 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11676 /* If this is V.4, unwind the stack pointer after all of the loads
11677 have been done. We need to emit a block here so that sched
11678 doesn't decide to move the sp change before the register restores
11679 (which may not have any obvious dependency on the stack). This
11680 doesn't hurt performance, because there is no scheduling that can
11681 be done after this point. */
11682 if (DEFAULT_ABI == ABI_V4
11683 || current_function_calls_eh_return)
11685 if (frame_reg_rtx != sp_reg_rtx)
11686 rs6000_emit_stack_tie ();
11688 if (use_backchain_to_restore_sp)
11690 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11692 else if (sp_offset != 0)
11694 emit_insn (TARGET_32BIT
11695 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11696 GEN_INT (sp_offset))
11697 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11698 GEN_INT (sp_offset)));
11702 if (current_function_calls_eh_return)
11704 rtx sa = EH_RETURN_STACKADJ_RTX;
11705 emit_insn (TARGET_32BIT
11706 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11707 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11713 if (! restoring_FPRs_inline)
11714 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11716 p = rtvec_alloc (2);
11718 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11719 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11720 gen_rtx_REG (Pmode,
11721 LINK_REGISTER_REGNUM));
11723 /* If we have to restore more than two FP registers, branch to the
11724 restore function. It will return to our caller. */
11725 if (! restoring_FPRs_inline)
11729 const char *alloc_rname;
11731 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11732 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11733 alloc_rname = ggc_strdup (rname);
11734 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11735 gen_rtx_SYMBOL_REF (Pmode,
11738 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11741 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11742 GEN_INT (info->fp_save_offset + 8*i));
11743 mem = gen_rtx_MEM (DFmode, addr);
11744 set_mem_alias_set (mem, rs6000_sr_alias_set);
11746 RTVEC_ELT (p, i+3) =
11747 gen_rtx_SET (VOIDmode,
11748 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11753 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11757 /* Write function epilogue. */
11760 rs6000_output_function_epilogue (FILE *file,
11761 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11763 rs6000_stack_t *info = rs6000_stack_info ();
11765 if (! HAVE_epilogue)
11767 rtx insn = get_last_insn ();
11768 /* If the last insn was a BARRIER, we don't have to write anything except
11769 the trace table. */
11770 if (GET_CODE (insn) == NOTE)
11771 insn = prev_nonnote_insn (insn);
11772 if (insn == 0 || GET_CODE (insn) != BARRIER)
11774 /* This is slightly ugly, but at least we don't have two
11775 copies of the epilogue-emitting code. */
11778 /* A NOTE_INSN_DELETED is supposed to be at the start
11779 and end of the "toplevel" insn chain. */
11780 emit_note (NOTE_INSN_DELETED);
11781 rs6000_emit_epilogue (FALSE);
11782 emit_note (NOTE_INSN_DELETED);
11784 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11788 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11790 INSN_ADDRESSES_NEW (insn, addr);
11795 if (TARGET_DEBUG_STACK)
11796 debug_rtx_list (get_insns (), 100);
11797 final (get_insns (), file, FALSE, FALSE);
11802 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
11803 /* Mach-O doesn't support labels at the end of objects, so if
11804 it looks like we might want one, insert a NOP. */
11806 rtx insn = get_last_insn ();
11809 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
11810 insn = PREV_INSN (insn);
11814 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
11815 fputs ("\tnop\n", file);
11819 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11822 We don't output a traceback table if -finhibit-size-directive was
11823 used. The documentation for -finhibit-size-directive reads
11824 ``don't output a @code{.size} assembler directive, or anything
11825 else that would cause trouble if the function is split in the
11826 middle, and the two halves are placed at locations far apart in
11827 memory.'' The traceback table has this property, since it
11828 includes the offset from the start of the function to the
11829 traceback table itself.
11831 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11832 different traceback table. */
11833 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11834 && rs6000_traceback != traceback_none)
11836 const char *fname = NULL;
11837 const char *language_string = lang_hooks.name;
11838 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11840 int optional_tbtab;
11842 if (rs6000_traceback == traceback_full)
11843 optional_tbtab = 1;
11844 else if (rs6000_traceback == traceback_part)
11845 optional_tbtab = 0;
11847 optional_tbtab = !optimize_size && !TARGET_ELF;
11849 if (optional_tbtab)
11851 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11852 while (*fname == '.') /* V.4 encodes . in the name */
11855 /* Need label immediately before tbtab, so we can compute
11856 its offset from the function start. */
11857 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11858 ASM_OUTPUT_LABEL (file, fname);
11861 /* The .tbtab pseudo-op can only be used for the first eight
11862 expressions, since it can't handle the possibly variable
11863 length fields that follow. However, if you omit the optional
11864 fields, the assembler outputs zeros for all optional fields
11865 anyways, giving each variable length field is minimum length
11866 (as defined in sys/debug.h). Thus we can not use the .tbtab
11867 pseudo-op at all. */
11869 /* An all-zero word flags the start of the tbtab, for debuggers
11870 that have to find it by searching forward from the entry
11871 point or from the current pc. */
11872 fputs ("\t.long 0\n", file);
11874 /* Tbtab format type. Use format type 0. */
11875 fputs ("\t.byte 0,", file);
11877 /* Language type. Unfortunately, there doesn't seem to be any
11878 official way to get this info, so we use language_string. C
11879 is 0. C++ is 9. No number defined for Obj-C, so use the
11880 value for C for now. There is no official value for Java,
11881 although IBM appears to be using 13. There is no official value
11882 for Chill, so we've chosen 44 pseudo-randomly. */
11883 if (! strcmp (language_string, "GNU C")
11884 || ! strcmp (language_string, "GNU Objective-C"))
11886 else if (! strcmp (language_string, "GNU F77"))
11888 else if (! strcmp (language_string, "GNU Ada"))
11890 else if (! strcmp (language_string, "GNU Pascal"))
11892 else if (! strcmp (language_string, "GNU C++"))
11894 else if (! strcmp (language_string, "GNU Java"))
11896 else if (! strcmp (language_string, "GNU CHILL"))
11900 fprintf (file, "%d,", i);
11902 /* 8 single bit fields: global linkage (not set for C extern linkage,
11903 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11904 from start of procedure stored in tbtab, internal function, function
11905 has controlled storage, function has no toc, function uses fp,
11906 function logs/aborts fp operations. */
11907 /* Assume that fp operations are used if any fp reg must be saved. */
11908 fprintf (file, "%d,",
11909 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11911 /* 6 bitfields: function is interrupt handler, name present in
11912 proc table, function calls alloca, on condition directives
11913 (controls stack walks, 3 bits), saves condition reg, saves
11915 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11916 set up as a frame pointer, even when there is no alloca call. */
11917 fprintf (file, "%d,",
11918 ((optional_tbtab << 6)
11919 | ((optional_tbtab & frame_pointer_needed) << 5)
11920 | (info->cr_save_p << 1)
11921 | (info->lr_save_p)));
11923 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11925 fprintf (file, "%d,",
11926 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11928 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11929 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11931 if (optional_tbtab)
11933 /* Compute the parameter info from the function decl argument
11936 int next_parm_info_bit = 31;
11938 for (decl = DECL_ARGUMENTS (current_function_decl);
11939 decl; decl = TREE_CHAIN (decl))
11941 rtx parameter = DECL_INCOMING_RTL (decl);
11942 enum machine_mode mode = GET_MODE (parameter);
11944 if (GET_CODE (parameter) == REG)
11946 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11952 if (mode == SFmode)
11954 else if (mode == DFmode || mode == TFmode)
11959 /* If only one bit will fit, don't or in this entry. */
11960 if (next_parm_info_bit > 0)
11961 parm_info |= (bits << (next_parm_info_bit - 1));
11962 next_parm_info_bit -= 2;
11966 fixed_parms += ((GET_MODE_SIZE (mode)
11967 + (UNITS_PER_WORD - 1))
11969 next_parm_info_bit -= 1;
11975 /* Number of fixed point parameters. */
11976 /* This is actually the number of words of fixed point parameters; thus
11977 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11978 fprintf (file, "%d,", fixed_parms);
11980 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11982 /* This is actually the number of fp registers that hold parameters;
11983 and thus the maximum value is 13. */
11984 /* Set parameters on stack bit if parameters are not in their original
11985 registers, regardless of whether they are on the stack? Xlc
11986 seems to set the bit when not optimizing. */
11987 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11989 if (! optional_tbtab)
11992 /* Optional fields follow. Some are variable length. */
11994 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11995 11 double float. */
11996 /* There is an entry for each parameter in a register, in the order that
11997 they occur in the parameter list. Any intervening arguments on the
11998 stack are ignored. If the list overflows a long (max possible length
11999 34 bits) then completely leave off all elements that don't fit. */
12000 /* Only emit this long if there was at least one parameter. */
12001 if (fixed_parms || float_parms)
12002 fprintf (file, "\t.long %d\n", parm_info);
12004 /* Offset from start of code to tb table. */
12005 fputs ("\t.long ", file);
12006 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12008 RS6000_OUTPUT_BASENAME (file, fname);
12010 assemble_name (file, fname);
12012 fputs ("-.", file);
12014 RS6000_OUTPUT_BASENAME (file, fname);
12016 assemble_name (file, fname);
12020 /* Interrupt handler mask. */
12021 /* Omit this long, since we never set the interrupt handler bit
12024 /* Number of CTL (controlled storage) anchors. */
12025 /* Omit this long, since the has_ctl bit is never set above. */
12027 /* Displacement into stack of each CTL anchor. */
12028 /* Omit this list of longs, because there are no CTL anchors. */
12030 /* Length of function name. */
12033 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12035 /* Function name. */
12036 assemble_string (fname, strlen (fname));
12038 /* Register for alloca automatic storage; this is always reg 31.
12039 Only emit this if the alloca bit was set above. */
12040 if (frame_pointer_needed)
12041 fputs ("\t.byte 31\n", file);
12043 fputs ("\t.align 2\n", file);
12047 /* A C compound statement that outputs the assembler code for a thunk
12048 function, used to implement C++ virtual function calls with
12049 multiple inheritance. The thunk acts as a wrapper around a virtual
12050 function, adjusting the implicit object parameter before handing
12051 control off to the real function.
12053 First, emit code to add the integer DELTA to the location that
12054 contains the incoming first argument. Assume that this argument
12055 contains a pointer, and is the one used to pass the `this' pointer
12056 in C++. This is the incoming argument *before* the function
12057 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12058 values of all other incoming arguments.
12060 After the addition, emit code to jump to FUNCTION, which is a
12061 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12062 not touch the return address. Hence returning from FUNCTION will
12063 return to whoever called the current `thunk'.
12065 The effect must be as if FUNCTION had been called directly with the
12066 adjusted first argument. This macro is responsible for emitting
12067 all of the code for a thunk function; output_function_prologue()
12068 and output_function_epilogue() are not invoked.
12070 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12071 been extracted from it.) It might possibly be useful on some
12072 targets, but probably not.
12074 If you do not define this macro, the target-independent code in the
12075 C++ frontend will generate a less efficient heavyweight thunk that
12076 calls FUNCTION instead of jumping to it. The generic approach does
12077 not support varargs. */
12080 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12081 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12084 rtx this, insn, funexp;
12086 reload_completed = 1;
12087 epilogue_completed = 1;
12088 no_new_pseudos = 1;
12090 /* Mark the end of the (empty) prologue. */
12091 emit_note (NOTE_INSN_PROLOGUE_END);
12093 /* Find the "this" pointer. If the function returns a structure,
12094 the structure return pointer is in r3. */
12095 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12096 this = gen_rtx_REG (Pmode, 4);
12098 this = gen_rtx_REG (Pmode, 3);
12100 /* Apply the constant offset, if required. */
12103 rtx delta_rtx = GEN_INT (delta);
12104 emit_insn (TARGET_32BIT
12105 ? gen_addsi3 (this, this, delta_rtx)
12106 : gen_adddi3 (this, this, delta_rtx));
12109 /* Apply the offset from the vtable, if required. */
12112 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12113 rtx tmp = gen_rtx_REG (Pmode, 12);
12115 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12116 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12118 emit_insn (TARGET_32BIT
12119 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12120 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12121 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12125 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12127 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12129 emit_insn (TARGET_32BIT
12130 ? gen_addsi3 (this, this, tmp)
12131 : gen_adddi3 (this, this, tmp));
12134 /* Generate a tail call to the target function. */
12135 if (!TREE_USED (function))
12137 assemble_external (function);
12138 TREE_USED (function) = 1;
12140 funexp = XEXP (DECL_RTL (function), 0);
12141 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12144 if (MACHOPIC_INDIRECT)
12145 funexp = machopic_indirect_call_target (funexp);
12148 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12149 generate sibcall RTL explicitly to avoid constraint abort. */
12150 insn = emit_call_insn (
12151 gen_rtx_PARALLEL (VOIDmode,
12153 gen_rtx_CALL (VOIDmode,
12154 funexp, const0_rtx),
12155 gen_rtx_USE (VOIDmode, const0_rtx),
12156 gen_rtx_USE (VOIDmode,
12157 gen_rtx_REG (SImode,
12158 LINK_REGISTER_REGNUM)),
12159 gen_rtx_RETURN (VOIDmode))));
12160 SIBLING_CALL_P (insn) = 1;
12163 /* Run just enough of rest_of_compilation to get the insns emitted.
12164 There's not really enough bulk here to make other passes such as
12165 instruction scheduling worth while. Note that use_thunk calls
12166 assemble_start_function and assemble_end_function. */
12167 insn = get_insns ();
12168 insn_locators_initialize ();
12169 shorten_branches (insn);
12170 final_start_function (insn, file, 1);
12171 final (insn, file, 1, 0);
12172 final_end_function ();
12174 reload_completed = 0;
12175 epilogue_completed = 0;
12176 no_new_pseudos = 0;
12179 /* A quick summary of the various types of 'constant-pool tables'
12182 Target Flags Name One table per
12183 AIX (none) AIX TOC object file
12184 AIX -mfull-toc AIX TOC object file
12185 AIX -mminimal-toc AIX minimal TOC translation unit
12186 SVR4/EABI (none) SVR4 SDATA object file
12187 SVR4/EABI -fpic SVR4 pic object file
12188 SVR4/EABI -fPIC SVR4 PIC translation unit
12189 SVR4/EABI -mrelocatable EABI TOC function
12190 SVR4/EABI -maix AIX TOC object file
12191 SVR4/EABI -maix -mminimal-toc
12192 AIX minimal TOC translation unit
12194 Name Reg. Set by entries contains:
12195 made by addrs? fp? sum?
12197 AIX TOC 2 crt0 as Y option option
12198 AIX minimal TOC 30 prolog gcc Y Y option
12199 SVR4 SDATA 13 crt0 gcc N Y N
12200 SVR4 pic 30 prolog ld Y not yet N
12201 SVR4 PIC 30 prolog gcc Y option option
12202 EABI TOC 30 prolog gcc Y option option
12206 /* Hash functions for the hash table. */
12209 rs6000_hash_constant (rtx k)
12211 enum rtx_code code = GET_CODE (k);
12212 enum machine_mode mode = GET_MODE (k);
12213 unsigned result = (code << 3) ^ mode;
12214 const char *format;
12217 format = GET_RTX_FORMAT (code);
12218 flen = strlen (format);
12224 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12227 if (mode != VOIDmode)
12228 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12240 for (; fidx < flen; fidx++)
12241 switch (format[fidx])
12246 const char *str = XSTR (k, fidx);
12247 len = strlen (str);
12248 result = result * 613 + len;
12249 for (i = 0; i < len; i++)
12250 result = result * 613 + (unsigned) str[i];
12255 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12259 result = result * 613 + (unsigned) XINT (k, fidx);
12262 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12263 result = result * 613 + (unsigned) XWINT (k, fidx);
12267 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12268 result = result * 613 + (unsigned) (XWINT (k, fidx)
12282 toc_hash_function (const void *hash_entry)
12284 const struct toc_hash_struct *thc =
12285 (const struct toc_hash_struct *) hash_entry;
12286 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12289 /* Compare H1 and H2 for equivalence. */
12292 toc_hash_eq (const void *h1, const void *h2)
12294 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12295 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12297 if (((const struct toc_hash_struct *) h1)->key_mode
12298 != ((const struct toc_hash_struct *) h2)->key_mode)
12301 return rtx_equal_p (r1, r2);
12304 /* These are the names given by the C++ front-end to vtables, and
12305 vtable-like objects. Ideally, this logic should not be here;
12306 instead, there should be some programmatic way of inquiring as
12307 to whether or not an object is a vtable. */
12309 #define VTABLE_NAME_P(NAME) \
12310 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12311 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12312 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12313 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12316 rs6000_output_symbol_ref (FILE *file, rtx x)
12318 /* Currently C++ toc references to vtables can be emitted before it
12319 is decided whether the vtable is public or private. If this is
12320 the case, then the linker will eventually complain that there is
12321 a reference to an unknown section. Thus, for vtables only,
12322 we emit the TOC reference to reference the symbol and not the
12324 const char *name = XSTR (x, 0);
12326 if (VTABLE_NAME_P (name))
12328 RS6000_OUTPUT_BASENAME (file, name);
12331 assemble_name (file, name);
12334 /* Output a TOC entry. We derive the entry name from what is being
12338 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
12341 const char *name = buf;
12342 const char *real_name;
12349 /* When the linker won't eliminate them, don't output duplicate
12350 TOC entries (this happens on AIX if there is any kind of TOC,
12351 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12353 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12355 struct toc_hash_struct *h;
12358 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12359 time because GGC is not initialized at that point. */
12360 if (toc_hash_table == NULL)
12361 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12362 toc_hash_eq, NULL);
12364 h = ggc_alloc (sizeof (*h));
12366 h->key_mode = mode;
12367 h->labelno = labelno;
12369 found = htab_find_slot (toc_hash_table, h, 1);
12370 if (*found == NULL)
12372 else /* This is indeed a duplicate.
12373 Set this label equal to that label. */
12375 fputs ("\t.set ", file);
12376 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12377 fprintf (file, "%d,", labelno);
12378 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12379 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12385 /* If we're going to put a double constant in the TOC, make sure it's
12386 aligned properly when strict alignment is on. */
12387 if (GET_CODE (x) == CONST_DOUBLE
12388 && STRICT_ALIGNMENT
12389 && GET_MODE_BITSIZE (mode) >= 64
12390 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12391 ASM_OUTPUT_ALIGN (file, 3);
12394 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12396 /* Handle FP constants specially. Note that if we have a minimal
12397 TOC, things we put here aren't actually in the TOC, so we can allow
12399 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12401 REAL_VALUE_TYPE rv;
12404 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12405 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12409 if (TARGET_MINIMAL_TOC)
12410 fputs (DOUBLE_INT_ASM_OP, file);
12412 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12413 k[0] & 0xffffffff, k[1] & 0xffffffff,
12414 k[2] & 0xffffffff, k[3] & 0xffffffff);
12415 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12416 k[0] & 0xffffffff, k[1] & 0xffffffff,
12417 k[2] & 0xffffffff, k[3] & 0xffffffff);
12422 if (TARGET_MINIMAL_TOC)
12423 fputs ("\t.long ", file);
12425 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12426 k[0] & 0xffffffff, k[1] & 0xffffffff,
12427 k[2] & 0xffffffff, k[3] & 0xffffffff);
12428 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12429 k[0] & 0xffffffff, k[1] & 0xffffffff,
12430 k[2] & 0xffffffff, k[3] & 0xffffffff);
12434 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12436 REAL_VALUE_TYPE rv;
12439 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12440 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12444 if (TARGET_MINIMAL_TOC)
12445 fputs (DOUBLE_INT_ASM_OP, file);
12447 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12448 k[0] & 0xffffffff, k[1] & 0xffffffff);
12449 fprintf (file, "0x%lx%08lx\n",
12450 k[0] & 0xffffffff, k[1] & 0xffffffff);
12455 if (TARGET_MINIMAL_TOC)
12456 fputs ("\t.long ", file);
12458 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12459 k[0] & 0xffffffff, k[1] & 0xffffffff);
12460 fprintf (file, "0x%lx,0x%lx\n",
12461 k[0] & 0xffffffff, k[1] & 0xffffffff);
12465 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12467 REAL_VALUE_TYPE rv;
12470 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12471 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12475 if (TARGET_MINIMAL_TOC)
12476 fputs (DOUBLE_INT_ASM_OP, file);
12478 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12479 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12484 if (TARGET_MINIMAL_TOC)
12485 fputs ("\t.long ", file);
12487 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12488 fprintf (file, "0x%lx\n", l & 0xffffffff);
12492 else if (GET_MODE (x) == VOIDmode
12493 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12495 unsigned HOST_WIDE_INT low;
12496 HOST_WIDE_INT high;
12498 if (GET_CODE (x) == CONST_DOUBLE)
12500 low = CONST_DOUBLE_LOW (x);
12501 high = CONST_DOUBLE_HIGH (x);
12504 #if HOST_BITS_PER_WIDE_INT == 32
12507 high = (low & 0x80000000) ? ~0 : 0;
12511 low = INTVAL (x) & 0xffffffff;
12512 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12516 /* TOC entries are always Pmode-sized, but since this
12517 is a bigendian machine then if we're putting smaller
12518 integer constants in the TOC we have to pad them.
12519 (This is still a win over putting the constants in
12520 a separate constant pool, because then we'd have
12521 to have both a TOC entry _and_ the actual constant.)
12523 For a 32-bit target, CONST_INT values are loaded and shifted
12524 entirely within `low' and can be stored in one TOC entry. */
12526 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12527 abort ();/* It would be easy to make this work, but it doesn't now. */
12529 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12531 #if HOST_BITS_PER_WIDE_INT == 32
12532 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12533 POINTER_SIZE, &low, &high, 0);
12536 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12537 high = (HOST_WIDE_INT) low >> 32;
12544 if (TARGET_MINIMAL_TOC)
12545 fputs (DOUBLE_INT_ASM_OP, file);
12547 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12548 (long) high & 0xffffffff, (long) low & 0xffffffff);
12549 fprintf (file, "0x%lx%08lx\n",
12550 (long) high & 0xffffffff, (long) low & 0xffffffff);
12555 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12557 if (TARGET_MINIMAL_TOC)
12558 fputs ("\t.long ", file);
12560 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12561 (long) high & 0xffffffff, (long) low & 0xffffffff);
12562 fprintf (file, "0x%lx,0x%lx\n",
12563 (long) high & 0xffffffff, (long) low & 0xffffffff);
12567 if (TARGET_MINIMAL_TOC)
12568 fputs ("\t.long ", file);
12570 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12571 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12577 if (GET_CODE (x) == CONST)
12579 if (GET_CODE (XEXP (x, 0)) != PLUS)
12582 base = XEXP (XEXP (x, 0), 0);
12583 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12586 if (GET_CODE (base) == SYMBOL_REF)
12587 name = XSTR (base, 0);
12588 else if (GET_CODE (base) == LABEL_REF)
12589 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12590 else if (GET_CODE (base) == CODE_LABEL)
12591 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12595 real_name = (*targetm.strip_name_encoding) (name);
12596 if (TARGET_MINIMAL_TOC)
12597 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12600 fprintf (file, "\t.tc %s", real_name);
12603 fprintf (file, ".N%d", - offset);
12605 fprintf (file, ".P%d", offset);
12607 fputs ("[TC],", file);
12610 /* Currently C++ toc references to vtables can be emitted before it
12611 is decided whether the vtable is public or private. If this is
12612 the case, then the linker will eventually complain that there is
12613 a TOC reference to an unknown section. Thus, for vtables only,
12614 we emit the TOC reference to reference the symbol and not the
12616 if (VTABLE_NAME_P (name))
12618 RS6000_OUTPUT_BASENAME (file, name);
12620 fprintf (file, "%d", offset);
12621 else if (offset > 0)
12622 fprintf (file, "+%d", offset);
12625 output_addr_const (file, x);
12629 /* Output an assembler pseudo-op to write an ASCII string of N characters
12630 starting at P to FILE.
12632 On the RS/6000, we have to do this using the .byte operation and
12633 write out special characters outside the quoted string.
12634 Also, the assembler is broken; very long strings are truncated,
12635 so we must artificially break them up early. */
12638 output_ascii (FILE *file, const char *p, int n)
12641 int i, count_string;
12642 const char *for_string = "\t.byte \"";
12643 const char *for_decimal = "\t.byte ";
12644 const char *to_close = NULL;
12647 for (i = 0; i < n; i++)
12650 if (c >= ' ' && c < 0177)
12653 fputs (for_string, file);
12656 /* Write two quotes to get one. */
12664 for_decimal = "\"\n\t.byte ";
12668 if (count_string >= 512)
12670 fputs (to_close, file);
12672 for_string = "\t.byte \"";
12673 for_decimal = "\t.byte ";
12681 fputs (for_decimal, file);
12682 fprintf (file, "%d", c);
12684 for_string = "\n\t.byte \"";
12685 for_decimal = ", ";
12691 /* Now close the string if we have written one. Then end the line. */
12693 fputs (to_close, file);
12696 /* Generate a unique section name for FILENAME for a section type
12697 represented by SECTION_DESC. Output goes into BUF.
12699 SECTION_DESC can be any string, as long as it is different for each
12700 possible section type.
12702 We name the section in the same manner as xlc. The name begins with an
12703 underscore followed by the filename (after stripping any leading directory
12704 names) with the last period replaced by the string SECTION_DESC. If
12705 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12709 rs6000_gen_section_name (char **buf, const char *filename,
12710 const char *section_desc)
12712 const char *q, *after_last_slash, *last_period = 0;
12716 after_last_slash = filename;
12717 for (q = filename; *q; q++)
12720 after_last_slash = q + 1;
12721 else if (*q == '.')
12725 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12726 *buf = (char *) xmalloc (len);
12731 for (q = after_last_slash; *q; q++)
12733 if (q == last_period)
12735 strcpy (p, section_desc);
12736 p += strlen (section_desc);
12740 else if (ISALNUM (*q))
12744 if (last_period == 0)
12745 strcpy (p, section_desc);
12750 /* Emit profile function. */
12753 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
12755 if (TARGET_PROFILE_KERNEL)
12758 if (DEFAULT_ABI == ABI_AIX)
12760 #ifndef NO_PROFILE_COUNTERS
12761 # define NO_PROFILE_COUNTERS 0
12763 if (NO_PROFILE_COUNTERS)
12764 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12768 const char *label_name;
12771 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12772 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12773 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12775 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12779 else if (DEFAULT_ABI == ABI_DARWIN)
12781 const char *mcount_name = RS6000_MCOUNT;
12782 int caller_addr_regno = LINK_REGISTER_REGNUM;
12784 /* Be conservative and always set this, at least for now. */
12785 current_function_uses_pic_offset_table = 1;
12788 /* For PIC code, set up a stub and collect the caller's address
12789 from r0, which is where the prologue puts it. */
12790 if (MACHOPIC_INDIRECT)
12792 mcount_name = machopic_stub_name (mcount_name);
12793 if (current_function_uses_pic_offset_table)
12794 caller_addr_regno = 0;
12797 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12799 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12803 /* Write function profiler code. */
12806 output_function_profiler (FILE *file, int labelno)
12811 switch (DEFAULT_ABI)
12820 warning ("no profiling of 64-bit code for this ABI");
12823 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12824 fprintf (file, "\tmflr %s\n", reg_names[0]);
12827 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12828 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12829 reg_names[0], save_lr, reg_names[1]);
12830 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12831 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12832 assemble_name (file, buf);
12833 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12835 else if (flag_pic > 1)
12837 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12838 reg_names[0], save_lr, reg_names[1]);
12839 /* Now, we need to get the address of the label. */
12840 fputs ("\tbl 1f\n\t.long ", file);
12841 assemble_name (file, buf);
12842 fputs ("-.\n1:", file);
12843 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12844 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12845 reg_names[0], reg_names[11]);
12846 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12847 reg_names[0], reg_names[0], reg_names[11]);
12851 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12852 assemble_name (file, buf);
12853 fputs ("@ha\n", file);
12854 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12855 reg_names[0], save_lr, reg_names[1]);
12856 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12857 assemble_name (file, buf);
12858 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12861 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12862 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12867 if (!TARGET_PROFILE_KERNEL)
12869 /* Don't do anything, done in output_profile_hook (). */
12876 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12877 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12879 if (current_function_needs_context)
12881 asm_fprintf (file, "\tstd %s,24(%s)\n",
12882 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12883 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12884 asm_fprintf (file, "\tld %s,24(%s)\n",
12885 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12888 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12896 rs6000_use_dfa_pipeline_interface ()
12901 /* Power4 load update and store update instructions are cracked into a
12902 load or store and an integer insn which are executed in the same cycle.
12903 Branches have their own dispatch slot which does not count against the
12904 GCC issue rate, but it changes the program flow so there are no other
12905 instructions to issue in this cycle. */
12908 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
12909 int verbose ATTRIBUTE_UNUSED,
12910 rtx insn, int more)
12912 if (GET_CODE (PATTERN (insn)) == USE
12913 || GET_CODE (PATTERN (insn)) == CLOBBER)
12916 if (rs6000_cpu == PROCESSOR_POWER4)
12918 enum attr_type type = get_attr_type (insn);
12919 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12920 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
12921 || type == TYPE_MFCR)
12923 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12924 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12925 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12926 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12927 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12928 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12929 || type == TYPE_IDIV || type == TYPE_LDIV
12930 || type == TYPE_INSERT_WORD)
12931 return more > 2 ? more - 2 : 0;
12937 /* Adjust the cost of a scheduling dependency. Return the new cost of
12938 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12941 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
12944 if (! recog_memoized (insn))
12947 if (REG_NOTE_KIND (link) != 0)
12950 if (REG_NOTE_KIND (link) == 0)
12952 /* Data dependency; DEP_INSN writes a register that INSN reads
12953 some cycles later. */
12954 switch (get_attr_type (insn))
12957 /* Tell the first scheduling pass about the latency between
12958 a mtctr and bctr (and mtlr and br/blr). The first
12959 scheduling pass will not know about this latency since
12960 the mtctr instruction, which has the latency associated
12961 to it, will be generated by reload. */
12962 return TARGET_POWER ? 5 : 4;
12964 /* Leave some extra cycles between a compare and its
12965 dependent branch, to inhibit expensive mispredicts. */
12966 if ((rs6000_cpu_attr == CPU_PPC603
12967 || rs6000_cpu_attr == CPU_PPC604
12968 || rs6000_cpu_attr == CPU_PPC604E
12969 || rs6000_cpu_attr == CPU_PPC620
12970 || rs6000_cpu_attr == CPU_PPC630
12971 || rs6000_cpu_attr == CPU_PPC750
12972 || rs6000_cpu_attr == CPU_PPC7400
12973 || rs6000_cpu_attr == CPU_PPC7450
12974 || rs6000_cpu_attr == CPU_POWER4)
12975 && recog_memoized (dep_insn)
12976 && (INSN_CODE (dep_insn) >= 0)
12977 && (get_attr_type (dep_insn) == TYPE_CMP
12978 || get_attr_type (dep_insn) == TYPE_COMPARE
12979 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12980 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12981 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12982 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12983 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12984 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12989 /* Fall out to return default cost. */
12995 /* A C statement (sans semicolon) to update the integer scheduling
12996 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12997 INSN earlier, increase the priority to execute INSN later. Do not
12998 define this macro if you do not need to adjust the scheduling
12999 priorities of insns. */
13002 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13004 /* On machines (like the 750) which have asymmetric integer units,
13005 where one integer unit can do multiply and divides and the other
13006 can't, reduce the priority of multiply/divide so it is scheduled
13007 before other integer operations. */
13010 if (! INSN_P (insn))
13013 if (GET_CODE (PATTERN (insn)) == USE)
13016 switch (rs6000_cpu_attr) {
13018 switch (get_attr_type (insn))
13025 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13026 priority, priority);
13027 if (priority >= 0 && priority < 0x01000000)
13037 /* Return how many instructions the machine can issue per cycle. */
13040 rs6000_issue_rate ()
13042 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13043 if (!reload_completed)
13046 switch (rs6000_cpu_attr) {
13047 case CPU_RIOS1: /* ? */
13049 case CPU_PPC601: /* ? */
13070 /* Return how many instructions to look ahead for better insn
13074 rs6000_use_sched_lookahead ()
13076 if (rs6000_cpu_attr == CPU_PPC8540)
13082 /* Length in units of the trampoline for entering a nested function. */
13085 rs6000_trampoline_size ()
13089 switch (DEFAULT_ABI)
13095 ret = (TARGET_32BIT) ? 12 : 24;
13100 ret = (TARGET_32BIT) ? 40 : 48;
13107 /* Emit RTL insns to initialize the variable parts of a trampoline.
13108 FNADDR is an RTX for the address of the function's pure code.
13109 CXT is an RTX for the static chain value for the function. */
13112 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
13114 enum machine_mode pmode = Pmode;
13115 int regsize = (TARGET_32BIT) ? 4 : 8;
13116 rtx ctx_reg = force_reg (pmode, cxt);
13118 switch (DEFAULT_ABI)
13123 /* Macros to shorten the code expansions below. */
13124 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13125 #define MEM_PLUS(addr,offset) \
13126 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13128 /* Under AIX, just build the 3 word function descriptor */
13131 rtx fn_reg = gen_reg_rtx (pmode);
13132 rtx toc_reg = gen_reg_rtx (pmode);
13133 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13134 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13135 emit_move_insn (MEM_DEREF (addr), fn_reg);
13136 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13137 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13141 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13144 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13145 FALSE, VOIDmode, 4,
13147 GEN_INT (rs6000_trampoline_size ()), SImode,
13157 /* Table of valid machine attributes. */
13159 const struct attribute_spec rs6000_attribute_table[] =
13161 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13162 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13163 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13164 { NULL, 0, 0, false, false, false, NULL }
13167 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13168 struct attribute_spec.handler. */
13171 rs6000_handle_longcall_attribute (tree *node, tree name,
13172 tree args ATTRIBUTE_UNUSED,
13173 int flags ATTRIBUTE_UNUSED,
13174 bool *no_add_attrs)
13176 if (TREE_CODE (*node) != FUNCTION_TYPE
13177 && TREE_CODE (*node) != FIELD_DECL
13178 && TREE_CODE (*node) != TYPE_DECL)
13180 warning ("`%s' attribute only applies to functions",
13181 IDENTIFIER_POINTER (name));
13182 *no_add_attrs = true;
13188 /* Set longcall attributes on all functions declared when
13189 rs6000_default_long_calls is true. */
13191 rs6000_set_default_type_attributes (tree type)
13193 if (rs6000_default_long_calls
13194 && (TREE_CODE (type) == FUNCTION_TYPE
13195 || TREE_CODE (type) == METHOD_TYPE))
13196 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13198 TYPE_ATTRIBUTES (type));
13201 /* Return a reference suitable for calling a function with the
13202 longcall attribute. */
13205 rs6000_longcall_ref (rtx call_ref)
13207 const char *call_name;
13210 if (GET_CODE (call_ref) != SYMBOL_REF)
13213 /* System V adds '.' to the internal name, so skip them. */
13214 call_name = XSTR (call_ref, 0);
13215 if (*call_name == '.')
13217 while (*call_name == '.')
13220 node = get_identifier (call_name);
13221 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13224 return force_reg (Pmode, call_ref);
13227 #ifdef USING_ELFOS_H
13229 /* A C statement or statements to switch to the appropriate section
13230 for output of RTX in mode MODE. You can assume that RTX is some
13231 kind of constant in RTL. The argument MODE is redundant except in
13232 the case of a `const_int' rtx. Select the section by calling
13233 `text_section' or one of the alternatives for other sections.
13235 Do not define this macro if you put all constants in the read-only
13239 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
13240 unsigned HOST_WIDE_INT align)
13242 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13245 default_elf_select_rtx_section (mode, x, align);
13248 /* A C statement or statements to switch to the appropriate
13249 section for output of DECL. DECL is either a `VAR_DECL' node
13250 or a constant of some sort. RELOC indicates whether forming
13251 the initial value of DECL requires link-time relocations. */
13254 rs6000_elf_select_section (tree decl, int reloc,
13255 unsigned HOST_WIDE_INT align)
13257 /* Pretend that we're always building for a shared library when
13258 ABI_AIX, because otherwise we end up with dynamic relocations
13259 in read-only sections. This happens for function pointers,
13260 references to vtables in typeinfo, and probably other cases. */
13261 default_elf_select_section_1 (decl, reloc, align,
13262 flag_pic || DEFAULT_ABI == ABI_AIX);
13265 /* A C statement to build up a unique section name, expressed as a
13266 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13267 RELOC indicates whether the initial value of EXP requires
13268 link-time relocations. If you do not define this macro, GCC will use
13269 the symbol name prefixed by `.' as the section name. Note - this
13270 macro can now be called for uninitialized data items as well as
13271 initialized data and functions. */
13274 rs6000_elf_unique_section (tree decl, int reloc)
13276 /* As above, pretend that we're always building for a shared library
13277 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13278 default_unique_section_1 (decl, reloc,
13279 flag_pic || DEFAULT_ABI == ABI_AIX);
13282 /* For a SYMBOL_REF, set generic flags and then perform some
13283 target-specific processing.
13285 When the AIX ABI is requested on a non-AIX system, replace the
13286 function name with the real name (with a leading .) rather than the
13287 function descriptor name. This saves a lot of overriding code to
13288 read the prefixes. */
13291 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
13293 default_encode_section_info (decl, rtl, first);
13296 && TREE_CODE (decl) == FUNCTION_DECL
13298 && DEFAULT_ABI == ABI_AIX)
13300 rtx sym_ref = XEXP (rtl, 0);
13301 size_t len = strlen (XSTR (sym_ref, 0));
13302 char *str = alloca (len + 2);
13304 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13305 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13310 rs6000_elf_in_small_data_p (tree decl)
13312 if (rs6000_sdata == SDATA_NONE)
13315 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13317 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13318 if (strcmp (section, ".sdata") == 0
13319 || strcmp (section, ".sdata2") == 0
13320 || strcmp (section, ".sbss") == 0
13321 || strcmp (section, ".sbss2") == 0
13322 || strcmp (section, ".PPC.EMB.sdata0") == 0
13323 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13328 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13331 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13332 /* If it's not public, and we're not going to reference it there,
13333 there's no need to put it in the small data section. */
13334 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13341 #endif /* USING_ELFOS_H */
13344 /* Return a REG that occurs in ADDR with coefficient 1.
13345 ADDR can be effectively incremented by incrementing REG.
13347 r0 is special and we must not select it as an address
13348 register by this routine since our caller will try to
13349 increment the returned register via an "la" instruction. */
13352 find_addr_reg (rtx addr)
13354 while (GET_CODE (addr) == PLUS)
13356 if (GET_CODE (XEXP (addr, 0)) == REG
13357 && REGNO (XEXP (addr, 0)) != 0)
13358 addr = XEXP (addr, 0);
13359 else if (GET_CODE (XEXP (addr, 1)) == REG
13360 && REGNO (XEXP (addr, 1)) != 0)
13361 addr = XEXP (addr, 1);
13362 else if (CONSTANT_P (XEXP (addr, 0)))
13363 addr = XEXP (addr, 1);
13364 else if (CONSTANT_P (XEXP (addr, 1)))
13365 addr = XEXP (addr, 0);
13369 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13375 rs6000_fatal_bad_address (rtx op)
13377 fatal_insn ("bad address", op);
13383 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13384 reference and a constant. */
13387 symbolic_operand (rtx op)
13389 switch (GET_CODE (op))
13396 return (GET_CODE (op) == SYMBOL_REF ||
13397 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13398 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13399 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13406 #ifdef RS6000_LONG_BRANCH
13408 static tree stub_list = 0;
13410 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13411 procedure calls to the linked list. */
13414 add_compiler_stub (tree label_name, tree function_name, int line_number)
13416 tree stub = build_tree_list (function_name, label_name);
13417 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13418 TREE_CHAIN (stub) = stub_list;
13422 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13423 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13424 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13426 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13427 handling procedure calls from the linked list and initializes the
13431 output_compiler_stub ()
13434 char label_buf[256];
13438 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13440 fprintf (asm_out_file,
13441 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13443 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13444 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13445 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13446 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13448 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13450 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13453 label_buf[0] = '_';
13454 strcpy (label_buf+1,
13455 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13458 strcpy (tmp_buf, "lis r12,hi16(");
13459 strcat (tmp_buf, label_buf);
13460 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13461 strcat (tmp_buf, label_buf);
13462 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13463 output_asm_insn (tmp_buf, 0);
13465 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13466 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13467 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13468 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13474 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13475 already there or not. */
13478 no_previous_def (tree function_name)
13481 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13482 if (function_name == STUB_FUNCTION_NAME (stub))
13487 /* GET_PREV_LABEL gets the label name from the previous definition of
13491 get_prev_label (tree function_name)
13494 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13495 if (function_name == STUB_FUNCTION_NAME (stub))
13496 return STUB_LABEL_NAME (stub);
13500 /* INSN is either a function call or a millicode call. It may have an
13501 unconditional jump in its delay slot.
13503 CALL_DEST is the routine we are calling. */
13506 output_call (rtx insn, rtx call_dest, int operand_number)
13508 static char buf[256];
13509 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13512 tree funname = get_identifier (XSTR (call_dest, 0));
13514 if (no_previous_def (funname))
13516 int line_number = 0;
13517 rtx label_rtx = gen_label_rtx ();
13518 char *label_buf, temp_buf[256];
13519 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13520 CODE_LABEL_NUMBER (label_rtx));
13521 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13522 labelname = get_identifier (label_buf);
13523 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13525 line_number = NOTE_LINE_NUMBER (insn);
13526 add_compiler_stub (labelname, funname, line_number);
13529 labelname = get_prev_label (funname);
13531 sprintf (buf, "jbsr %%z%d,%.246s",
13532 operand_number, IDENTIFIER_POINTER (labelname));
13537 sprintf (buf, "bl %%z%d", operand_number);
13542 #endif /* RS6000_LONG_BRANCH */
13544 /* Generate PIC and indirect symbol stubs. */
13547 machopic_output_stub (FILE *file, const char *symb, const char *stub)
13549 unsigned int length;
13550 char *symbol_name, *lazy_ptr_name;
13551 char *local_label_0;
13552 static int label = 0;
13554 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13555 symb = (*targetm.strip_name_encoding) (symb);
13558 length = strlen (symb);
13559 symbol_name = alloca (length + 32);
13560 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13562 lazy_ptr_name = alloca (length + 32);
13563 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13566 machopic_picsymbol_stub1_section ();
13568 machopic_symbol_stub1_section ();
13569 fprintf (file, "\t.align 2\n");
13571 fprintf (file, "%s:\n", stub);
13572 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13577 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
13578 sprintf (local_label_0, "\"L%011d$spb\"", label);
13580 fprintf (file, "\tmflr r0\n");
13581 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13582 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13583 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13584 lazy_ptr_name, local_label_0);
13585 fprintf (file, "\tmtlr r0\n");
13586 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13587 lazy_ptr_name, local_label_0);
13588 fprintf (file, "\tmtctr r12\n");
13589 fprintf (file, "\tbctr\n");
13593 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13594 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13595 fprintf (file, "\tmtctr r12\n");
13596 fprintf (file, "\tbctr\n");
13599 machopic_lazy_symbol_ptr_section ();
13600 fprintf (file, "%s:\n", lazy_ptr_name);
13601 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13602 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13605 /* Legitimize PIC addresses. If the address is already
13606 position-independent, we return ORIG. Newly generated
13607 position-independent addresses go into a reg. This is REG if non
13608 zero, otherwise we allocate register(s) as necessary. */
13610 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13613 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
13618 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13619 reg = gen_reg_rtx (Pmode);
13621 if (GET_CODE (orig) == CONST)
13623 if (GET_CODE (XEXP (orig, 0)) == PLUS
13624 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13627 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13630 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13633 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13639 if (GET_CODE (offset) == CONST_INT)
13641 if (SMALL_INT (offset))
13642 return plus_constant (base, INTVAL (offset));
13643 else if (! reload_in_progress && ! reload_completed)
13644 offset = force_reg (Pmode, offset);
13647 rtx mem = force_const_mem (Pmode, orig);
13648 return machopic_legitimize_pic_address (mem, Pmode, reg);
13651 return gen_rtx (PLUS, Pmode, base, offset);
13654 /* Fall back on generic machopic code. */
13655 return machopic_legitimize_pic_address (orig, mode, reg);
13658 /* This is just a placeholder to make linking work without having to
13659 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13660 ever needed for Darwin (not too likely!) this would have to get a
13661 real definition. */
13668 #endif /* TARGET_MACHO */
13671 static unsigned int
13672 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
13675 = default_section_type_flags_1 (decl, name, reloc,
13676 flag_pic || DEFAULT_ABI == ABI_AIX);
13678 if (TARGET_RELOCATABLE)
13679 flags |= SECTION_WRITE;
13684 /* Record an element in the table of global constructors. SYMBOL is
13685 a SYMBOL_REF of the function to be called; PRIORITY is a number
13686 between 0 and MAX_INIT_PRIORITY.
13688 This differs from default_named_section_asm_out_constructor in
13689 that we have special handling for -mrelocatable. */
13692 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
13694 const char *section = ".ctors";
13697 if (priority != DEFAULT_INIT_PRIORITY)
13699 sprintf (buf, ".ctors.%.5u",
13700 /* Invert the numbering so the linker puts us in the proper
13701 order; constructors are run from right to left, and the
13702 linker sorts in increasing order. */
13703 MAX_INIT_PRIORITY - priority);
13707 named_section_flags (section, SECTION_WRITE);
13708 assemble_align (POINTER_SIZE);
13710 if (TARGET_RELOCATABLE)
13712 fputs ("\t.long (", asm_out_file);
13713 output_addr_const (asm_out_file, symbol);
13714 fputs (")@fixup\n", asm_out_file);
13717 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13721 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
13723 const char *section = ".dtors";
13726 if (priority != DEFAULT_INIT_PRIORITY)
13728 sprintf (buf, ".dtors.%.5u",
13729 /* Invert the numbering so the linker puts us in the proper
13730 order; constructors are run from right to left, and the
13731 linker sorts in increasing order. */
13732 MAX_INIT_PRIORITY - priority);
13736 named_section_flags (section, SECTION_WRITE);
13737 assemble_align (POINTER_SIZE);
13739 if (TARGET_RELOCATABLE)
13741 fputs ("\t.long (", asm_out_file);
13742 output_addr_const (asm_out_file, symbol);
13743 fputs (")@fixup\n", asm_out_file);
13746 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13750 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
13754 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
13755 ASM_OUTPUT_LABEL (file, name);
13756 fputs (DOUBLE_INT_ASM_OP, file);
13758 assemble_name (file, name);
13759 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
13760 assemble_name (file, name);
13761 fputs (",24\n\t.type\t.", file);
13762 assemble_name (file, name);
13763 fputs (",@function\n", file);
13764 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
13766 fputs ("\t.globl\t.", file);
13767 assemble_name (file, name);
13770 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
13772 ASM_OUTPUT_LABEL (file, name);
13776 if (TARGET_RELOCATABLE
13777 && (get_pool_size () != 0 || current_function_profile)
13782 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
13784 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13785 fprintf (file, "\t.long ");
13786 assemble_name (file, buf);
13788 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13789 assemble_name (file, buf);
13793 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
13794 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
13796 if (DEFAULT_ABI == ABI_AIX)
13798 const char *desc_name, *orig_name;
13800 orig_name = (*targetm.strip_name_encoding) (name);
13801 desc_name = orig_name;
13802 while (*desc_name == '.')
13805 if (TREE_PUBLIC (decl))
13806 fprintf (file, "\t.globl %s\n", desc_name);
13808 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
13809 fprintf (file, "%s:\n", desc_name);
13810 fprintf (file, "\t.long %s\n", orig_name);
13811 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
13812 if (DEFAULT_ABI == ABI_AIX)
13813 fputs ("\t.long 0\n", file);
13814 fprintf (file, "\t.previous\n");
13816 ASM_OUTPUT_LABEL (file, name);
13822 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
13824 fputs (GLOBAL_ASM_OP, stream);
13825 RS6000_OUTPUT_BASENAME (stream, name);
13826 putc ('\n', stream);
13830 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
13833 static const char * const suffix[3] = { "PR", "RO", "RW" };
13835 if (flags & SECTION_CODE)
13837 else if (flags & SECTION_WRITE)
13842 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13843 (flags & SECTION_CODE) ? "." : "",
13844 name, suffix[smclass], flags & SECTION_ENTSIZE);
13848 rs6000_xcoff_select_section (tree decl, int reloc,
13849 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
13851 if (decl_readonly_section_1 (decl, reloc, 1))
13853 if (TREE_PUBLIC (decl))
13854 read_only_data_section ();
13856 read_only_private_data_section ();
13860 if (TREE_PUBLIC (decl))
13863 private_data_section ();
13868 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
13872 /* Use select_section for private and uninitialized data. */
13873 if (!TREE_PUBLIC (decl)
13874 || DECL_COMMON (decl)
13875 || DECL_INITIAL (decl) == NULL_TREE
13876 || DECL_INITIAL (decl) == error_mark_node
13877 || (flag_zero_initialized_in_bss
13878 && initializer_zerop (DECL_INITIAL (decl))))
13881 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13882 name = (*targetm.strip_name_encoding) (name);
13883 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13886 /* Select section for constant in constant pool.
13888 On RS/6000, all constants are in the private read-only data area.
13889 However, if this is being placed in the TOC it must be output as a
13893 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
13894 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
13896 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13899 read_only_private_data_section ();
13902 /* Remove any trailing [DS] or the like from the symbol name. */
13904 static const char *
13905 rs6000_xcoff_strip_name_encoding (const char *name)
13910 len = strlen (name);
13911 if (name[len - 1] == ']')
13912 return ggc_alloc_string (name, len - 4);
13917 /* Section attributes. AIX is always PIC. */
13919 static unsigned int
13920 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
13922 unsigned int align;
13923 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13925 /* Align to at least UNIT size. */
13926 if (flags & SECTION_CODE)
13927 align = MIN_UNITS_PER_WORD;
13929 /* Increase alignment of large objects if not already stricter. */
13930 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13931 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13932 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13934 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13937 /* Output at beginning of assembler file.
13939 Initialize the section names for the RS/6000 at this point.
13941 Specify filename, including full path, to assembler.
13943 We want to go into the TOC section so at least one .toc will be emitted.
13944 Also, in order to output proper .bs/.es pairs, we need at least one static
13945 [RW] section emitted.
13947 Finally, declare mcount when profiling to make the assembler happy. */
13950 rs6000_xcoff_file_start ()
13952 rs6000_gen_section_name (&xcoff_bss_section_name,
13953 main_input_filename, ".bss_");
13954 rs6000_gen_section_name (&xcoff_private_data_section_name,
13955 main_input_filename, ".rw_");
13956 rs6000_gen_section_name (&xcoff_read_only_section_name,
13957 main_input_filename, ".ro_");
13959 fputs ("\t.file\t", asm_out_file);
13960 output_quoted_string (asm_out_file, main_input_filename);
13961 fputc ('\n', asm_out_file);
13963 if (write_symbols != NO_DEBUG)
13964 private_data_section ();
13967 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
13968 rs6000_file_start ();
13971 /* Output at end of assembler file.
13972 On the RS/6000, referencing data should automatically pull in text. */
13975 rs6000_xcoff_file_end ()
13978 fputs ("_section_.text:\n", asm_out_file);
13980 fputs (TARGET_32BIT
13981 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
13984 #endif /* TARGET_XCOFF */
13987 /* Cross-module name binding. Darwin does not support overriding
13988 functions at dynamic-link time. */
13991 rs6000_binds_local_p (tree decl)
13993 return default_binds_local_p_1 (decl, 0);
13997 /* Compute a (partial) cost for rtx X. Return true if the complete
13998 cost has been computed, and false if subexpressions should be
13999 scanned. In either case, *TOTAL contains the cost result. */
14002 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
14007 /* On the RS/6000, if it is valid in the insn, it is free.
14008 So this always returns 0. */
14019 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14020 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14021 + 0x8000) >= 0x10000)
14022 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14023 ? COSTS_N_INSNS (2)
14024 : COSTS_N_INSNS (1));
14030 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14031 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14032 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14033 ? COSTS_N_INSNS (2)
14034 : COSTS_N_INSNS (1));
14040 *total = COSTS_N_INSNS (2);
14043 switch (rs6000_cpu)
14045 case PROCESSOR_RIOS1:
14046 case PROCESSOR_PPC405:
14047 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14048 ? COSTS_N_INSNS (5)
14049 : (INTVAL (XEXP (x, 1)) >= -256
14050 && INTVAL (XEXP (x, 1)) <= 255)
14051 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14054 case PROCESSOR_PPC440:
14055 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14056 ? COSTS_N_INSNS (3)
14057 : COSTS_N_INSNS (2));
14060 case PROCESSOR_RS64A:
14061 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14062 ? GET_MODE (XEXP (x, 1)) != DImode
14063 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14064 : (INTVAL (XEXP (x, 1)) >= -256
14065 && INTVAL (XEXP (x, 1)) <= 255)
14066 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14069 case PROCESSOR_RIOS2:
14070 case PROCESSOR_MPCCORE:
14071 case PROCESSOR_PPC604e:
14072 *total = COSTS_N_INSNS (2);
14075 case PROCESSOR_PPC601:
14076 *total = COSTS_N_INSNS (5);
14079 case PROCESSOR_PPC603:
14080 case PROCESSOR_PPC7400:
14081 case PROCESSOR_PPC750:
14082 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14083 ? COSTS_N_INSNS (5)
14084 : (INTVAL (XEXP (x, 1)) >= -256
14085 && INTVAL (XEXP (x, 1)) <= 255)
14086 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14089 case PROCESSOR_PPC7450:
14090 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14091 ? COSTS_N_INSNS (4)
14092 : COSTS_N_INSNS (3));
14095 case PROCESSOR_PPC403:
14096 case PROCESSOR_PPC604:
14097 case PROCESSOR_PPC8540:
14098 *total = COSTS_N_INSNS (4);
14101 case PROCESSOR_PPC620:
14102 case PROCESSOR_PPC630:
14103 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14104 ? GET_MODE (XEXP (x, 1)) != DImode
14105 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14106 : (INTVAL (XEXP (x, 1)) >= -256
14107 && INTVAL (XEXP (x, 1)) <= 255)
14108 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14111 case PROCESSOR_POWER4:
14112 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14113 ? GET_MODE (XEXP (x, 1)) != DImode
14114 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14115 : COSTS_N_INSNS (2));
14124 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14125 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14127 *total = COSTS_N_INSNS (2);
14134 switch (rs6000_cpu)
14136 case PROCESSOR_RIOS1:
14137 *total = COSTS_N_INSNS (19);
14140 case PROCESSOR_RIOS2:
14141 *total = COSTS_N_INSNS (13);
14144 case PROCESSOR_RS64A:
14145 *total = (GET_MODE (XEXP (x, 1)) != DImode
14146 ? COSTS_N_INSNS (65)
14147 : COSTS_N_INSNS (67));
14150 case PROCESSOR_MPCCORE:
14151 *total = COSTS_N_INSNS (6);
14154 case PROCESSOR_PPC403:
14155 *total = COSTS_N_INSNS (33);
14158 case PROCESSOR_PPC405:
14159 *total = COSTS_N_INSNS (35);
14162 case PROCESSOR_PPC440:
14163 *total = COSTS_N_INSNS (34);
14166 case PROCESSOR_PPC601:
14167 *total = COSTS_N_INSNS (36);
14170 case PROCESSOR_PPC603:
14171 *total = COSTS_N_INSNS (37);
14174 case PROCESSOR_PPC604:
14175 case PROCESSOR_PPC604e:
14176 *total = COSTS_N_INSNS (20);
14179 case PROCESSOR_PPC620:
14180 case PROCESSOR_PPC630:
14181 *total = (GET_MODE (XEXP (x, 1)) != DImode
14182 ? COSTS_N_INSNS (21)
14183 : COSTS_N_INSNS (37));
14186 case PROCESSOR_PPC750:
14187 case PROCESSOR_PPC8540:
14188 case PROCESSOR_PPC7400:
14189 *total = COSTS_N_INSNS (19);
14192 case PROCESSOR_PPC7450:
14193 *total = COSTS_N_INSNS (23);
14196 case PROCESSOR_POWER4:
14197 *total = (GET_MODE (XEXP (x, 1)) != DImode
14198 ? COSTS_N_INSNS (18)
14199 : COSTS_N_INSNS (34));
14207 *total = COSTS_N_INSNS (4);
14211 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14220 /* A C expression returning the cost of moving data from a register of class
14221 CLASS1 to one of CLASS2. */
14224 rs6000_register_move_cost (enum machine_mode mode,
14225 enum reg_class from, enum reg_class to)
14227 /* Moves from/to GENERAL_REGS. */
14228 if (reg_classes_intersect_p (to, GENERAL_REGS)
14229 || reg_classes_intersect_p (from, GENERAL_REGS))
14231 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14234 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14235 return (rs6000_memory_move_cost (mode, from, 0)
14236 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14238 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14239 else if (from == CR_REGS)
14243 /* A move will cost one instruction per GPR moved. */
14244 return 2 * HARD_REGNO_NREGS (0, mode);
14247 /* Moving between two similar registers is just one instruction. */
14248 else if (reg_classes_intersect_p (to, from))
14249 return mode == TFmode ? 4 : 2;
14251 /* Everything else has to go through GENERAL_REGS. */
14253 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14254 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14257 /* A C expressions returning the cost of moving data of MODE from a register to
14261 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
14262 int in ATTRIBUTE_UNUSED)
14264 if (reg_classes_intersect_p (class, GENERAL_REGS))
14265 return 4 * HARD_REGNO_NREGS (0, mode);
14266 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14267 return 4 * HARD_REGNO_NREGS (32, mode);
14268 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14269 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14271 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14274 /* Return an RTX representing where to find the function value of a
14275 function returning MODE. */
14277 rs6000_complex_function_value (enum machine_mode mode)
14279 unsigned int regno;
14281 enum machine_mode inner = GET_MODE_INNER (mode);
14283 if (FLOAT_MODE_P (mode))
14284 regno = FP_ARG_RETURN;
14287 regno = GP_ARG_RETURN;
14289 /* 32-bit is OK since it'll go in r3/r4. */
14291 && GET_MODE_BITSIZE (inner) >= 32)
14292 return gen_rtx_REG (mode, regno);
14295 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14297 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14298 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14299 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14302 /* Define how to find the value returned by a function.
14303 VALTYPE is the data type of the value (as a tree).
14304 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14305 otherwise, FUNC is 0.
14307 On the SPE, both FPs and vectors are returned in r3.
14309 On RS/6000 an integer value is in r3 and a floating-point value is in
14310 fp1, unless -msoft-float. */
14313 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14315 enum machine_mode mode;
14316 unsigned int regno;
14318 if ((INTEGRAL_TYPE_P (valtype)
14319 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14320 || POINTER_TYPE_P (valtype))
14323 mode = TYPE_MODE (valtype);
14325 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14326 regno = FP_ARG_RETURN;
14327 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14328 && TARGET_HARD_FLOAT
14329 && SPLIT_COMPLEX_ARGS)
14330 return rs6000_complex_function_value (mode);
14331 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14332 regno = ALTIVEC_ARG_RETURN;
14334 regno = GP_ARG_RETURN;
14336 return gen_rtx_REG (mode, regno);
14339 /* Define how to find the value returned by a library function
14340 assuming the value has mode MODE. */
14342 rs6000_libcall_value (enum machine_mode mode)
14344 unsigned int regno;
14346 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14347 && TARGET_HARD_FLOAT && TARGET_FPRS)
14348 regno = FP_ARG_RETURN;
14349 else if (ALTIVEC_VECTOR_MODE (mode))
14350 regno = ALTIVEC_ARG_RETURN;
14351 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14352 return rs6000_complex_function_value (mode);
14354 regno = GP_ARG_RETURN;
14356 return gen_rtx_REG (mode, regno);
14359 /* Return true if TYPE is of type __ev64_opaque__. */
14362 is_ev64_opaque_type (tree type)
14365 && (type == opaque_V2SI_type_node
14366 || type == opaque_V2SF_type_node
14367 || type == opaque_p_V2SI_type_node));
14371 rs6000_dwarf_register_span (rtx reg)
14375 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14378 regno = REGNO (reg);
14380 /* The duality of the SPE register size wreaks all kinds of havoc.
14381 This is a way of distinguishing r0 in 32-bits from r0 in
14384 gen_rtx_PARALLEL (VOIDmode,
14387 gen_rtx_REG (SImode, regno + 1200),
14388 gen_rtx_REG (SImode, regno))
14390 gen_rtx_REG (SImode, regno),
14391 gen_rtx_REG (SImode, regno + 1200)));
14394 #include "gt-rs6000.h"