1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
55 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
58 #ifndef TARGET_NO_PROTOTYPE
59 #define TARGET_NO_PROTOTYPE 0
62 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
63 && easy_vector_same (x, y))
65 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
67 && easy_vector_same (x, y))
69 #define min(A,B) ((A) < (B) ? (A) : (B))
70 #define max(A,B) ((A) > (B) ? (A) : (B))
74 enum processor_type rs6000_cpu;
75 struct rs6000_cpu_select rs6000_select[3] =
77 /* switch name, tune arch */
78 { (const char *)0, "--with-cpu=", 1, 1 },
79 { (const char *)0, "-mcpu=", 1, 1 },
80 { (const char *)0, "-mtune=", 1, 0 },
83 /* Size of long double */
84 const char *rs6000_long_double_size_string;
85 int rs6000_long_double_type_size;
87 /* Whether -mabi=altivec has appeared */
88 int rs6000_altivec_abi;
90 /* Whether VRSAVE instructions should be generated. */
91 int rs6000_altivec_vrsave;
93 /* String from -mvrsave= option. */
94 const char *rs6000_altivec_vrsave_string;
96 /* Nonzero if we want SPE ABI extensions. */
99 /* Whether isel instructions should be generated. */
102 /* Whether SPE simd instructions should be generated. */
105 /* Nonzero if floating point operations are done in the GPRs. */
106 int rs6000_float_gprs = 0;
108 /* String from -mfloat-gprs=. */
109 const char *rs6000_float_gprs_string;
111 /* String from -misel=. */
112 const char *rs6000_isel_string;
114 /* String from -mspe=. */
115 const char *rs6000_spe_string;
117 /* Set to nonzero once AIX common-mode calls have been defined. */
118 static GTY(()) int common_mode_defined;
120 /* Save information from a "cmpxx" operation until the branch or scc is
122 rtx rs6000_compare_op0, rs6000_compare_op1;
123 int rs6000_compare_fp_p;
125 /* Label number of label created for -mrelocatable, to call to so we can
126 get the address of the GOT section */
127 int rs6000_pic_labelno;
130 /* Which abi to adhere to */
131 const char *rs6000_abi_name;
133 /* Semantics of the small data area */
134 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
136 /* Which small data model to use */
137 const char *rs6000_sdata_name = (char *)0;
139 /* Counter for labels which are to be placed in .fixup. */
140 int fixuplabelno = 0;
143 /* Bit size of immediate TLS offsets and string from which it is decoded. */
144 int rs6000_tls_size = 32;
145 const char *rs6000_tls_size_string;
147 /* ABI enumeration available for subtarget to use. */
148 enum rs6000_abi rs6000_current_abi;
150 /* ABI string from -mabi= option. */
151 const char *rs6000_abi_string;
154 const char *rs6000_debug_name;
155 int rs6000_debug_stack; /* debug stack applications */
156 int rs6000_debug_arg; /* debug argument handling */
159 static GTY(()) tree opaque_V2SI_type_node;
160 static GTY(()) tree opaque_V2SF_type_node;
161 static GTY(()) tree opaque_p_V2SI_type_node;
163 const char *rs6000_traceback_name;
165 traceback_default = 0,
171 /* Flag to say the TOC is initialized */
173 char toc_label_name[10];
175 /* Alias set for saves and restores from the rs6000 stack. */
176 static int rs6000_sr_alias_set;
178 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
179 The only place that looks at this is rs6000_set_default_type_attributes;
180 everywhere else should rely on the presence or absence of a longcall
181 attribute on the function declaration. */
182 int rs6000_default_long_calls;
183 const char *rs6000_longcall_switch;
185 /* Control alignment for fields within structures. */
186 /* String from -malign-XXXXX. */
187 const char *rs6000_alignment_string;
188 int rs6000_alignment_flags;
190 struct builtin_description
192 /* mask is not const because we're going to alter it below. This
193 nonsense will go away when we rewrite the -march infrastructure
194 to give us more target flag bits. */
196 const enum insn_code icode;
197 const char *const name;
198 const enum rs6000_builtins code;
201 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
202 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
203 static void validate_condition_mode
204 PARAMS ((enum rtx_code, enum machine_mode));
205 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
206 static void rs6000_maybe_dead PARAMS ((rtx));
207 static void rs6000_emit_stack_tie PARAMS ((void));
208 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
209 static rtx spe_synthesize_frame_save PARAMS ((rtx));
210 static bool spe_func_has_64bit_regs_p PARAMS ((void));
211 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
212 unsigned int, int, int));
213 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
214 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
215 static unsigned rs6000_hash_constant PARAMS ((rtx));
216 static unsigned toc_hash_function PARAMS ((const void *));
217 static int toc_hash_eq PARAMS ((const void *, const void *));
218 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
219 static bool constant_pool_expr_p PARAMS ((rtx));
220 static bool toc_relative_expr_p PARAMS ((rtx));
221 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
222 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
223 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
224 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
225 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
226 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
227 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
228 #ifdef HAVE_GAS_HIDDEN
229 static void rs6000_assemble_visibility PARAMS ((tree, int));
231 static int rs6000_ra_ever_killed PARAMS ((void));
232 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
233 extern const struct attribute_spec rs6000_attribute_table[];
234 static void rs6000_set_default_type_attributes PARAMS ((tree));
235 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
236 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
237 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
238 HOST_WIDE_INT, tree));
239 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
240 HOST_WIDE_INT, HOST_WIDE_INT));
241 static void rs6000_file_start PARAMS ((void));
243 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
245 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
246 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
247 static void rs6000_elf_select_section PARAMS ((tree, int,
248 unsigned HOST_WIDE_INT));
249 static void rs6000_elf_unique_section PARAMS ((tree, int));
250 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
251 unsigned HOST_WIDE_INT));
252 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
254 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
257 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
258 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
259 static void rs6000_xcoff_select_section PARAMS ((tree, int,
260 unsigned HOST_WIDE_INT));
261 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
262 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
263 unsigned HOST_WIDE_INT));
264 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
265 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
266 static void rs6000_xcoff_file_start PARAMS ((void));
267 static void rs6000_xcoff_file_end PARAMS ((void));
270 static bool rs6000_binds_local_p PARAMS ((tree));
272 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
273 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
274 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
275 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
276 static int rs6000_adjust_priority PARAMS ((rtx, int));
277 static int rs6000_issue_rate PARAMS ((void));
278 static int rs6000_use_sched_lookahead PARAMS ((void));
280 static void rs6000_init_builtins PARAMS ((void));
281 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
282 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
283 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
284 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
285 static void altivec_init_builtins PARAMS ((void));
286 static void rs6000_common_init_builtins PARAMS ((void));
288 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
289 int, enum rs6000_builtins,
290 enum rs6000_builtins));
291 static void spe_init_builtins PARAMS ((void));
292 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
293 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
294 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
295 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
297 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
298 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
299 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
300 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
301 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
302 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
303 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
304 static void rs6000_parse_abi_options PARAMS ((void));
305 static void rs6000_parse_alignment_option PARAMS ((void));
306 static void rs6000_parse_tls_size_option PARAMS ((void));
307 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
308 static int first_altivec_reg_to_save PARAMS ((void));
309 static unsigned int compute_vrsave_mask PARAMS ((void));
310 static void is_altivec_return_reg PARAMS ((rtx, void *));
311 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
312 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
313 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
314 static bool is_ev64_opaque_type PARAMS ((tree));
315 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
316 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
317 static rtx rs6000_tls_get_addr PARAMS ((void));
318 static rtx rs6000_got_sym PARAMS ((void));
319 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
320 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
321 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
322 static rtx rs6000_complex_function_value (enum machine_mode);
323 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, enum machine_mode, tree);
325 /* Hash table stuff for keeping track of TOC entries. */
327 struct toc_hash_struct GTY(())
329 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
330 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
332 enum machine_mode key_mode;
336 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
338 /* Default register names. */
339 char rs6000_reg_names[][8] =
341 "0", "1", "2", "3", "4", "5", "6", "7",
342 "8", "9", "10", "11", "12", "13", "14", "15",
343 "16", "17", "18", "19", "20", "21", "22", "23",
344 "24", "25", "26", "27", "28", "29", "30", "31",
345 "0", "1", "2", "3", "4", "5", "6", "7",
346 "8", "9", "10", "11", "12", "13", "14", "15",
347 "16", "17", "18", "19", "20", "21", "22", "23",
348 "24", "25", "26", "27", "28", "29", "30", "31",
349 "mq", "lr", "ctr","ap",
350 "0", "1", "2", "3", "4", "5", "6", "7",
352 /* AltiVec registers. */
353 "0", "1", "2", "3", "4", "5", "6", "7",
354 "8", "9", "10", "11", "12", "13", "14", "15",
355 "16", "17", "18", "19", "20", "21", "22", "23",
356 "24", "25", "26", "27", "28", "29", "30", "31",
362 #ifdef TARGET_REGNAMES
363 static const char alt_reg_names[][8] =
365 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
366 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
367 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
368 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
369 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
370 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
371 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
372 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
373 "mq", "lr", "ctr", "ap",
374 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
376 /* AltiVec registers. */
377 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
378 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
379 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
380 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
387 #ifndef MASK_STRICT_ALIGN
388 #define MASK_STRICT_ALIGN 0
390 #ifndef TARGET_PROFILE_KERNEL
391 #define TARGET_PROFILE_KERNEL 0
394 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
395 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
397 /* Return 1 for a symbol ref for a thread-local storage symbol. */
398 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
399 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
401 /* Initialize the GCC target structure. */
402 #undef TARGET_ATTRIBUTE_TABLE
403 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
404 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
405 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
407 #undef TARGET_ASM_ALIGNED_DI_OP
408 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
410 /* Default unaligned ops are only provided for ELF. Find the ops needed
411 for non-ELF systems. */
412 #ifndef OBJECT_FORMAT_ELF
414 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
416 #undef TARGET_ASM_UNALIGNED_HI_OP
417 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
418 #undef TARGET_ASM_UNALIGNED_SI_OP
419 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
420 #undef TARGET_ASM_UNALIGNED_DI_OP
421 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
424 #undef TARGET_ASM_UNALIGNED_HI_OP
425 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
426 #undef TARGET_ASM_UNALIGNED_SI_OP
427 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
431 /* This hook deals with fixups for relocatable code and DI-mode objects
433 #undef TARGET_ASM_INTEGER
434 #define TARGET_ASM_INTEGER rs6000_assemble_integer
436 #ifdef HAVE_GAS_HIDDEN
437 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
438 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
441 #undef TARGET_HAVE_TLS
442 #define TARGET_HAVE_TLS HAVE_AS_TLS
444 #undef TARGET_CANNOT_FORCE_CONST_MEM
445 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
447 #undef TARGET_ASM_FUNCTION_PROLOGUE
448 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
449 #undef TARGET_ASM_FUNCTION_EPILOGUE
450 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
452 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
453 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
454 #undef TARGET_SCHED_VARIABLE_ISSUE
455 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
457 #undef TARGET_SCHED_ISSUE_RATE
458 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
459 #undef TARGET_SCHED_ADJUST_COST
460 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
461 #undef TARGET_SCHED_ADJUST_PRIORITY
462 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
464 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
465 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
467 #undef TARGET_INIT_BUILTINS
468 #define TARGET_INIT_BUILTINS rs6000_init_builtins
470 #undef TARGET_EXPAND_BUILTIN
471 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
474 #undef TARGET_BINDS_LOCAL_P
475 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
478 #undef TARGET_ASM_OUTPUT_MI_THUNK
479 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
481 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
482 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
484 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
485 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
487 #undef TARGET_RTX_COSTS
488 #define TARGET_RTX_COSTS rs6000_rtx_costs
489 #undef TARGET_ADDRESS_COST
490 #define TARGET_ADDRESS_COST hook_int_rtx_0
492 #undef TARGET_VECTOR_OPAQUE_P
493 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
495 #undef TARGET_DWARF_REGISTER_SPAN
496 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
498 struct gcc_target targetm = TARGET_INITIALIZER;
500 /* Override command line options. Mostly we process the processor
501 type and sometimes adjust other TARGET_ options. */
504 rs6000_override_options (default_cpu)
505 const char *default_cpu;
508 struct rs6000_cpu_select *ptr;
510 /* Simplify the entries below by making a mask for any POWER
511 variant and any PowerPC variant. */
513 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
514 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
515 | MASK_PPC_GFXOPT | MASK_POWERPC64)
516 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
520 const char *const name; /* Canonical processor name. */
521 const enum processor_type processor; /* Processor type enum value. */
522 const int target_enable; /* Target flags to enable. */
523 const int target_disable; /* Target flags to disable. */
524 } const processor_target_table[]
525 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_MASKS},
527 {"power", PROCESSOR_POWER,
528 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
529 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
530 {"power2", PROCESSOR_POWER,
531 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
532 POWERPC_MASKS | MASK_NEW_MNEMONICS},
533 {"power3", PROCESSOR_PPC630,
534 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
536 {"power4", PROCESSOR_POWER4,
537 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
539 {"powerpc", PROCESSOR_POWERPC,
540 MASK_POWERPC | MASK_NEW_MNEMONICS,
541 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
542 {"powerpc64", PROCESSOR_POWERPC64,
543 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
544 POWER_MASKS | POWERPC_OPT_MASKS},
545 {"rios", PROCESSOR_RIOS1,
546 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
547 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
548 {"rios1", PROCESSOR_RIOS1,
549 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
550 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
551 {"rsc", PROCESSOR_PPC601,
552 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
553 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
554 {"rsc1", PROCESSOR_PPC601,
555 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
556 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
557 {"rios2", PROCESSOR_RIOS2,
558 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
559 POWERPC_MASKS | MASK_NEW_MNEMONICS},
560 {"rs64a", PROCESSOR_RS64A,
561 MASK_POWERPC | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS},
563 {"401", PROCESSOR_PPC403,
564 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
565 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
566 {"403", PROCESSOR_PPC403,
567 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
568 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
569 {"405", PROCESSOR_PPC405,
570 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
571 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
572 {"405fp", PROCESSOR_PPC405,
573 MASK_POWERPC | MASK_NEW_MNEMONICS,
574 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
575 {"440", PROCESSOR_PPC440,
576 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
578 {"440fp", PROCESSOR_PPC440,
579 MASK_POWERPC | MASK_NEW_MNEMONICS,
580 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
581 {"505", PROCESSOR_MPCCORE,
582 MASK_POWERPC | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"601", PROCESSOR_PPC601,
585 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
586 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
587 {"602", PROCESSOR_PPC603,
588 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
590 {"603", PROCESSOR_PPC603,
591 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
593 {"603e", PROCESSOR_PPC603,
594 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
595 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
596 {"ec603e", PROCESSOR_PPC603,
597 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
598 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
599 {"604", PROCESSOR_PPC604,
600 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
602 {"604e", PROCESSOR_PPC604e,
603 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
605 {"620", PROCESSOR_PPC620,
606 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
608 {"630", PROCESSOR_PPC630,
609 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
611 {"740", PROCESSOR_PPC750,
612 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
613 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
614 {"750", PROCESSOR_PPC750,
615 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
616 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
617 {"7400", PROCESSOR_PPC7400,
618 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
619 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
620 {"7450", PROCESSOR_PPC7450,
621 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
622 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
623 {"8540", PROCESSOR_PPC8540,
624 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
625 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
626 {"801", PROCESSOR_MPCCORE,
627 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
628 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
629 {"821", PROCESSOR_MPCCORE,
630 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
631 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
632 {"823", PROCESSOR_MPCCORE,
633 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
634 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
635 {"860", PROCESSOR_MPCCORE,
636 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
637 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
638 {"970", PROCESSOR_POWER4,
639 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
642 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
644 /* Save current -mmultiple/-mno-multiple status. */
645 int multiple = TARGET_MULTIPLE;
646 /* Save current -mstring/-mno-string status. */
647 int string = TARGET_STRING;
649 /* Identify the processor type. */
650 rs6000_select[0].string = default_cpu;
651 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
653 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
655 ptr = &rs6000_select[i];
656 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
658 for (j = 0; j < ptt_size; j++)
659 if (! strcmp (ptr->string, processor_target_table[j].name))
662 rs6000_cpu = processor_target_table[j].processor;
666 target_flags |= processor_target_table[j].target_enable;
667 target_flags &= ~processor_target_table[j].target_disable;
673 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
680 /* If we are optimizing big endian systems for space, use the load/store
681 multiple and string instructions. */
682 if (BYTES_BIG_ENDIAN && optimize_size)
683 target_flags |= MASK_MULTIPLE | MASK_STRING;
685 /* If -mmultiple or -mno-multiple was explicitly used, don't
686 override with the processor default */
687 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
688 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
690 /* If -mstring or -mno-string was explicitly used, don't override
691 with the processor default. */
692 if ((target_flags_explicit & MASK_STRING) != 0)
693 target_flags = (target_flags & ~MASK_STRING) | string;
695 /* Don't allow -mmultiple or -mstring on little endian systems
696 unless the cpu is a 750, because the hardware doesn't support the
697 instructions used in little endian mode, and causes an alignment
698 trap. The 750 does not cause an alignment trap (except when the
699 target is unaligned). */
701 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
705 target_flags &= ~MASK_MULTIPLE;
706 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
707 warning ("-mmultiple is not supported on little endian systems");
712 target_flags &= ~MASK_STRING;
713 if ((target_flags_explicit & MASK_STRING) != 0)
714 warning ("-mstring is not supported on little endian systems");
718 /* Set debug flags */
719 if (rs6000_debug_name)
721 if (! strcmp (rs6000_debug_name, "all"))
722 rs6000_debug_stack = rs6000_debug_arg = 1;
723 else if (! strcmp (rs6000_debug_name, "stack"))
724 rs6000_debug_stack = 1;
725 else if (! strcmp (rs6000_debug_name, "arg"))
726 rs6000_debug_arg = 1;
728 error ("unknown -mdebug-%s switch", rs6000_debug_name);
731 if (rs6000_traceback_name)
733 if (! strncmp (rs6000_traceback_name, "full", 4))
734 rs6000_traceback = traceback_full;
735 else if (! strncmp (rs6000_traceback_name, "part", 4))
736 rs6000_traceback = traceback_part;
737 else if (! strncmp (rs6000_traceback_name, "no", 2))
738 rs6000_traceback = traceback_none;
740 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
741 rs6000_traceback_name);
744 /* Set size of long double */
745 rs6000_long_double_type_size = 64;
746 if (rs6000_long_double_size_string)
749 int size = strtol (rs6000_long_double_size_string, &tail, 10);
750 if (*tail != '\0' || (size != 64 && size != 128))
751 error ("Unknown switch -mlong-double-%s",
752 rs6000_long_double_size_string);
754 rs6000_long_double_type_size = size;
757 /* Handle -mabi= options. */
758 rs6000_parse_abi_options ();
760 /* Handle -malign-XXXXX option. */
761 rs6000_parse_alignment_option ();
763 /* Handle generic -mFOO=YES/NO options. */
764 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
765 &rs6000_altivec_vrsave);
766 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
768 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
769 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
772 /* Handle -mtls-size option. */
773 rs6000_parse_tls_size_option ();
775 #ifdef SUBTARGET_OVERRIDE_OPTIONS
776 SUBTARGET_OVERRIDE_OPTIONS;
778 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
779 SUBSUBTARGET_OVERRIDE_OPTIONS;
784 /* The e500 does not have string instructions, and we set
785 MASK_STRING above when optimizing for size. */
786 if ((target_flags & MASK_STRING) != 0)
787 target_flags = target_flags & ~MASK_STRING;
789 /* No SPE means 64-bit long doubles, even if an E500. */
790 if (rs6000_spe_string != 0
791 && !strcmp (rs6000_spe_string, "no"))
792 rs6000_long_double_type_size = 64;
794 else if (rs6000_select[1].string != NULL)
796 /* For the powerpc-eabispe configuration, we set all these by
797 default, so let's unset them if we manually set another
798 CPU that is not the E500. */
799 if (rs6000_abi_string == 0)
801 if (rs6000_spe_string == 0)
803 if (rs6000_float_gprs_string == 0)
804 rs6000_float_gprs = 0;
805 if (rs6000_isel_string == 0)
807 if (rs6000_long_double_size_string == 0)
808 rs6000_long_double_type_size = 64;
811 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
812 using TARGET_OPTIONS to handle a toggle switch, but we're out of
813 bits in target_flags so TARGET_SWITCHES cannot be used.
814 Assumption here is that rs6000_longcall_switch points into the
815 text of the complete option, rather than being a copy, so we can
816 scan back for the presence or absence of the no- modifier. */
817 if (rs6000_longcall_switch)
819 const char *base = rs6000_longcall_switch;
820 while (base[-1] != 'm') base--;
822 if (*rs6000_longcall_switch != '\0')
823 error ("invalid option `%s'", base);
824 rs6000_default_long_calls = (base[0] != 'n');
827 #ifdef TARGET_REGNAMES
828 /* If the user desires alternate register names, copy in the
829 alternate names now. */
831 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
834 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
835 If -maix-struct-return or -msvr4-struct-return was explicitly
836 used, don't override with the ABI default. */
837 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
839 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
840 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
842 target_flags |= MASK_AIX_STRUCT_RET;
845 if (TARGET_LONG_DOUBLE_128
846 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
847 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
849 /* Allocate an alias set for register saves & restores from stack. */
850 rs6000_sr_alias_set = new_alias_set ();
853 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
855 /* We can only guarantee the availability of DI pseudo-ops when
856 assembling for 64-bit targets. */
859 targetm.asm_out.aligned_op.di = NULL;
860 targetm.asm_out.unaligned_op.di = NULL;
863 /* Set maximum branch target alignment at two instructions, eight bytes. */
864 align_jumps_max_skip = 8;
865 align_loops_max_skip = 8;
867 /* Arrange to save and restore machine status around nested functions. */
868 init_machine_status = rs6000_init_machine_status;
871 /* Handle generic options of the form -mfoo=yes/no.
872 NAME is the option name.
873 VALUE is the option value.
874 FLAG is the pointer to the flag where to store a 1 or 0, depending on
875 whether the option value is 'yes' or 'no' respectively. */
877 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
881 else if (!strcmp (value, "yes"))
883 else if (!strcmp (value, "no"))
886 error ("unknown -m%s= option specified: '%s'", name, value);
889 /* Handle -mabi= options. */
891 rs6000_parse_abi_options ()
893 if (rs6000_abi_string == 0)
895 else if (! strcmp (rs6000_abi_string, "altivec"))
896 rs6000_altivec_abi = 1;
897 else if (! strcmp (rs6000_abi_string, "no-altivec"))
898 rs6000_altivec_abi = 0;
899 else if (! strcmp (rs6000_abi_string, "spe"))
903 error ("not configured for ABI: '%s'", rs6000_abi_string);
906 else if (! strcmp (rs6000_abi_string, "no-spe"))
909 error ("unknown ABI specified: '%s'", rs6000_abi_string);
912 /* Handle -malign-XXXXXX options. */
914 rs6000_parse_alignment_option ()
916 if (rs6000_alignment_string == 0
917 || ! strcmp (rs6000_alignment_string, "power"))
918 rs6000_alignment_flags = MASK_ALIGN_POWER;
919 else if (! strcmp (rs6000_alignment_string, "natural"))
920 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
922 error ("unknown -malign-XXXXX option specified: '%s'",
923 rs6000_alignment_string);
926 /* Validate and record the size specified with the -mtls-size option. */
929 rs6000_parse_tls_size_option ()
931 if (rs6000_tls_size_string == 0)
933 else if (strcmp (rs6000_tls_size_string, "16") == 0)
934 rs6000_tls_size = 16;
935 else if (strcmp (rs6000_tls_size_string, "32") == 0)
936 rs6000_tls_size = 32;
937 else if (strcmp (rs6000_tls_size_string, "64") == 0)
938 rs6000_tls_size = 64;
940 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
944 optimization_options (level, size)
945 int level ATTRIBUTE_UNUSED;
946 int size ATTRIBUTE_UNUSED;
950 /* Do anything needed at the start of the asm file. */
957 const char *start = buffer;
958 struct rs6000_cpu_select *ptr;
959 const char *default_cpu = TARGET_CPU_DEFAULT;
960 FILE *file = asm_out_file;
962 default_file_start ();
964 #ifdef TARGET_BI_ARCH
965 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
969 if (flag_verbose_asm)
971 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
972 rs6000_select[0].string = default_cpu;
974 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
976 ptr = &rs6000_select[i];
977 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
979 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
985 switch (rs6000_sdata)
987 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
988 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
989 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
990 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
993 if (rs6000_sdata && g_switch_value)
995 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1006 /* Return nonzero if this function is known to have a null epilogue. */
1011 if (reload_completed)
1013 rs6000_stack_t *info = rs6000_stack_info ();
1015 if (info->first_gp_reg_save == 32
1016 && info->first_fp_reg_save == 64
1017 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1018 && ! info->lr_save_p
1019 && ! info->cr_save_p
1020 && info->vrsave_mask == 0
1028 /* Returns 1 always. */
1031 any_operand (op, mode)
1032 rtx op ATTRIBUTE_UNUSED;
1033 enum machine_mode mode ATTRIBUTE_UNUSED;
1038 /* Returns 1 if op is the count register. */
1040 count_register_operand (op, mode)
1042 enum machine_mode mode ATTRIBUTE_UNUSED;
1044 if (GET_CODE (op) != REG)
1047 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1050 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1056 /* Returns 1 if op is an altivec register. */
1058 altivec_register_operand (op, mode)
1060 enum machine_mode mode ATTRIBUTE_UNUSED;
1063 return (register_operand (op, mode)
1064 && (GET_CODE (op) != REG
1065 || REGNO (op) > FIRST_PSEUDO_REGISTER
1066 || ALTIVEC_REGNO_P (REGNO (op))));
1070 xer_operand (op, mode)
1072 enum machine_mode mode ATTRIBUTE_UNUSED;
1074 if (GET_CODE (op) != REG)
1077 if (XER_REGNO_P (REGNO (op)))
1083 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1084 by such constants completes more quickly. */
1087 s8bit_cint_operand (op, mode)
1089 enum machine_mode mode ATTRIBUTE_UNUSED;
1091 return ( GET_CODE (op) == CONST_INT
1092 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1095 /* Return 1 if OP is a constant that can fit in a D field. */
1098 short_cint_operand (op, mode)
1100 enum machine_mode mode ATTRIBUTE_UNUSED;
1102 return (GET_CODE (op) == CONST_INT
1103 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1106 /* Similar for an unsigned D field. */
1109 u_short_cint_operand (op, mode)
1111 enum machine_mode mode ATTRIBUTE_UNUSED;
1113 return (GET_CODE (op) == CONST_INT
1114 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1117 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1120 non_short_cint_operand (op, mode)
1122 enum machine_mode mode ATTRIBUTE_UNUSED;
1124 return (GET_CODE (op) == CONST_INT
1125 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1128 /* Returns 1 if OP is a CONST_INT that is a positive value
1129 and an exact power of 2. */
1132 exact_log2_cint_operand (op, mode)
1134 enum machine_mode mode ATTRIBUTE_UNUSED;
1136 return (GET_CODE (op) == CONST_INT
1138 && exact_log2 (INTVAL (op)) >= 0);
1141 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1145 gpc_reg_operand (op, mode)
1147 enum machine_mode mode;
1149 return (register_operand (op, mode)
1150 && (GET_CODE (op) != REG
1151 || (REGNO (op) >= ARG_POINTER_REGNUM
1152 && !XER_REGNO_P (REGNO (op)))
1153 || REGNO (op) < MQ_REGNO));
1156 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1160 cc_reg_operand (op, mode)
1162 enum machine_mode mode;
1164 return (register_operand (op, mode)
1165 && (GET_CODE (op) != REG
1166 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1167 || CR_REGNO_P (REGNO (op))));
1170 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1171 CR field that isn't CR0. */
1174 cc_reg_not_cr0_operand (op, mode)
1176 enum machine_mode mode;
1178 return (register_operand (op, mode)
1179 && (GET_CODE (op) != REG
1180 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1181 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1184 /* Returns 1 if OP is either a constant integer valid for a D-field or
1185 a non-special register. If a register, it must be in the proper
1186 mode unless MODE is VOIDmode. */
1189 reg_or_short_operand (op, mode)
1191 enum machine_mode mode;
1193 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1196 /* Similar, except check if the negation of the constant would be
1197 valid for a D-field. */
1200 reg_or_neg_short_operand (op, mode)
1202 enum machine_mode mode;
1204 if (GET_CODE (op) == CONST_INT)
1205 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1207 return gpc_reg_operand (op, mode);
1210 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1211 a non-special register. If a register, it must be in the proper
1212 mode unless MODE is VOIDmode. */
1215 reg_or_aligned_short_operand (op, mode)
1217 enum machine_mode mode;
1219 if (gpc_reg_operand (op, mode))
1221 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1228 /* Return 1 if the operand is either a register or an integer whose
1229 high-order 16 bits are zero. */
1232 reg_or_u_short_operand (op, mode)
1234 enum machine_mode mode;
1236 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1239 /* Return 1 is the operand is either a non-special register or ANY
1240 constant integer. */
1243 reg_or_cint_operand (op, mode)
1245 enum machine_mode mode;
1247 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1250 /* Return 1 is the operand is either a non-special register or ANY
1251 32-bit signed constant integer. */
1254 reg_or_arith_cint_operand (op, mode)
1256 enum machine_mode mode;
1258 return (gpc_reg_operand (op, mode)
1259 || (GET_CODE (op) == CONST_INT
1260 #if HOST_BITS_PER_WIDE_INT != 32
1261 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1262 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1267 /* Return 1 is the operand is either a non-special register or a 32-bit
1268 signed constant integer valid for 64-bit addition. */
1271 reg_or_add_cint64_operand (op, mode)
1273 enum machine_mode mode;
1275 return (gpc_reg_operand (op, mode)
1276 || (GET_CODE (op) == CONST_INT
1277 #if HOST_BITS_PER_WIDE_INT == 32
1278 && INTVAL (op) < 0x7fff8000
1280 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1286 /* Return 1 is the operand is either a non-special register or a 32-bit
1287 signed constant integer valid for 64-bit subtraction. */
1290 reg_or_sub_cint64_operand (op, mode)
1292 enum machine_mode mode;
1294 return (gpc_reg_operand (op, mode)
1295 || (GET_CODE (op) == CONST_INT
1296 #if HOST_BITS_PER_WIDE_INT == 32
1297 && (- INTVAL (op)) < 0x7fff8000
1299 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1305 /* Return 1 is the operand is either a non-special register or ANY
1306 32-bit unsigned constant integer. */
1309 reg_or_logical_cint_operand (op, mode)
1311 enum machine_mode mode;
1313 if (GET_CODE (op) == CONST_INT)
1315 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1317 if (GET_MODE_BITSIZE (mode) <= 32)
1320 if (INTVAL (op) < 0)
1324 return ((INTVAL (op) & GET_MODE_MASK (mode)
1325 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1327 else if (GET_CODE (op) == CONST_DOUBLE)
1329 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1333 return CONST_DOUBLE_HIGH (op) == 0;
1336 return gpc_reg_operand (op, mode);
1339 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1342 got_operand (op, mode)
1344 enum machine_mode mode ATTRIBUTE_UNUSED;
1346 return (GET_CODE (op) == SYMBOL_REF
1347 || GET_CODE (op) == CONST
1348 || GET_CODE (op) == LABEL_REF);
1351 /* Return 1 if the operand is a simple references that can be loaded via
1352 the GOT (labels involving addition aren't allowed). */
1355 got_no_const_operand (op, mode)
1357 enum machine_mode mode ATTRIBUTE_UNUSED;
1359 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1362 /* Return the number of instructions it takes to form a constant in an
1363 integer register. */
1366 num_insns_constant_wide (value)
1367 HOST_WIDE_INT value;
1369 /* signed constant loadable with {cal|addi} */
1370 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1373 /* constant loadable with {cau|addis} */
1374 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1377 #if HOST_BITS_PER_WIDE_INT == 64
1378 else if (TARGET_POWERPC64)
1380 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1381 HOST_WIDE_INT high = value >> 31;
1383 if (high == 0 || high == -1)
1389 return num_insns_constant_wide (high) + 1;
1391 return (num_insns_constant_wide (high)
1392 + num_insns_constant_wide (low) + 1);
1401 num_insns_constant (op, mode)
1403 enum machine_mode mode;
1405 if (GET_CODE (op) == CONST_INT)
1407 #if HOST_BITS_PER_WIDE_INT == 64
1408 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1409 && mask64_operand (op, mode))
1413 return num_insns_constant_wide (INTVAL (op));
1416 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1421 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1422 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1423 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1426 else if (GET_CODE (op) == CONST_DOUBLE)
1432 int endian = (WORDS_BIG_ENDIAN == 0);
1434 if (mode == VOIDmode || mode == DImode)
1436 high = CONST_DOUBLE_HIGH (op);
1437 low = CONST_DOUBLE_LOW (op);
1441 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1442 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1444 low = l[1 - endian];
1448 return (num_insns_constant_wide (low)
1449 + num_insns_constant_wide (high));
1453 if (high == 0 && low >= 0)
1454 return num_insns_constant_wide (low);
1456 else if (high == -1 && low < 0)
1457 return num_insns_constant_wide (low);
1459 else if (mask64_operand (op, mode))
1463 return num_insns_constant_wide (high) + 1;
1466 return (num_insns_constant_wide (high)
1467 + num_insns_constant_wide (low) + 1);
1475 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1476 register with one instruction per word. We only do this if we can
1477 safely read CONST_DOUBLE_{LOW,HIGH}. */
1480 easy_fp_constant (op, mode)
1482 enum machine_mode mode;
1484 if (GET_CODE (op) != CONST_DOUBLE
1485 || GET_MODE (op) != mode
1486 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1489 /* Consider all constants with -msoft-float to be easy. */
1490 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1494 /* If we are using V.4 style PIC, consider all constants to be hard. */
1495 if (flag_pic && DEFAULT_ABI == ABI_V4)
1498 #ifdef TARGET_RELOCATABLE
1499 /* Similarly if we are using -mrelocatable, consider all constants
1501 if (TARGET_RELOCATABLE)
1510 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1511 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1513 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1514 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1515 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1516 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1519 else if (mode == DFmode)
1524 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1525 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1527 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1528 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1531 else if (mode == SFmode)
1536 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1537 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1539 return num_insns_constant_wide (l) == 1;
1542 else if (mode == DImode)
1543 return ((TARGET_POWERPC64
1544 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1545 || (num_insns_constant (op, DImode) <= 2));
1547 else if (mode == SImode)
1553 /* Return nonzero if all elements of a vector have the same value. */
1556 easy_vector_same (op, mode)
1558 enum machine_mode mode ATTRIBUTE_UNUSED;
1562 units = CONST_VECTOR_NUNITS (op);
1564 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1565 for (i = 1; i < units; ++i)
1566 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1573 /* Return 1 if the operand is a CONST_INT and can be put into a
1574 register without using memory. */
1577 easy_vector_constant (op, mode)
1579 enum machine_mode mode;
1583 if (GET_CODE (op) != CONST_VECTOR
1588 if (zero_constant (op, mode)
1589 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1590 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1593 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1596 if (TARGET_SPE && mode == V1DImode)
1599 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1600 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1602 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1604 evmergelo r0, r0, r0
1607 I don't know how efficient it would be to allow bigger constants,
1608 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1609 instructions is better than a 64-bit memory load, but I don't
1610 have the e500 timing specs. */
1611 if (TARGET_SPE && mode == V2SImode
1612 && cst >= -0x7fff && cst <= 0x7fff
1613 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1616 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1619 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1625 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1628 easy_vector_constant_add_self (op, mode)
1630 enum machine_mode mode;
1634 if (!easy_vector_constant (op, mode))
1637 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1639 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1643 output_vec_const_move (operands)
1647 enum machine_mode mode;
1653 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1654 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1655 mode = GET_MODE (dest);
1659 if (zero_constant (vec, mode))
1660 return "vxor %0,%0,%0";
1661 else if (EASY_VECTOR_15 (cst, vec, mode))
1663 operands[1] = GEN_INT (cst);
1667 return "vspltisw %0,%1";
1669 return "vspltish %0,%1";
1671 return "vspltisb %0,%1";
1676 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1684 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1685 pattern of V1DI, V4HI, and V2SF.
1687 FIXME: We should probably return # and add post reload
1688 splitters for these, but this way is so easy ;-).
1690 operands[1] = GEN_INT (cst);
1691 operands[2] = GEN_INT (cst2);
1693 return "li %0,%1\n\tevmergelo %0,%0,%0";
1695 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1701 /* Return 1 if the operand is the constant 0. This works for scalars
1702 as well as vectors. */
1704 zero_constant (op, mode)
1706 enum machine_mode mode;
1708 return op == CONST0_RTX (mode);
1711 /* Return 1 if the operand is 0.0. */
1713 zero_fp_constant (op, mode)
1715 enum machine_mode mode;
1717 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1720 /* Return 1 if the operand is in volatile memory. Note that during
1721 the RTL generation phase, memory_operand does not return TRUE for
1722 volatile memory references. So this function allows us to
1723 recognize volatile references where its safe. */
1726 volatile_mem_operand (op, mode)
1728 enum machine_mode mode;
1730 if (GET_CODE (op) != MEM)
1733 if (!MEM_VOLATILE_P (op))
1736 if (mode != GET_MODE (op))
1739 if (reload_completed)
1740 return memory_operand (op, mode);
1742 if (reload_in_progress)
1743 return strict_memory_address_p (mode, XEXP (op, 0));
1745 return memory_address_p (mode, XEXP (op, 0));
1748 /* Return 1 if the operand is an offsettable memory operand. */
1751 offsettable_mem_operand (op, mode)
1753 enum machine_mode mode;
1755 return ((GET_CODE (op) == MEM)
1756 && offsettable_address_p (reload_completed || reload_in_progress,
1757 mode, XEXP (op, 0)));
1760 /* Return 1 if the operand is either an easy FP constant (see above) or
1764 mem_or_easy_const_operand (op, mode)
1766 enum machine_mode mode;
1768 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1771 /* Return 1 if the operand is either a non-special register or an item
1772 that can be used as the operand of a `mode' add insn. */
1775 add_operand (op, mode)
1777 enum machine_mode mode;
1779 if (GET_CODE (op) == CONST_INT)
1780 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1781 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1783 return gpc_reg_operand (op, mode);
1786 /* Return 1 if OP is a constant but not a valid add_operand. */
1789 non_add_cint_operand (op, mode)
1791 enum machine_mode mode ATTRIBUTE_UNUSED;
1793 return (GET_CODE (op) == CONST_INT
1794 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1795 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1798 /* Return 1 if the operand is a non-special register or a constant that
1799 can be used as the operand of an OR or XOR insn on the RS/6000. */
1802 logical_operand (op, mode)
1804 enum machine_mode mode;
1806 HOST_WIDE_INT opl, oph;
1808 if (gpc_reg_operand (op, mode))
1811 if (GET_CODE (op) == CONST_INT)
1813 opl = INTVAL (op) & GET_MODE_MASK (mode);
1815 #if HOST_BITS_PER_WIDE_INT <= 32
1816 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1820 else if (GET_CODE (op) == CONST_DOUBLE)
1822 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1825 opl = CONST_DOUBLE_LOW (op);
1826 oph = CONST_DOUBLE_HIGH (op);
1833 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1834 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1837 /* Return 1 if C is a constant that is not a logical operand (as
1838 above), but could be split into one. */
1841 non_logical_cint_operand (op, mode)
1843 enum machine_mode mode;
1845 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1846 && ! logical_operand (op, mode)
1847 && reg_or_logical_cint_operand (op, mode));
1850 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1851 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1852 Reject all ones and all zeros, since these should have been optimized
1853 away and confuse the making of MB and ME. */
1856 mask_operand (op, mode)
1858 enum machine_mode mode ATTRIBUTE_UNUSED;
1860 HOST_WIDE_INT c, lsb;
1862 if (GET_CODE (op) != CONST_INT)
1867 /* Fail in 64-bit mode if the mask wraps around because the upper
1868 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1869 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1872 /* We don't change the number of transitions by inverting,
1873 so make sure we start with the LS bit zero. */
1877 /* Reject all zeros or all ones. */
1881 /* Find the first transition. */
1884 /* Invert to look for a second transition. */
1887 /* Erase first transition. */
1890 /* Find the second transition (if any). */
1893 /* Match if all the bits above are 1's (or c is zero). */
1897 /* Return 1 for the PowerPC64 rlwinm corner case. */
1900 mask_operand_wrap (op, mode)
1902 enum machine_mode mode ATTRIBUTE_UNUSED;
1904 HOST_WIDE_INT c, lsb;
1906 if (GET_CODE (op) != CONST_INT)
1911 if ((c & 0x80000001) != 0x80000001)
1925 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1926 It is if there are no more than one 1->0 or 0->1 transitions.
1927 Reject all zeros, since zero should have been optimized away and
1928 confuses the making of MB and ME. */
1931 mask64_operand (op, mode)
1933 enum machine_mode mode ATTRIBUTE_UNUSED;
1935 if (GET_CODE (op) == CONST_INT)
1937 HOST_WIDE_INT c, lsb;
1941 /* Reject all zeros. */
1945 /* We don't change the number of transitions by inverting,
1946 so make sure we start with the LS bit zero. */
1950 /* Find the transition, and check that all bits above are 1's. */
1953 /* Match if all the bits above are 1's (or c is zero). */
1959 /* Like mask64_operand, but allow up to three transitions. This
1960 predicate is used by insn patterns that generate two rldicl or
1961 rldicr machine insns. */
1964 mask64_2_operand (op, mode)
1966 enum machine_mode mode ATTRIBUTE_UNUSED;
1968 if (GET_CODE (op) == CONST_INT)
1970 HOST_WIDE_INT c, lsb;
1974 /* Disallow all zeros. */
1978 /* We don't change the number of transitions by inverting,
1979 so make sure we start with the LS bit zero. */
1983 /* Find the first transition. */
1986 /* Invert to look for a second transition. */
1989 /* Erase first transition. */
1992 /* Find the second transition. */
1995 /* Invert to look for a third transition. */
1998 /* Erase second transition. */
2001 /* Find the third transition (if any). */
2004 /* Match if all the bits above are 1's (or c is zero). */
2010 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2011 implement ANDing by the mask IN. */
2013 build_mask64_2_operands (in, out)
2017 #if HOST_BITS_PER_WIDE_INT >= 64
2018 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2021 if (GET_CODE (in) != CONST_INT)
2027 /* Assume c initially something like 0x00fff000000fffff. The idea
2028 is to rotate the word so that the middle ^^^^^^ group of zeros
2029 is at the MS end and can be cleared with an rldicl mask. We then
2030 rotate back and clear off the MS ^^ group of zeros with a
2032 c = ~c; /* c == 0xff000ffffff00000 */
2033 lsb = c & -c; /* lsb == 0x0000000000100000 */
2034 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2035 c = ~c; /* c == 0x00fff000000fffff */
2036 c &= -lsb; /* c == 0x00fff00000000000 */
2037 lsb = c & -c; /* lsb == 0x0000100000000000 */
2038 c = ~c; /* c == 0xff000fffffffffff */
2039 c &= -lsb; /* c == 0xff00000000000000 */
2041 while ((lsb >>= 1) != 0)
2042 shift++; /* shift == 44 on exit from loop */
2043 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2044 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2045 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2049 /* Assume c initially something like 0xff000f0000000000. The idea
2050 is to rotate the word so that the ^^^ middle group of zeros
2051 is at the LS end and can be cleared with an rldicr mask. We then
2052 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2054 lsb = c & -c; /* lsb == 0x0000010000000000 */
2055 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2056 c = ~c; /* c == 0x00fff0ffffffffff */
2057 c &= -lsb; /* c == 0x00fff00000000000 */
2058 lsb = c & -c; /* lsb == 0x0000100000000000 */
2059 c = ~c; /* c == 0xff000fffffffffff */
2060 c &= -lsb; /* c == 0xff00000000000000 */
2062 while ((lsb >>= 1) != 0)
2063 shift++; /* shift == 44 on exit from loop */
2064 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2065 m1 >>= shift; /* m1 == 0x0000000000000fff */
2066 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2069 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2070 masks will be all 1's. We are guaranteed more than one transition. */
2071 out[0] = GEN_INT (64 - shift);
2072 out[1] = GEN_INT (m1);
2073 out[2] = GEN_INT (shift);
2074 out[3] = GEN_INT (m2);
2082 /* Return 1 if the operand is either a non-special register or a constant
2083 that can be used as the operand of a PowerPC64 logical AND insn. */
2086 and64_operand (op, mode)
2088 enum machine_mode mode;
2090 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2091 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2093 return (logical_operand (op, mode) || mask64_operand (op, mode));
2096 /* Like the above, but also match constants that can be implemented
2097 with two rldicl or rldicr insns. */
2100 and64_2_operand (op, mode)
2102 enum machine_mode mode;
2104 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2105 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2107 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2110 /* Return 1 if the operand is either a non-special register or a
2111 constant that can be used as the operand of an RS/6000 logical AND insn. */
2114 and_operand (op, mode)
2116 enum machine_mode mode;
2118 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2119 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2121 return (logical_operand (op, mode) || mask_operand (op, mode));
2124 /* Return 1 if the operand is a general register or memory operand. */
2127 reg_or_mem_operand (op, mode)
2129 enum machine_mode mode;
2131 return (gpc_reg_operand (op, mode)
2132 || memory_operand (op, mode)
2133 || volatile_mem_operand (op, mode));
2136 /* Return 1 if the operand is a general register or memory operand without
2137 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2141 lwa_operand (op, mode)
2143 enum machine_mode mode;
2147 if (reload_completed && GET_CODE (inner) == SUBREG)
2148 inner = SUBREG_REG (inner);
2150 return gpc_reg_operand (inner, mode)
2151 || (memory_operand (inner, mode)
2152 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2153 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2154 && (GET_CODE (XEXP (inner, 0)) != PLUS
2155 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2156 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2159 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2162 symbol_ref_operand (op, mode)
2164 enum machine_mode mode;
2166 if (mode != VOIDmode && GET_MODE (op) != mode)
2169 return (GET_CODE (op) == SYMBOL_REF
2170 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2173 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2174 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2177 call_operand (op, mode)
2179 enum machine_mode mode;
2181 if (mode != VOIDmode && GET_MODE (op) != mode)
2184 return (GET_CODE (op) == SYMBOL_REF
2185 || (GET_CODE (op) == REG
2186 && (REGNO (op) == LINK_REGISTER_REGNUM
2187 || REGNO (op) == COUNT_REGISTER_REGNUM
2188 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2191 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2195 current_file_function_operand (op, mode)
2197 enum machine_mode mode ATTRIBUTE_UNUSED;
2199 return (GET_CODE (op) == SYMBOL_REF
2200 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2201 && (SYMBOL_REF_LOCAL_P (op)
2202 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2205 /* Return 1 if this operand is a valid input for a move insn. */
2208 input_operand (op, mode)
2210 enum machine_mode mode;
2212 /* Memory is always valid. */
2213 if (memory_operand (op, mode))
2216 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2217 if (GET_CODE (op) == CONSTANT_P_RTX)
2220 /* For floating-point, easy constants are valid. */
2221 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2223 && easy_fp_constant (op, mode))
2226 /* Allow any integer constant. */
2227 if (GET_MODE_CLASS (mode) == MODE_INT
2228 && (GET_CODE (op) == CONST_INT
2229 || GET_CODE (op) == CONST_DOUBLE))
2232 /* Allow easy vector constants. */
2233 if (GET_CODE (op) == CONST_VECTOR
2234 && easy_vector_constant (op, mode))
2237 /* For floating-point or multi-word mode, the only remaining valid type
2239 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2240 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2241 return register_operand (op, mode);
2243 /* The only cases left are integral modes one word or smaller (we
2244 do not get called for MODE_CC values). These can be in any
2246 if (register_operand (op, mode))
2249 /* A SYMBOL_REF referring to the TOC is valid. */
2250 if (legitimate_constant_pool_address_p (op))
2253 /* A constant pool expression (relative to the TOC) is valid */
2254 if (toc_relative_expr_p (op))
2257 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2259 if (DEFAULT_ABI == ABI_V4
2260 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2261 && small_data_operand (op, Pmode))
2267 /* Return 1 for an operand in small memory on V.4/eabi. */
2270 small_data_operand (op, mode)
2271 rtx op ATTRIBUTE_UNUSED;
2272 enum machine_mode mode ATTRIBUTE_UNUSED;
2277 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2280 if (DEFAULT_ABI != ABI_V4)
2283 if (GET_CODE (op) == SYMBOL_REF)
2286 else if (GET_CODE (op) != CONST
2287 || GET_CODE (XEXP (op, 0)) != PLUS
2288 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2289 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2294 rtx sum = XEXP (op, 0);
2295 HOST_WIDE_INT summand;
2297 /* We have to be careful here, because it is the referenced address
2298 that must be 32k from _SDA_BASE_, not just the symbol. */
2299 summand = INTVAL (XEXP (sum, 1));
2300 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2303 sym_ref = XEXP (sum, 0);
2306 return SYMBOL_REF_SMALL_P (sym_ref);
2312 /* Return 1 for all valid move insn operand combination involving altivec
2316 altivec_in_gprs_p (rtx op0, rtx op1)
2318 if (REG_P (op0) && REGNO_REG_CLASS (REGNO (op0)) == GENERAL_REGS)
2321 if (REG_P (op1) && REGNO_REG_CLASS (REGNO (op1)) == GENERAL_REGS)
2327 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2330 constant_pool_expr_1 (op, have_sym, have_toc)
2335 switch (GET_CODE(op))
2338 if (RS6000_SYMBOL_REF_TLS_P (op))
2340 else if (CONSTANT_POOL_ADDRESS_P (op))
2342 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2350 else if (! strcmp (XSTR (op, 0), toc_label_name))
2359 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2360 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2362 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2371 constant_pool_expr_p (op)
2376 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2380 toc_relative_expr_p (op)
2385 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2388 /* SPE offset addressing is limited to 5-bits worth of double words. */
2389 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2392 legitimate_constant_pool_address_p (x)
2396 && GET_CODE (x) == PLUS
2397 && GET_CODE (XEXP (x, 0)) == REG
2398 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2399 && constant_pool_expr_p (XEXP (x, 1)));
2403 legitimate_small_data_p (mode, x)
2404 enum machine_mode mode;
2407 return (DEFAULT_ABI == ABI_V4
2408 && !flag_pic && !TARGET_TOC
2409 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2410 && small_data_operand (x, mode));
2414 legitimate_offset_address_p (mode, x, strict)
2415 enum machine_mode mode;
2419 unsigned HOST_WIDE_INT offset, extra;
2421 if (GET_CODE (x) != PLUS)
2423 if (GET_CODE (XEXP (x, 0)) != REG)
2425 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2427 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2430 offset = INTVAL (XEXP (x, 1));
2438 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2439 which leaves the only valid constant offset of zero, which by
2440 canonicalization rules is also invalid. */
2447 /* SPE vector modes. */
2448 return SPE_CONST_OFFSET_OK (offset);
2454 else if (offset & 3)
2462 else if (offset & 3)
2472 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2476 legitimate_indexed_address_p (x, strict)
2482 if (GET_CODE (x) != PLUS)
2487 if (!REG_P (op0) || !REG_P (op1))
2490 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2491 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2492 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2493 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2497 legitimate_indirect_address_p (x, strict)
2501 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2505 legitimate_lo_sum_address_p (mode, x, strict)
2506 enum machine_mode mode;
2510 if (GET_CODE (x) != LO_SUM)
2512 if (GET_CODE (XEXP (x, 0)) != REG)
2514 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2520 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2524 if (GET_MODE_NUNITS (mode) != 1)
2526 if (GET_MODE_BITSIZE (mode) > 32
2527 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2530 return CONSTANT_P (x);
2537 /* Try machine-dependent ways of modifying an illegitimate address
2538 to be legitimate. If we find one, return the new, valid address.
2539 This is used from only one place: `memory_address' in explow.c.
2541 OLDX is the address as it was before break_out_memory_refs was
2542 called. In some cases it is useful to look at this to decide what
2545 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2547 It is always safe for this function to do nothing. It exists to
2548 recognize opportunities to optimize the output.
2550 On RS/6000, first check for the sum of a register with a constant
2551 integer that is out of range. If so, generate code to add the
2552 constant with the low-order 16 bits masked to the register and force
2553 this result into another register (this can be done with `cau').
2554 Then generate an address of REG+(CONST&0xffff), allowing for the
2555 possibility of bit 16 being a one.
2557 Then check for the sum of a register and something not constant, try to
2558 load the other things into a register and return the sum. */
2561 rs6000_legitimize_address (x, oldx, mode)
2563 rtx oldx ATTRIBUTE_UNUSED;
2564 enum machine_mode mode;
2566 if (GET_CODE (x) == SYMBOL_REF)
2568 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2570 return rs6000_legitimize_tls_address (x, model);
2573 if (GET_CODE (x) == PLUS
2574 && GET_CODE (XEXP (x, 0)) == REG
2575 && GET_CODE (XEXP (x, 1)) == CONST_INT
2576 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2578 HOST_WIDE_INT high_int, low_int;
2580 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2581 high_int = INTVAL (XEXP (x, 1)) - low_int;
2582 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2583 GEN_INT (high_int)), 0);
2584 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2586 else if (GET_CODE (x) == PLUS
2587 && GET_CODE (XEXP (x, 0)) == REG
2588 && GET_CODE (XEXP (x, 1)) != CONST_INT
2589 && GET_MODE_NUNITS (mode) == 1
2590 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2592 || (mode != DFmode && mode != TFmode))
2593 && (TARGET_POWERPC64 || mode != DImode)
2596 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2597 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2599 else if (ALTIVEC_VECTOR_MODE (mode))
2603 /* Make sure both operands are registers. */
2604 if (GET_CODE (x) == PLUS)
2605 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2606 force_reg (Pmode, XEXP (x, 1)));
2608 reg = force_reg (Pmode, x);
2611 else if (SPE_VECTOR_MODE (mode))
2613 /* We accept [reg + reg] and [reg + OFFSET]. */
2615 if (GET_CODE (x) == PLUS)
2617 rtx op1 = XEXP (x, 0);
2618 rtx op2 = XEXP (x, 1);
2620 op1 = force_reg (Pmode, op1);
2622 if (GET_CODE (op2) != REG
2623 && (GET_CODE (op2) != CONST_INT
2624 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2625 op2 = force_reg (Pmode, op2);
2627 return gen_rtx_PLUS (Pmode, op1, op2);
2630 return force_reg (Pmode, x);
2636 && GET_CODE (x) != CONST_INT
2637 && GET_CODE (x) != CONST_DOUBLE
2639 && GET_MODE_NUNITS (mode) == 1
2640 && (GET_MODE_BITSIZE (mode) <= 32
2641 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2643 rtx reg = gen_reg_rtx (Pmode);
2644 emit_insn (gen_elf_high (reg, (x)));
2645 return gen_rtx_LO_SUM (Pmode, reg, (x));
2647 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2650 && ! MACHO_DYNAMIC_NO_PIC_P
2652 && GET_CODE (x) != CONST_INT
2653 && GET_CODE (x) != CONST_DOUBLE
2655 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2659 rtx reg = gen_reg_rtx (Pmode);
2660 emit_insn (gen_macho_high (reg, (x)));
2661 return gen_rtx_LO_SUM (Pmode, reg, (x));
2664 && constant_pool_expr_p (x)
2665 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2667 return create_TOC_reference (x);
2673 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2675 static GTY(()) rtx rs6000_tls_symbol;
2677 rs6000_tls_get_addr ()
2679 if (!rs6000_tls_symbol)
2680 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2682 return rs6000_tls_symbol;
2685 /* Construct the SYMBOL_REF for TLS GOT references. */
2687 static GTY(()) rtx rs6000_got_symbol;
2691 if (!rs6000_got_symbol)
2693 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2694 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2695 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2698 return rs6000_got_symbol;
2701 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2702 this (thread-local) address. */
2705 rs6000_legitimize_tls_address (addr, model)
2707 enum tls_model model;
2711 dest = gen_reg_rtx (Pmode);
2712 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2718 tlsreg = gen_rtx_REG (Pmode, 13);
2719 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2723 tlsreg = gen_rtx_REG (Pmode, 2);
2724 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2728 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2732 tmp = gen_reg_rtx (Pmode);
2735 tlsreg = gen_rtx_REG (Pmode, 13);
2736 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2740 tlsreg = gen_rtx_REG (Pmode, 2);
2741 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2745 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2747 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2752 rtx r3, got, tga, tmp1, tmp2, eqv;
2755 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2759 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2762 rtx gsym = rs6000_got_sym ();
2763 got = gen_reg_rtx (Pmode);
2765 rs6000_emit_move (got, gsym, Pmode);
2769 static int tls_got_labelno = 0;
2770 rtx tempLR, lab, tmp3, mem;
2773 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2774 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2775 tempLR = gen_reg_rtx (Pmode);
2776 tmp1 = gen_reg_rtx (Pmode);
2777 tmp2 = gen_reg_rtx (Pmode);
2778 tmp3 = gen_reg_rtx (Pmode);
2779 mem = gen_rtx_MEM (Pmode, tmp1);
2780 RTX_UNCHANGING_P (mem) = 1;
2782 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2784 emit_move_insn (tmp1, tempLR);
2785 emit_move_insn (tmp2, mem);
2786 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2787 last = emit_move_insn (got, tmp3);
2788 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2790 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2792 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2798 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2800 r3 = gen_rtx_REG (Pmode, 3);
2802 insn = gen_tls_gd_64 (r3, got, addr);
2804 insn = gen_tls_gd_32 (r3, got, addr);
2807 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2808 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2809 insn = emit_call_insn (insn);
2810 CONST_OR_PURE_CALL_P (insn) = 1;
2811 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2812 insn = get_insns ();
2814 emit_libcall_block (insn, dest, r3, addr);
2816 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2818 r3 = gen_rtx_REG (Pmode, 3);
2820 insn = gen_tls_ld_64 (r3, got);
2822 insn = gen_tls_ld_32 (r3, got);
2825 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2826 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2827 insn = emit_call_insn (insn);
2828 CONST_OR_PURE_CALL_P (insn) = 1;
2829 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2830 insn = get_insns ();
2832 tmp1 = gen_reg_rtx (Pmode);
2833 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2835 emit_libcall_block (insn, tmp1, r3, eqv);
2836 if (rs6000_tls_size == 16)
2839 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2841 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2843 else if (rs6000_tls_size == 32)
2845 tmp2 = gen_reg_rtx (Pmode);
2847 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2849 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2852 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2854 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2858 tmp2 = gen_reg_rtx (Pmode);
2860 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2862 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2864 insn = gen_rtx_SET (Pmode, dest,
2865 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2871 /* IE, or 64 bit offset LE. */
2872 tmp2 = gen_reg_rtx (Pmode);
2874 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2876 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2879 insn = gen_tls_tls_64 (dest, tmp2, addr);
2881 insn = gen_tls_tls_32 (dest, tmp2, addr);
2889 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2890 instruction definitions. */
2893 rs6000_tls_symbol_ref (x, mode)
2895 enum machine_mode mode ATTRIBUTE_UNUSED;
2897 return RS6000_SYMBOL_REF_TLS_P (x);
2900 /* Return 1 if X contains a thread-local symbol. */
2903 rs6000_tls_referenced_p (x)
2906 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2909 /* Return 1 if *X is a thread-local symbol. This is the same as
2910 rs6000_tls_symbol_ref except for the type of the unused argument. */
2913 rs6000_tls_symbol_ref_1 (x, data)
2915 void *data ATTRIBUTE_UNUSED;
2917 return RS6000_SYMBOL_REF_TLS_P (*x);
2920 /* The convention appears to be to define this wherever it is used.
2921 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2922 is now used here. */
2923 #ifndef REG_MODE_OK_FOR_BASE_P
2924 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2927 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2928 replace the input X, or the original X if no replacement is called for.
2929 The output parameter *WIN is 1 if the calling macro should goto WIN,
2932 For RS/6000, we wish to handle large displacements off a base
2933 register by splitting the addend across an addiu/addis and the mem insn.
2934 This cuts number of extra insns needed from 3 to 1.
2936 On Darwin, we use this to generate code for floating point constants.
2937 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2938 The Darwin code is inside #if TARGET_MACHO because only then is
2939 machopic_function_base_name() defined. */
2941 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2943 enum machine_mode mode;
2946 int ind_levels ATTRIBUTE_UNUSED;
2949 /* We must recognize output that we have already generated ourselves. */
2950 if (GET_CODE (x) == PLUS
2951 && GET_CODE (XEXP (x, 0)) == PLUS
2952 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2953 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2954 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2956 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2957 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2958 opnum, (enum reload_type)type);
2964 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2965 && GET_CODE (x) == LO_SUM
2966 && GET_CODE (XEXP (x, 0)) == PLUS
2967 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2968 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2969 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2970 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2971 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2972 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2973 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2975 /* Result of previous invocation of this function on Darwin
2976 floating point constant. */
2977 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2978 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2979 opnum, (enum reload_type)type);
2984 if (GET_CODE (x) == PLUS
2985 && GET_CODE (XEXP (x, 0)) == REG
2986 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2987 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2988 && GET_CODE (XEXP (x, 1)) == CONST_INT
2989 && !SPE_VECTOR_MODE (mode)
2990 && !ALTIVEC_VECTOR_MODE (mode))
2992 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2993 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2995 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2997 /* Check for 32-bit overflow. */
2998 if (high + low != val)
3004 /* Reload the high part into a base reg; leave the low part
3005 in the mem directly. */
3007 x = gen_rtx_PLUS (GET_MODE (x),
3008 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3012 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3013 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3014 opnum, (enum reload_type)type);
3019 if (GET_CODE (x) == SYMBOL_REF
3020 && DEFAULT_ABI == ABI_DARWIN
3021 && !ALTIVEC_VECTOR_MODE (mode)
3024 /* Darwin load of floating point constant. */
3025 rtx offset = gen_rtx (CONST, Pmode,
3026 gen_rtx (MINUS, Pmode, x,
3027 gen_rtx (SYMBOL_REF, Pmode,
3028 machopic_function_base_name ())));
3029 x = gen_rtx (LO_SUM, GET_MODE (x),
3030 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
3031 gen_rtx (HIGH, Pmode, offset)), offset);
3032 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3033 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3034 opnum, (enum reload_type)type);
3038 if (GET_CODE (x) == SYMBOL_REF
3039 && DEFAULT_ABI == ABI_DARWIN
3040 && !ALTIVEC_VECTOR_MODE (mode)
3041 && MACHO_DYNAMIC_NO_PIC_P)
3043 /* Darwin load of floating point constant. */
3044 x = gen_rtx (LO_SUM, GET_MODE (x),
3045 gen_rtx (HIGH, Pmode, x), x);
3046 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3047 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3048 opnum, (enum reload_type)type);
3054 && constant_pool_expr_p (x)
3055 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3057 (x) = create_TOC_reference (x);
3065 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3066 that is a valid memory address for an instruction.
3067 The MODE argument is the machine mode for the MEM expression
3068 that wants to use this address.
3070 On the RS/6000, there are four valid address: a SYMBOL_REF that
3071 refers to a constant pool entry of an address (or the sum of it
3072 plus a constant), a short (16-bit signed) constant plus a register,
3073 the sum of two registers, or a register indirect, possibly with an
3074 auto-increment. For DFmode and DImode with a constant plus register,
3075 we must ensure that both words are addressable or PowerPC64 with offset
3078 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3079 32-bit DImode, TImode), indexed addressing cannot be used because
3080 adjacent memory cells are accessed by adding word-sized offsets
3081 during assembly output. */
3083 rs6000_legitimate_address (mode, x, reg_ok_strict)
3084 enum machine_mode mode;
3088 if (RS6000_SYMBOL_REF_TLS_P (x))
3090 if (legitimate_indirect_address_p (x, reg_ok_strict))
3092 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3093 && !ALTIVEC_VECTOR_MODE (mode)
3094 && !SPE_VECTOR_MODE (mode)
3096 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3098 if (legitimate_small_data_p (mode, x))
3100 if (legitimate_constant_pool_address_p (x))
3102 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3104 && GET_CODE (x) == PLUS
3105 && GET_CODE (XEXP (x, 0)) == REG
3106 && XEXP (x, 0) == virtual_stack_vars_rtx
3107 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3109 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3112 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3114 || (mode != DFmode && mode != TFmode))
3115 && (TARGET_POWERPC64 || mode != DImode)
3116 && legitimate_indexed_address_p (x, reg_ok_strict))
3118 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3123 /* Go to LABEL if ADDR (a legitimate address expression)
3124 has an effect that depends on the machine mode it is used for.
3126 On the RS/6000 this is true of all integral offsets (since AltiVec
3127 modes don't allow them) or is a pre-increment or decrement.
3129 ??? Except that due to conceptual problems in offsettable_address_p
3130 we can't really report the problems of integral offsets. So leave
3131 this assuming that the adjustable offset must be valid for the
3132 sub-words of a TFmode operand, which is what we had before. */
3135 rs6000_mode_dependent_address (addr)
3138 switch (GET_CODE (addr))
3141 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3143 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3144 return val + 12 + 0x8000 >= 0x10000;
3153 return TARGET_UPDATE;
3162 /* Try to output insns to set TARGET equal to the constant C if it can
3163 be done in less than N insns. Do all computations in MODE.
3164 Returns the place where the output has been placed if it can be
3165 done and the insns have been emitted. If it would take more than N
3166 insns, zero is returned and no insns and emitted. */
3169 rs6000_emit_set_const (dest, mode, source, n)
3171 enum machine_mode mode;
3172 int n ATTRIBUTE_UNUSED;
3174 rtx result, insn, set;
3175 HOST_WIDE_INT c0, c1;
3177 if (mode == QImode || mode == HImode)
3180 dest = gen_reg_rtx (mode);
3181 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3184 else if (mode == SImode)
3186 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3188 emit_insn (gen_rtx_SET (VOIDmode, result,
3189 GEN_INT (INTVAL (source)
3190 & (~ (HOST_WIDE_INT) 0xffff))));
3191 emit_insn (gen_rtx_SET (VOIDmode, dest,
3192 gen_rtx_IOR (SImode, result,
3193 GEN_INT (INTVAL (source) & 0xffff))));
3196 else if (mode == DImode)
3198 if (GET_CODE (source) == CONST_INT)
3200 c0 = INTVAL (source);
3203 else if (GET_CODE (source) == CONST_DOUBLE)
3205 #if HOST_BITS_PER_WIDE_INT >= 64
3206 c0 = CONST_DOUBLE_LOW (source);
3209 c0 = CONST_DOUBLE_LOW (source);
3210 c1 = CONST_DOUBLE_HIGH (source);
3216 result = rs6000_emit_set_long_const (dest, c0, c1);
3221 insn = get_last_insn ();
3222 set = single_set (insn);
3223 if (! CONSTANT_P (SET_SRC (set)))
3224 set_unique_reg_note (insn, REG_EQUAL, source);
3229 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3230 fall back to a straight forward decomposition. We do this to avoid
3231 exponential run times encountered when looking for longer sequences
3232 with rs6000_emit_set_const. */
3234 rs6000_emit_set_long_const (dest, c1, c2)
3236 HOST_WIDE_INT c1, c2;
3238 if (!TARGET_POWERPC64)
3240 rtx operand1, operand2;
3242 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3244 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3246 emit_move_insn (operand1, GEN_INT (c1));
3247 emit_move_insn (operand2, GEN_INT (c2));
3251 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3254 ud2 = (c1 & 0xffff0000) >> 16;
3255 #if HOST_BITS_PER_WIDE_INT >= 64
3259 ud4 = (c2 & 0xffff0000) >> 16;
3261 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3262 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3265 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3267 emit_move_insn (dest, GEN_INT (ud1));
3270 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3271 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3274 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3277 emit_move_insn (dest, GEN_INT (ud2 << 16));
3279 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3281 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3282 || (ud4 == 0 && ! (ud3 & 0x8000)))
3285 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3288 emit_move_insn (dest, GEN_INT (ud3 << 16));
3291 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3292 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3294 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3299 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3302 emit_move_insn (dest, GEN_INT (ud4 << 16));
3305 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3307 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3309 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3310 GEN_INT (ud2 << 16)));
3312 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3318 /* Emit a move from SOURCE to DEST in mode MODE. */
3320 rs6000_emit_move (dest, source, mode)
3323 enum machine_mode mode;
3327 operands[1] = source;
3329 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3330 if (GET_CODE (operands[1]) == CONST_DOUBLE
3331 && ! FLOAT_MODE_P (mode)
3332 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3334 /* FIXME. This should never happen. */
3335 /* Since it seems that it does, do the safe thing and convert
3337 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3339 if (GET_CODE (operands[1]) == CONST_DOUBLE
3340 && ! FLOAT_MODE_P (mode)
3341 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3342 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3343 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3344 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3347 /* Check if GCC is setting up a block move that will end up using FP
3348 registers as temporaries. We must make sure this is acceptable. */
3349 if (GET_CODE (operands[0]) == MEM
3350 && GET_CODE (operands[1]) == MEM
3352 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3353 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3354 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3355 ? 32 : MEM_ALIGN (operands[0])))
3356 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3358 : MEM_ALIGN (operands[1]))))
3359 && ! MEM_VOLATILE_P (operands [0])
3360 && ! MEM_VOLATILE_P (operands [1]))
3362 emit_move_insn (adjust_address (operands[0], SImode, 0),
3363 adjust_address (operands[1], SImode, 0));
3364 emit_move_insn (adjust_address (operands[0], SImode, 4),
3365 adjust_address (operands[1], SImode, 4));
3369 if (!no_new_pseudos)
3371 if (GET_CODE (operands[1]) == MEM && optimize > 0
3372 && (mode == QImode || mode == HImode || mode == SImode)
3373 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3375 rtx reg = gen_reg_rtx (word_mode);
3377 emit_insn (gen_rtx_SET (word_mode, reg,
3378 gen_rtx_ZERO_EXTEND (word_mode,
3380 operands[1] = gen_lowpart (mode, reg);
3382 if (GET_CODE (operands[0]) != REG)
3383 operands[1] = force_reg (mode, operands[1]);
3386 if (mode == SFmode && ! TARGET_POWERPC
3387 && TARGET_HARD_FLOAT && TARGET_FPRS
3388 && GET_CODE (operands[0]) == MEM)
3392 if (reload_in_progress || reload_completed)
3393 regnum = true_regnum (operands[1]);
3394 else if (GET_CODE (operands[1]) == REG)
3395 regnum = REGNO (operands[1]);
3399 /* If operands[1] is a register, on POWER it may have
3400 double-precision data in it, so truncate it to single
3402 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3405 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3406 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3407 operands[1] = newreg;
3411 /* Recognize the case where operand[1] is a reference to thread-local
3412 data and load its address to a register. */
3413 if (GET_CODE (operands[1]) == SYMBOL_REF)
3415 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3417 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3420 /* Handle the case where reload calls us with an invalid address. */
3421 if (reload_in_progress && mode == Pmode
3422 && (! general_operand (operands[1], mode)
3423 || ! nonimmediate_operand (operands[0], mode)))
3426 /* Handle the case of CONSTANT_P_RTX. */
3427 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3430 /* FIXME: In the long term, this switch statement should go away
3431 and be replaced by a sequence of tests based on things like
3437 if (CONSTANT_P (operands[1])
3438 && GET_CODE (operands[1]) != CONST_INT)
3439 operands[1] = force_const_mem (mode, operands[1]);
3445 if (CONSTANT_P (operands[1])
3446 && ! easy_fp_constant (operands[1], mode))
3447 operands[1] = force_const_mem (mode, operands[1]);
3458 if (CONSTANT_P (operands[1])
3459 && !easy_vector_constant (operands[1], mode))
3460 operands[1] = force_const_mem (mode, operands[1]);
3465 /* Use default pattern for address of ELF small data */
3468 && DEFAULT_ABI == ABI_V4
3469 && (GET_CODE (operands[1]) == SYMBOL_REF
3470 || GET_CODE (operands[1]) == CONST)
3471 && small_data_operand (operands[1], mode))
3473 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3477 if (DEFAULT_ABI == ABI_V4
3478 && mode == Pmode && mode == SImode
3479 && flag_pic == 1 && got_operand (operands[1], mode))
3481 emit_insn (gen_movsi_got (operands[0], operands[1]));
3485 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3489 && CONSTANT_P (operands[1])
3490 && GET_CODE (operands[1]) != HIGH
3491 && GET_CODE (operands[1]) != CONST_INT)
3493 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3495 /* If this is a function address on -mcall-aixdesc,
3496 convert it to the address of the descriptor. */
3497 if (DEFAULT_ABI == ABI_AIX
3498 && GET_CODE (operands[1]) == SYMBOL_REF
3499 && XSTR (operands[1], 0)[0] == '.')
3501 const char *name = XSTR (operands[1], 0);
3503 while (*name == '.')
3505 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3506 CONSTANT_POOL_ADDRESS_P (new_ref)
3507 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3508 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3509 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3510 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3511 operands[1] = new_ref;
3514 if (DEFAULT_ABI == ABI_DARWIN)
3517 if (MACHO_DYNAMIC_NO_PIC_P)
3519 /* Take care of any required data indirection. */
3520 operands[1] = rs6000_machopic_legitimize_pic_address (
3521 operands[1], mode, operands[0]);
3522 if (operands[0] != operands[1])
3523 emit_insn (gen_rtx_SET (VOIDmode,
3524 operands[0], operands[1]));
3528 emit_insn (gen_macho_high (target, operands[1]));
3529 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3533 emit_insn (gen_elf_high (target, operands[1]));
3534 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3538 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3539 and we have put it in the TOC, we just need to make a TOC-relative
3542 && GET_CODE (operands[1]) == SYMBOL_REF
3543 && constant_pool_expr_p (operands[1])
3544 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3545 get_pool_mode (operands[1])))
3547 operands[1] = create_TOC_reference (operands[1]);
3549 else if (mode == Pmode
3550 && CONSTANT_P (operands[1])
3551 && ((GET_CODE (operands[1]) != CONST_INT
3552 && ! easy_fp_constant (operands[1], mode))
3553 || (GET_CODE (operands[1]) == CONST_INT
3554 && num_insns_constant (operands[1], mode) > 2)
3555 || (GET_CODE (operands[0]) == REG
3556 && FP_REGNO_P (REGNO (operands[0]))))
3557 && GET_CODE (operands[1]) != HIGH
3558 && ! legitimate_constant_pool_address_p (operands[1])
3559 && ! toc_relative_expr_p (operands[1]))
3561 /* Emit a USE operation so that the constant isn't deleted if
3562 expensive optimizations are turned on because nobody
3563 references it. This should only be done for operands that
3564 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3565 This should not be done for operands that contain LABEL_REFs.
3566 For now, we just handle the obvious case. */
3567 if (GET_CODE (operands[1]) != LABEL_REF)
3568 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3571 /* Darwin uses a special PIC legitimizer. */
3572 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3575 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3577 if (operands[0] != operands[1])
3578 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3583 /* If we are to limit the number of things we put in the TOC and
3584 this is a symbol plus a constant we can add in one insn,
3585 just put the symbol in the TOC and add the constant. Don't do
3586 this if reload is in progress. */
3587 if (GET_CODE (operands[1]) == CONST
3588 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3589 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3590 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3591 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3592 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3593 && ! side_effects_p (operands[0]))
3596 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3597 rtx other = XEXP (XEXP (operands[1], 0), 1);
3599 sym = force_reg (mode, sym);
3601 emit_insn (gen_addsi3 (operands[0], sym, other));
3603 emit_insn (gen_adddi3 (operands[0], sym, other));
3607 operands[1] = force_const_mem (mode, operands[1]);
3610 && constant_pool_expr_p (XEXP (operands[1], 0))
3611 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3612 get_pool_constant (XEXP (operands[1], 0)),
3613 get_pool_mode (XEXP (operands[1], 0))))
3616 = gen_rtx_MEM (mode,
3617 create_TOC_reference (XEXP (operands[1], 0)));
3618 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3619 RTX_UNCHANGING_P (operands[1]) = 1;
3625 if (GET_CODE (operands[0]) == MEM
3626 && GET_CODE (XEXP (operands[0], 0)) != REG
3627 && ! reload_in_progress)
3629 = replace_equiv_address (operands[0],
3630 copy_addr_to_reg (XEXP (operands[0], 0)));
3632 if (GET_CODE (operands[1]) == MEM
3633 && GET_CODE (XEXP (operands[1], 0)) != REG
3634 && ! reload_in_progress)
3636 = replace_equiv_address (operands[1],
3637 copy_addr_to_reg (XEXP (operands[1], 0)));
3640 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3642 gen_rtx_SET (VOIDmode,
3643 operands[0], operands[1]),
3644 gen_rtx_CLOBBER (VOIDmode,
3645 gen_rtx_SCRATCH (SImode)))));
3654 /* Above, we may have called force_const_mem which may have returned
3655 an invalid address. If we can, fix this up; otherwise, reload will
3656 have to deal with it. */
3657 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3658 operands[1] = validize_mem (operands[1]);
3661 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3664 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3665 for a call to a function whose data type is FNTYPE.
3666 For a library call, FNTYPE is 0.
3668 For incoming args we set the number of arguments in the prototype large
3669 so we never return a PARALLEL. */
3672 init_cumulative_args (cum, fntype, libname, incoming, libcall)
3673 CUMULATIVE_ARGS *cum;
3675 rtx libname ATTRIBUTE_UNUSED;
3679 static CUMULATIVE_ARGS zero_cumulative;
3681 *cum = zero_cumulative;
3683 cum->fregno = FP_ARG_MIN_REG;
3684 cum->vregno = ALTIVEC_ARG_MIN_REG;
3685 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3686 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3687 ? CALL_LIBCALL : CALL_NORMAL);
3688 cum->sysv_gregno = GP_ARG_MIN_REG;
3689 cum->stdarg = fntype
3690 && (TYPE_ARG_TYPES (fntype) != 0
3691 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3692 != void_type_node));
3695 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3697 else if (cum->prototype)
3698 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3699 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3700 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3703 cum->nargs_prototype = 0;
3705 cum->orig_nargs = cum->nargs_prototype;
3707 /* Check for a longcall attribute. */
3709 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3710 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3711 cum->call_cookie = CALL_LONG;
3713 if (TARGET_DEBUG_ARG)
3715 fprintf (stderr, "\ninit_cumulative_args:");
3718 tree ret_type = TREE_TYPE (fntype);
3719 fprintf (stderr, " ret code = %s,",
3720 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3723 if (cum->call_cookie & CALL_LONG)
3724 fprintf (stderr, " longcall,");
3726 fprintf (stderr, " proto = %d, nargs = %d\n",
3727 cum->prototype, cum->nargs_prototype);
3731 /* If defined, a C expression which determines whether, and in which
3732 direction, to pad out an argument with extra space. The value
3733 should be of type `enum direction': either `upward' to pad above
3734 the argument, `downward' to pad below, or `none' to inhibit
3737 For the AIX ABI structs are always stored left shifted in their
3741 function_arg_padding (mode, type)
3742 enum machine_mode mode;
3745 if (type != 0 && AGGREGATE_TYPE_P (type))
3748 /* This is the default definition. */
3749 return (! BYTES_BIG_ENDIAN
3752 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3753 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3754 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3755 ? downward : upward));
3758 /* If defined, a C expression that gives the alignment boundary, in bits,
3759 of an argument with the specified mode and type. If it is not defined,
3760 PARM_BOUNDARY is used for all arguments.
3762 V.4 wants long longs to be double word aligned. */
3765 function_arg_boundary (mode, type)
3766 enum machine_mode mode;
3767 tree type ATTRIBUTE_UNUSED;
3769 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3771 else if (SPE_VECTOR_MODE (mode))
3773 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3776 return PARM_BOUNDARY;
3779 /* Update the data in CUM to advance over an argument
3780 of mode MODE and data type TYPE.
3781 (TYPE is null for libcalls where that information may not be available.) */
3784 function_arg_advance (cum, mode, type, named)
3785 CUMULATIVE_ARGS *cum;
3786 enum machine_mode mode;
3790 cum->nargs_prototype--;
3792 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3794 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3797 cum->words += RS6000_ARG_SIZE (mode, type);
3799 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3801 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3803 else if (DEFAULT_ABI == ABI_V4)
3805 if (TARGET_HARD_FLOAT && TARGET_FPRS
3806 && (mode == SFmode || mode == DFmode))
3808 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3813 cum->words += cum->words & 1;
3814 cum->words += RS6000_ARG_SIZE (mode, type);
3820 int gregno = cum->sysv_gregno;
3822 /* Aggregates and IEEE quad get passed by reference. */
3823 if ((type && AGGREGATE_TYPE_P (type))
3827 n_words = RS6000_ARG_SIZE (mode, type);
3829 /* Long long and SPE vectors are put in odd registers. */
3830 if (n_words == 2 && (gregno & 1) == 0)
3833 /* Long long and SPE vectors are not split between registers
3835 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3837 /* Long long is aligned on the stack. */
3839 cum->words += cum->words & 1;
3840 cum->words += n_words;
3843 /* Note: continuing to accumulate gregno past when we've started
3844 spilling to the stack indicates the fact that we've started
3845 spilling to the stack to expand_builtin_saveregs. */
3846 cum->sysv_gregno = gregno + n_words;
3849 if (TARGET_DEBUG_ARG)
3851 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3852 cum->words, cum->fregno);
3853 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3854 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3855 fprintf (stderr, "mode = %4s, named = %d\n",
3856 GET_MODE_NAME (mode), named);
3861 int align = (TARGET_32BIT && (cum->words & 1) != 0
3862 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3864 cum->words += align + RS6000_ARG_SIZE (mode, type);
3866 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3867 && TARGET_HARD_FLOAT && TARGET_FPRS)
3868 cum->fregno += (mode == TFmode ? 2 : 1);
3870 if (TARGET_DEBUG_ARG)
3872 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3873 cum->words, cum->fregno);
3874 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3875 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3876 fprintf (stderr, "named = %d, align = %d\n", named, align);
3881 /* Determine where to put a SIMD argument on the SPE. */
3883 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
3887 int gregno = cum->sysv_gregno;
3888 int n_words = RS6000_ARG_SIZE (mode, type);
3890 /* SPE vectors are put in odd registers. */
3891 if (n_words == 2 && (gregno & 1) == 0)
3894 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3897 enum machine_mode m = SImode;
3899 r1 = gen_rtx_REG (m, gregno);
3900 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3901 r2 = gen_rtx_REG (m, gregno + 1);
3902 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3903 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3910 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3911 return gen_rtx_REG (mode, cum->sysv_gregno);
3917 /* Determine where to put an argument to a function.
3918 Value is zero to push the argument on the stack,
3919 or a hard register in which to store the argument.
3921 MODE is the argument's machine mode.
3922 TYPE is the data type of the argument (as a tree).
3923 This is null for libcalls where that information may
3925 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3926 the preceding args and about the function being called.
3927 NAMED is nonzero if this argument is a named parameter
3928 (otherwise it is an extra parameter matching an ellipsis).
3930 On RS/6000 the first eight words of non-FP are normally in registers
3931 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3932 Under V.4, the first 8 FP args are in registers.
3934 If this is floating-point and no prototype is specified, we use
3935 both an FP and integer register (or possibly FP reg and stack). Library
3936 functions (when CALL_LIBCALL is set) always have the proper types for args,
3937 so we can pass the FP value just in one register. emit_library_function
3938 doesn't support PARALLEL anyway. */
3941 function_arg (cum, mode, type, named)
3942 CUMULATIVE_ARGS *cum;
3943 enum machine_mode mode;
3947 enum rs6000_abi abi = DEFAULT_ABI;
3949 /* Return a marker to indicate whether CR1 needs to set or clear the
3950 bit that V.4 uses to say fp args were passed in registers.
3951 Assume that we don't need the marker for software floating point,
3952 or compiler generated library calls. */
3953 if (mode == VOIDmode)
3956 && cum->nargs_prototype < 0
3957 && (cum->call_cookie & CALL_LIBCALL) == 0
3958 && (cum->prototype || TARGET_NO_PROTOTYPE))
3960 /* For the SPE, we need to crxor CR6 always. */
3962 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3963 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3964 return GEN_INT (cum->call_cookie
3965 | ((cum->fregno == FP_ARG_MIN_REG)
3966 ? CALL_V4_SET_FP_ARGS
3967 : CALL_V4_CLEAR_FP_ARGS));
3970 return GEN_INT (cum->call_cookie);
3973 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3975 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3976 return gen_rtx_REG (mode, cum->vregno);
3980 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
3981 return rs6000_spe_function_arg (cum, mode, type);
3982 else if (abi == ABI_V4)
3984 if (TARGET_HARD_FLOAT && TARGET_FPRS
3985 && (mode == SFmode || mode == DFmode))
3987 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3988 return gen_rtx_REG (mode, cum->fregno);
3995 int gregno = cum->sysv_gregno;
3997 /* Aggregates and IEEE quad get passed by reference. */
3998 if ((type && AGGREGATE_TYPE_P (type))
4002 n_words = RS6000_ARG_SIZE (mode, type);
4004 /* Long long and SPE vectors are put in odd registers. */
4005 if (n_words == 2 && (gregno & 1) == 0)
4008 /* Long long do not split between registers and stack. */
4009 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4010 return gen_rtx_REG (mode, gregno);
4017 int align = (TARGET_32BIT && (cum->words & 1) != 0
4018 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4019 int align_words = cum->words + align;
4021 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4024 if (USE_FP_FOR_ARG_P (*cum, mode, type))
4027 || ((cum->nargs_prototype > 0)
4028 /* IBM AIX extended its linkage convention definition always
4029 to require FP args after register save area hole on the
4031 && (DEFAULT_ABI != ABI_AIX
4033 || (align_words < GP_ARG_NUM_REG))))
4034 return gen_rtx_REG (mode, cum->fregno);
4036 return gen_rtx_PARALLEL (mode,
4038 gen_rtx_EXPR_LIST (VOIDmode,
4039 ((align_words >= GP_ARG_NUM_REG)
4042 + RS6000_ARG_SIZE (mode, type)
4044 /* If this is partially on the stack, then
4045 we only include the portion actually
4046 in registers here. */
4047 ? gen_rtx_REG (SImode,
4048 GP_ARG_MIN_REG + align_words)
4049 : gen_rtx_REG (mode,
4050 GP_ARG_MIN_REG + align_words))),
4052 gen_rtx_EXPR_LIST (VOIDmode,
4053 gen_rtx_REG (mode, cum->fregno),
4056 else if (align_words < GP_ARG_NUM_REG)
4057 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4063 /* For an arg passed partly in registers and partly in memory,
4064 this is the number of registers used.
4065 For args passed entirely in registers or entirely in memory, zero. */
4068 function_arg_partial_nregs (cum, mode, type, named)
4069 CUMULATIVE_ARGS *cum;
4070 enum machine_mode mode;
4072 int named ATTRIBUTE_UNUSED;
4074 if (DEFAULT_ABI == ABI_V4)
4077 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4078 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4080 if (cum->nargs_prototype >= 0)
4084 if (cum->words < GP_ARG_NUM_REG
4085 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4087 int ret = GP_ARG_NUM_REG - cum->words;
4088 if (ret && TARGET_DEBUG_ARG)
4089 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4097 /* A C expression that indicates when an argument must be passed by
4098 reference. If nonzero for an argument, a copy of that argument is
4099 made in memory and a pointer to the argument is passed instead of
4100 the argument itself. The pointer is passed in whatever way is
4101 appropriate for passing a pointer to that type.
4103 Under V.4, structures and unions are passed by reference.
4105 As an extension to all ABIs, variable sized types are passed by
4109 function_arg_pass_by_reference (cum, mode, type, named)
4110 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4111 enum machine_mode mode ATTRIBUTE_UNUSED;
4113 int named ATTRIBUTE_UNUSED;
4115 if (DEFAULT_ABI == ABI_V4
4116 && ((type && AGGREGATE_TYPE_P (type))
4119 if (TARGET_DEBUG_ARG)
4120 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4124 return type && int_size_in_bytes (type) <= 0;
4127 /* Perform any needed actions needed for a function that is receiving a
4128 variable number of arguments.
4132 MODE and TYPE are the mode and type of the current parameter.
4134 PRETEND_SIZE is a variable that should be set to the amount of stack
4135 that must be pushed by the prolog to pretend that our caller pushed
4138 Normally, this macro will push all remaining incoming registers on the
4139 stack and set PRETEND_SIZE to the length of the registers pushed. */
4142 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4143 CUMULATIVE_ARGS *cum;
4144 enum machine_mode mode;
4146 int *pretend_size ATTRIBUTE_UNUSED;
4150 CUMULATIVE_ARGS next_cum;
4151 int reg_size = TARGET_32BIT ? 4 : 8;
4152 rtx save_area = NULL_RTX, mem;
4153 int first_reg_offset, set;
4157 fntype = TREE_TYPE (current_function_decl);
4158 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4159 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4160 != void_type_node));
4162 /* For varargs, we do not want to skip the dummy va_dcl argument.
4163 For stdargs, we do want to skip the last named argument. */
4166 function_arg_advance (&next_cum, mode, type, 1);
4168 if (DEFAULT_ABI == ABI_V4)
4170 /* Indicate to allocate space on the stack for varargs save area. */
4171 cfun->machine->sysv_varargs_p = 1;
4173 save_area = plus_constant (virtual_stack_vars_rtx,
4174 - RS6000_VARARGS_SIZE);
4176 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4180 first_reg_offset = next_cum.words;
4181 save_area = virtual_incoming_args_rtx;
4182 cfun->machine->sysv_varargs_p = 0;
4184 if (MUST_PASS_IN_STACK (mode, type))
4185 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4188 set = get_varargs_alias_set ();
4189 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4191 mem = gen_rtx_MEM (BLKmode,
4192 plus_constant (save_area,
4193 first_reg_offset * reg_size)),
4194 set_mem_alias_set (mem, set);
4195 set_mem_align (mem, BITS_PER_WORD);
4197 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4198 GP_ARG_NUM_REG - first_reg_offset);
4201 /* Save FP registers if needed. */
4202 if (DEFAULT_ABI == ABI_V4
4203 && TARGET_HARD_FLOAT && TARGET_FPRS
4205 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4207 int fregno = next_cum.fregno;
4208 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4209 rtx lab = gen_label_rtx ();
4210 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4212 emit_jump_insn (gen_rtx_SET (VOIDmode,
4214 gen_rtx_IF_THEN_ELSE (VOIDmode,
4215 gen_rtx_NE (VOIDmode, cr1,
4217 gen_rtx_LABEL_REF (VOIDmode, lab),
4220 while (fregno <= FP_ARG_V4_MAX_REG)
4222 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4223 set_mem_alias_set (mem, set);
4224 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4233 /* Create the va_list data type. */
4236 rs6000_build_va_list ()
4238 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4240 /* For AIX, prefer 'char *' because that's what the system
4241 header files like. */
4242 if (DEFAULT_ABI != ABI_V4)
4243 return build_pointer_type (char_type_node);
4245 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4246 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4248 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4249 unsigned_char_type_node);
4250 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4251 unsigned_char_type_node);
4252 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4254 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4255 short_unsigned_type_node);
4256 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4258 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4261 DECL_FIELD_CONTEXT (f_gpr) = record;
4262 DECL_FIELD_CONTEXT (f_fpr) = record;
4263 DECL_FIELD_CONTEXT (f_res) = record;
4264 DECL_FIELD_CONTEXT (f_ovf) = record;
4265 DECL_FIELD_CONTEXT (f_sav) = record;
4267 TREE_CHAIN (record) = type_decl;
4268 TYPE_NAME (record) = type_decl;
4269 TYPE_FIELDS (record) = f_gpr;
4270 TREE_CHAIN (f_gpr) = f_fpr;
4271 TREE_CHAIN (f_fpr) = f_res;
4272 TREE_CHAIN (f_res) = f_ovf;
4273 TREE_CHAIN (f_ovf) = f_sav;
4275 layout_type (record);
4277 /* The correct type is an array type of one element. */
4278 return build_array_type (record, build_index_type (size_zero_node));
4281 /* Implement va_start. */
4284 rs6000_va_start (valist, nextarg)
4288 HOST_WIDE_INT words, n_gpr, n_fpr;
4289 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4290 tree gpr, fpr, ovf, sav, t;
4292 /* Only SVR4 needs something special. */
4293 if (DEFAULT_ABI != ABI_V4)
4295 std_expand_builtin_va_start (valist, nextarg);
4299 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4300 f_fpr = TREE_CHAIN (f_gpr);
4301 f_res = TREE_CHAIN (f_fpr);
4302 f_ovf = TREE_CHAIN (f_res);
4303 f_sav = TREE_CHAIN (f_ovf);
4305 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4306 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4307 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4308 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4309 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4311 /* Count number of gp and fp argument registers used. */
4312 words = current_function_args_info.words;
4313 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4314 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4316 if (TARGET_DEBUG_ARG)
4317 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4318 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4319 words, n_gpr, n_fpr);
4321 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4322 TREE_SIDE_EFFECTS (t) = 1;
4323 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4325 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4326 TREE_SIDE_EFFECTS (t) = 1;
4327 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4329 /* Find the overflow area. */
4330 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4332 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4333 build_int_2 (words * UNITS_PER_WORD, 0));
4334 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4335 TREE_SIDE_EFFECTS (t) = 1;
4336 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4338 /* Find the register save area. */
4339 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4340 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4341 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4342 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4343 TREE_SIDE_EFFECTS (t) = 1;
4344 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4347 /* Implement va_arg. */
4350 rs6000_va_arg (valist, type)
4353 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4354 tree gpr, fpr, ovf, sav, reg, t, u;
4355 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4356 rtx lab_false, lab_over, addr_rtx, r;
4358 if (DEFAULT_ABI != ABI_V4)
4360 /* Variable sized types are passed by reference. */
4361 if (int_size_in_bytes (type) <= 0)
4363 u = build_pointer_type (type);
4365 /* Args grow upward. */
4366 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4367 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4368 TREE_SIDE_EFFECTS (t) = 1;
4370 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4371 TREE_SIDE_EFFECTS (t) = 1;
4373 t = build1 (INDIRECT_REF, u, t);
4374 TREE_SIDE_EFFECTS (t) = 1;
4376 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4379 return std_expand_builtin_va_arg (valist, type);
4382 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4383 f_fpr = TREE_CHAIN (f_gpr);
4384 f_res = TREE_CHAIN (f_fpr);
4385 f_ovf = TREE_CHAIN (f_res);
4386 f_sav = TREE_CHAIN (f_ovf);
4388 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4389 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4390 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4391 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4392 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4394 size = int_size_in_bytes (type);
4395 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4397 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4399 /* Aggregates and long doubles are passed by reference. */
4405 size = UNITS_PER_WORD;
4408 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4410 /* FP args go in FP registers, if present. */
4419 /* Otherwise into GP registers. */
4427 /* Pull the value out of the saved registers ... */
4429 lab_false = gen_label_rtx ();
4430 lab_over = gen_label_rtx ();
4431 addr_rtx = gen_reg_rtx (Pmode);
4433 /* AltiVec vectors never go in registers. */
4434 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4436 TREE_THIS_VOLATILE (reg) = 1;
4437 emit_cmp_and_jump_insns
4438 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4439 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4442 /* Long long is aligned in the registers. */
4445 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4446 build_int_2 (n_reg - 1, 0));
4447 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4448 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4449 TREE_SIDE_EFFECTS (u) = 1;
4450 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4454 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4458 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4459 build_int_2 (n_reg, 0));
4460 TREE_SIDE_EFFECTS (u) = 1;
4462 u = build1 (CONVERT_EXPR, integer_type_node, u);
4463 TREE_SIDE_EFFECTS (u) = 1;
4465 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4466 TREE_SIDE_EFFECTS (u) = 1;
4468 t = build (PLUS_EXPR, ptr_type_node, t, u);
4469 TREE_SIDE_EFFECTS (t) = 1;
4471 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4473 emit_move_insn (addr_rtx, r);
4475 emit_jump_insn (gen_jump (lab_over));
4479 emit_label (lab_false);
4481 /* ... otherwise out of the overflow area. */
4483 /* Make sure we don't find reg 7 for the next int arg.
4485 All AltiVec vectors go in the overflow area. So in the AltiVec
4486 case we need to get the vectors from the overflow area, but
4487 remember where the GPRs and FPRs are. */
4488 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4489 || !TARGET_ALTIVEC))
4491 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4492 TREE_SIDE_EFFECTS (t) = 1;
4493 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4496 /* Care for on-stack alignment if needed. */
4503 /* AltiVec vectors are 16 byte aligned. */
4504 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4509 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4510 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4514 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4516 emit_move_insn (addr_rtx, r);
4518 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4519 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4520 TREE_SIDE_EFFECTS (t) = 1;
4521 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4523 emit_label (lab_over);
4527 r = gen_rtx_MEM (Pmode, addr_rtx);
4528 set_mem_alias_set (r, get_varargs_alias_set ());
4529 emit_move_insn (addr_rtx, r);
4537 #define def_builtin(MASK, NAME, TYPE, CODE) \
4539 if ((MASK) & target_flags) \
4540 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4544 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4546 static const struct builtin_description bdesc_3arg[] =
4548 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4549 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4550 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4551 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4552 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4553 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4554 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4555 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4556 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4557 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4558 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4559 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4560 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4561 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4562 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4563 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4564 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4565 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4566 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4567 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4568 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4569 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4570 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4573 /* DST operations: void foo (void *, const int, const char). */
4575 static const struct builtin_description bdesc_dst[] =
4577 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4578 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4579 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4580 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4583 /* Simple binary operations: VECc = foo (VECa, VECb). */
4585 static struct builtin_description bdesc_2arg[] =
4587 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4588 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4589 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4590 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4591 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4592 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4593 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4594 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4595 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4596 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4597 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4598 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4599 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4600 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4601 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4602 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4603 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4604 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4605 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4606 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4607 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4608 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4609 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4610 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4611 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4612 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4613 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4614 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4615 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4616 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4617 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4618 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4619 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4620 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4621 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4622 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4623 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4624 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4625 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4626 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4627 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4628 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4629 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4634 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4635 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4636 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4637 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4638 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4639 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4640 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4641 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4642 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4643 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4644 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4645 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4646 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4647 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4648 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4649 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4650 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4651 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4652 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4653 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4654 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4655 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4656 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4657 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4658 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4659 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4660 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4661 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4662 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4663 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4664 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4665 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4666 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4667 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4668 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4669 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4670 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4671 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4672 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4673 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4674 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4675 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4676 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4677 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4678 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4679 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4680 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4681 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4682 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4683 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4684 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4685 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4686 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4687 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4688 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4689 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4690 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4691 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4692 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4693 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4694 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4695 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4696 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4697 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4698 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4699 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4701 /* Place holder, leave as first spe builtin. */
4702 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4703 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4704 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4705 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4706 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4707 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4708 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4709 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4710 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4711 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4712 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4713 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4714 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4715 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4716 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4717 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4718 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4719 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4720 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4721 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4722 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4723 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4724 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4725 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4726 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4727 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4728 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4729 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4730 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4731 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4732 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4733 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4734 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4735 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4736 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4737 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4738 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4739 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4740 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4741 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4742 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4743 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4744 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4745 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4746 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4747 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4748 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4749 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4750 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4751 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4752 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4753 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4754 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4755 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4756 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4757 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4758 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4759 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4760 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4761 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4762 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4763 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4764 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4765 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4766 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4767 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4768 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4769 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4770 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4771 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4772 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4773 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4774 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4775 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4776 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4777 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4778 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4779 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4780 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4781 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4782 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4783 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4784 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4785 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4786 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4787 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4788 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4789 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4790 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4791 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4792 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4793 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4794 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4795 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4796 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4797 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4798 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4799 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4800 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4801 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4802 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4803 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4804 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4805 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4806 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4807 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4808 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4809 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4810 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4812 /* SPE binary operations expecting a 5-bit unsigned literal. */
4813 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4815 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4816 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4817 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4818 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4819 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4820 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4821 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4822 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4823 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4824 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4825 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4826 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4827 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4828 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4829 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4830 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4831 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4832 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4833 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4834 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4835 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4836 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4837 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4838 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4839 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4840 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4842 /* Place-holder. Leave as last binary SPE builtin. */
4843 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4846 /* AltiVec predicates. */
4848 struct builtin_description_predicates
4850 const unsigned int mask;
4851 const enum insn_code icode;
4853 const char *const name;
4854 const enum rs6000_builtins code;
4857 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4859 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4860 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4861 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4862 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4863 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4864 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4869 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4870 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4874 /* SPE predicates. */
4875 static struct builtin_description bdesc_spe_predicates[] =
4877 /* Place-holder. Leave as first. */
4878 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4879 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4880 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4881 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4882 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4883 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4884 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4885 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4886 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4887 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4888 /* Place-holder. Leave as last. */
4889 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4892 /* SPE evsel predicates. */
4893 static struct builtin_description bdesc_spe_evsel[] =
4895 /* Place-holder. Leave as first. */
4896 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4897 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4898 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4899 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4900 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4901 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4902 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4903 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4904 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4905 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4906 /* Place-holder. Leave as last. */
4907 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4910 /* ABS* operations. */
4912 static const struct builtin_description bdesc_abs[] =
4914 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4915 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4916 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4917 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4918 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4919 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4920 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4923 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4926 static struct builtin_description bdesc_1arg[] =
4928 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4929 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4930 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4931 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4932 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4933 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4934 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4935 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4936 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4937 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4938 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4939 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4940 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4941 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4942 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4943 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4944 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4946 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4947 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4948 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4949 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4950 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4951 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4952 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4953 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4954 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4955 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4956 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4957 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4958 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4959 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4960 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4961 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4962 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4963 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4964 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4965 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4966 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4967 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4968 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4969 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4970 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4971 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4972 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4973 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4974 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4975 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4976 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4977 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4979 /* Place-holder. Leave as last unary SPE builtin. */
4980 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4984 rs6000_expand_unop_builtin (icode, arglist, target)
4985 enum insn_code icode;
4990 tree arg0 = TREE_VALUE (arglist);
4991 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4992 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4993 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4995 if (icode == CODE_FOR_nothing)
4996 /* Builtin not supported on this processor. */
4999 /* If we got invalid arguments bail out before generating bad rtl. */
5000 if (arg0 == error_mark_node)
5003 if (icode == CODE_FOR_altivec_vspltisb
5004 || icode == CODE_FOR_altivec_vspltish
5005 || icode == CODE_FOR_altivec_vspltisw
5006 || icode == CODE_FOR_spe_evsplatfi
5007 || icode == CODE_FOR_spe_evsplati)
5009 /* Only allow 5-bit *signed* literals. */
5010 if (GET_CODE (op0) != CONST_INT
5011 || INTVAL (op0) > 0x1f
5012 || INTVAL (op0) < -0x1f)
5014 error ("argument 1 must be a 5-bit signed literal");
5020 || GET_MODE (target) != tmode
5021 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5022 target = gen_reg_rtx (tmode);
5024 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5025 op0 = copy_to_mode_reg (mode0, op0);
5027 pat = GEN_FCN (icode) (target, op0);
5036 altivec_expand_abs_builtin (icode, arglist, target)
5037 enum insn_code icode;
5041 rtx pat, scratch1, scratch2;
5042 tree arg0 = TREE_VALUE (arglist);
5043 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5044 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5045 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5047 /* If we have invalid arguments, bail out before generating bad rtl. */
5048 if (arg0 == error_mark_node)
5052 || GET_MODE (target) != tmode
5053 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5054 target = gen_reg_rtx (tmode);
5056 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5057 op0 = copy_to_mode_reg (mode0, op0);
5059 scratch1 = gen_reg_rtx (mode0);
5060 scratch2 = gen_reg_rtx (mode0);
5062 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5071 rs6000_expand_binop_builtin (icode, arglist, target)
5072 enum insn_code icode;
5077 tree arg0 = TREE_VALUE (arglist);
5078 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5079 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5080 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5081 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5082 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5083 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5085 if (icode == CODE_FOR_nothing)
5086 /* Builtin not supported on this processor. */
5089 /* If we got invalid arguments bail out before generating bad rtl. */
5090 if (arg0 == error_mark_node || arg1 == error_mark_node)
5093 if (icode == CODE_FOR_altivec_vcfux
5094 || icode == CODE_FOR_altivec_vcfsx
5095 || icode == CODE_FOR_altivec_vctsxs
5096 || icode == CODE_FOR_altivec_vctuxs
5097 || icode == CODE_FOR_altivec_vspltb
5098 || icode == CODE_FOR_altivec_vsplth
5099 || icode == CODE_FOR_altivec_vspltw
5100 || icode == CODE_FOR_spe_evaddiw
5101 || icode == CODE_FOR_spe_evldd
5102 || icode == CODE_FOR_spe_evldh
5103 || icode == CODE_FOR_spe_evldw
5104 || icode == CODE_FOR_spe_evlhhesplat
5105 || icode == CODE_FOR_spe_evlhhossplat
5106 || icode == CODE_FOR_spe_evlhhousplat
5107 || icode == CODE_FOR_spe_evlwhe
5108 || icode == CODE_FOR_spe_evlwhos
5109 || icode == CODE_FOR_spe_evlwhou
5110 || icode == CODE_FOR_spe_evlwhsplat
5111 || icode == CODE_FOR_spe_evlwwsplat
5112 || icode == CODE_FOR_spe_evrlwi
5113 || icode == CODE_FOR_spe_evslwi
5114 || icode == CODE_FOR_spe_evsrwis
5115 || icode == CODE_FOR_spe_evsubifw
5116 || icode == CODE_FOR_spe_evsrwiu)
5118 /* Only allow 5-bit unsigned literals. */
5119 if (TREE_CODE (arg1) != INTEGER_CST
5120 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5122 error ("argument 2 must be a 5-bit unsigned literal");
5128 || GET_MODE (target) != tmode
5129 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5130 target = gen_reg_rtx (tmode);
5132 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5133 op0 = copy_to_mode_reg (mode0, op0);
5134 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5135 op1 = copy_to_mode_reg (mode1, op1);
5137 pat = GEN_FCN (icode) (target, op0, op1);
5146 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5147 enum insn_code icode;
5153 tree cr6_form = TREE_VALUE (arglist);
5154 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5155 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5156 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5157 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5158 enum machine_mode tmode = SImode;
5159 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5160 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5163 if (TREE_CODE (cr6_form) != INTEGER_CST)
5165 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5169 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5174 /* If we have invalid arguments, bail out before generating bad rtl. */
5175 if (arg0 == error_mark_node || arg1 == error_mark_node)
5179 || GET_MODE (target) != tmode
5180 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5181 target = gen_reg_rtx (tmode);
5183 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5184 op0 = copy_to_mode_reg (mode0, op0);
5185 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5186 op1 = copy_to_mode_reg (mode1, op1);
5188 scratch = gen_reg_rtx (mode0);
5190 pat = GEN_FCN (icode) (scratch, op0, op1,
5191 gen_rtx (SYMBOL_REF, Pmode, opcode));
5196 /* The vec_any* and vec_all* predicates use the same opcodes for two
5197 different operations, but the bits in CR6 will be different
5198 depending on what information we want. So we have to play tricks
5199 with CR6 to get the right bits out.
5201 If you think this is disgusting, look at the specs for the
5202 AltiVec predicates. */
5204 switch (cr6_form_int)
5207 emit_insn (gen_cr6_test_for_zero (target));
5210 emit_insn (gen_cr6_test_for_zero_reverse (target));
5213 emit_insn (gen_cr6_test_for_lt (target));
5216 emit_insn (gen_cr6_test_for_lt_reverse (target));
5219 error ("argument 1 of __builtin_altivec_predicate is out of range");
5227 altivec_expand_stv_builtin (icode, arglist)
5228 enum insn_code icode;
5231 tree arg0 = TREE_VALUE (arglist);
5232 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5233 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5234 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5235 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5236 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5238 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5239 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5240 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5242 /* Invalid arguments. Bail before doing anything stoopid! */
5243 if (arg0 == error_mark_node
5244 || arg1 == error_mark_node
5245 || arg2 == error_mark_node)
5248 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5249 op0 = copy_to_mode_reg (mode2, op0);
5250 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5251 op1 = copy_to_mode_reg (mode0, op1);
5252 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5253 op2 = copy_to_mode_reg (mode1, op2);
5255 pat = GEN_FCN (icode) (op1, op2, op0);
5262 rs6000_expand_ternop_builtin (icode, arglist, target)
5263 enum insn_code icode;
5268 tree arg0 = TREE_VALUE (arglist);
5269 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5270 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5271 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5272 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5273 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5274 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5275 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5276 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5277 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5279 if (icode == CODE_FOR_nothing)
5280 /* Builtin not supported on this processor. */
5283 /* If we got invalid arguments bail out before generating bad rtl. */
5284 if (arg0 == error_mark_node
5285 || arg1 == error_mark_node
5286 || arg2 == error_mark_node)
5289 if (icode == CODE_FOR_altivec_vsldoi_4sf
5290 || icode == CODE_FOR_altivec_vsldoi_4si
5291 || icode == CODE_FOR_altivec_vsldoi_8hi
5292 || icode == CODE_FOR_altivec_vsldoi_16qi)
5294 /* Only allow 4-bit unsigned literals. */
5295 if (TREE_CODE (arg2) != INTEGER_CST
5296 || TREE_INT_CST_LOW (arg2) & ~0xf)
5298 error ("argument 3 must be a 4-bit unsigned literal");
5304 || GET_MODE (target) != tmode
5305 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5306 target = gen_reg_rtx (tmode);
5308 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5309 op0 = copy_to_mode_reg (mode0, op0);
5310 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5311 op1 = copy_to_mode_reg (mode1, op1);
5312 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5313 op2 = copy_to_mode_reg (mode2, op2);
5315 pat = GEN_FCN (icode) (target, op0, op1, op2);
5323 /* Expand the lvx builtins. */
5325 altivec_expand_ld_builtin (exp, target, expandedp)
5330 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5331 tree arglist = TREE_OPERAND (exp, 1);
5332 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5334 enum machine_mode tmode, mode0;
5336 enum insn_code icode;
5340 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5341 icode = CODE_FOR_altivec_lvx_16qi;
5343 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5344 icode = CODE_FOR_altivec_lvx_8hi;
5346 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5347 icode = CODE_FOR_altivec_lvx_4si;
5349 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5350 icode = CODE_FOR_altivec_lvx_4sf;
5359 arg0 = TREE_VALUE (arglist);
5360 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5361 tmode = insn_data[icode].operand[0].mode;
5362 mode0 = insn_data[icode].operand[1].mode;
5365 || GET_MODE (target) != tmode
5366 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5367 target = gen_reg_rtx (tmode);
5369 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5370 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5372 pat = GEN_FCN (icode) (target, op0);
5379 /* Expand the stvx builtins. */
5381 altivec_expand_st_builtin (exp, target, expandedp)
5383 rtx target ATTRIBUTE_UNUSED;
5386 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5387 tree arglist = TREE_OPERAND (exp, 1);
5388 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5390 enum machine_mode mode0, mode1;
5392 enum insn_code icode;
5396 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5397 icode = CODE_FOR_altivec_stvx_16qi;
5399 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5400 icode = CODE_FOR_altivec_stvx_8hi;
5402 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5403 icode = CODE_FOR_altivec_stvx_4si;
5405 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5406 icode = CODE_FOR_altivec_stvx_4sf;
5413 arg0 = TREE_VALUE (arglist);
5414 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5415 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5416 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5417 mode0 = insn_data[icode].operand[0].mode;
5418 mode1 = insn_data[icode].operand[1].mode;
5420 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5421 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5422 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5423 op1 = copy_to_mode_reg (mode1, op1);
5425 pat = GEN_FCN (icode) (op0, op1);
5433 /* Expand the dst builtins. */
5435 altivec_expand_dst_builtin (exp, target, expandedp)
5437 rtx target ATTRIBUTE_UNUSED;
5440 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5441 tree arglist = TREE_OPERAND (exp, 1);
5442 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5443 tree arg0, arg1, arg2;
5444 enum machine_mode mode0, mode1, mode2;
5445 rtx pat, op0, op1, op2;
5446 struct builtin_description *d;
5451 /* Handle DST variants. */
5452 d = (struct builtin_description *) bdesc_dst;
5453 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5454 if (d->code == fcode)
5456 arg0 = TREE_VALUE (arglist);
5457 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5458 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5459 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5460 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5461 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5462 mode0 = insn_data[d->icode].operand[0].mode;
5463 mode1 = insn_data[d->icode].operand[1].mode;
5464 mode2 = insn_data[d->icode].operand[2].mode;
5466 /* Invalid arguments, bail out before generating bad rtl. */
5467 if (arg0 == error_mark_node
5468 || arg1 == error_mark_node
5469 || arg2 == error_mark_node)
5472 if (TREE_CODE (arg2) != INTEGER_CST
5473 || TREE_INT_CST_LOW (arg2) & ~0x3)
5475 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5479 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5480 op0 = copy_to_mode_reg (mode0, op0);
5481 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5482 op1 = copy_to_mode_reg (mode1, op1);
5484 pat = GEN_FCN (d->icode) (op0, op1, op2);
5495 /* Expand the builtin in EXP and store the result in TARGET. Store
5496 true in *EXPANDEDP if we found a builtin to expand. */
5498 altivec_expand_builtin (exp, target, expandedp)
5503 struct builtin_description *d;
5504 struct builtin_description_predicates *dp;
5506 enum insn_code icode;
5507 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5508 tree arglist = TREE_OPERAND (exp, 1);
5511 enum machine_mode tmode, mode0;
5512 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5514 target = altivec_expand_ld_builtin (exp, target, expandedp);
5518 target = altivec_expand_st_builtin (exp, target, expandedp);
5522 target = altivec_expand_dst_builtin (exp, target, expandedp);
5530 case ALTIVEC_BUILTIN_STVX:
5531 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5532 case ALTIVEC_BUILTIN_STVEBX:
5533 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5534 case ALTIVEC_BUILTIN_STVEHX:
5535 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5536 case ALTIVEC_BUILTIN_STVEWX:
5537 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5538 case ALTIVEC_BUILTIN_STVXL:
5539 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5541 case ALTIVEC_BUILTIN_MFVSCR:
5542 icode = CODE_FOR_altivec_mfvscr;
5543 tmode = insn_data[icode].operand[0].mode;
5546 || GET_MODE (target) != tmode
5547 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5548 target = gen_reg_rtx (tmode);
5550 pat = GEN_FCN (icode) (target);
5556 case ALTIVEC_BUILTIN_MTVSCR:
5557 icode = CODE_FOR_altivec_mtvscr;
5558 arg0 = TREE_VALUE (arglist);
5559 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5560 mode0 = insn_data[icode].operand[0].mode;
5562 /* If we got invalid arguments bail out before generating bad rtl. */
5563 if (arg0 == error_mark_node)
5566 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5567 op0 = copy_to_mode_reg (mode0, op0);
5569 pat = GEN_FCN (icode) (op0);
5574 case ALTIVEC_BUILTIN_DSSALL:
5575 emit_insn (gen_altivec_dssall ());
5578 case ALTIVEC_BUILTIN_DSS:
5579 icode = CODE_FOR_altivec_dss;
5580 arg0 = TREE_VALUE (arglist);
5581 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5582 mode0 = insn_data[icode].operand[0].mode;
5584 /* If we got invalid arguments bail out before generating bad rtl. */
5585 if (arg0 == error_mark_node)
5588 if (TREE_CODE (arg0) != INTEGER_CST
5589 || TREE_INT_CST_LOW (arg0) & ~0x3)
5591 error ("argument to dss must be a 2-bit unsigned literal");
5595 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5596 op0 = copy_to_mode_reg (mode0, op0);
5598 emit_insn (gen_altivec_dss (op0));
5602 /* Expand abs* operations. */
5603 d = (struct builtin_description *) bdesc_abs;
5604 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5605 if (d->code == fcode)
5606 return altivec_expand_abs_builtin (d->icode, arglist, target);
5608 /* Expand the AltiVec predicates. */
5609 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5610 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5611 if (dp->code == fcode)
5612 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5614 /* LV* are funky. We initialized them differently. */
5617 case ALTIVEC_BUILTIN_LVSL:
5618 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5620 case ALTIVEC_BUILTIN_LVSR:
5621 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5623 case ALTIVEC_BUILTIN_LVEBX:
5624 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5626 case ALTIVEC_BUILTIN_LVEHX:
5627 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5629 case ALTIVEC_BUILTIN_LVEWX:
5630 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5632 case ALTIVEC_BUILTIN_LVXL:
5633 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5635 case ALTIVEC_BUILTIN_LVX:
5636 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5647 /* Binops that need to be initialized manually, but can be expanded
5648 automagically by rs6000_expand_binop_builtin. */
5649 static struct builtin_description bdesc_2arg_spe[] =
5651 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5652 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5653 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5654 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5655 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5656 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5657 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5658 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5659 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5660 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5661 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5662 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5663 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5664 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5665 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5666 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5667 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5668 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5669 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5670 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5671 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5672 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5675 /* Expand the builtin in EXP and store the result in TARGET. Store
5676 true in *EXPANDEDP if we found a builtin to expand.
5678 This expands the SPE builtins that are not simple unary and binary
5681 spe_expand_builtin (exp, target, expandedp)
5686 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5687 tree arglist = TREE_OPERAND (exp, 1);
5689 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5690 enum insn_code icode;
5691 enum machine_mode tmode, mode0;
5693 struct builtin_description *d;
5698 /* Syntax check for a 5-bit unsigned immediate. */
5701 case SPE_BUILTIN_EVSTDD:
5702 case SPE_BUILTIN_EVSTDH:
5703 case SPE_BUILTIN_EVSTDW:
5704 case SPE_BUILTIN_EVSTWHE:
5705 case SPE_BUILTIN_EVSTWHO:
5706 case SPE_BUILTIN_EVSTWWE:
5707 case SPE_BUILTIN_EVSTWWO:
5708 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5709 if (TREE_CODE (arg1) != INTEGER_CST
5710 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5712 error ("argument 2 must be a 5-bit unsigned literal");
5720 d = (struct builtin_description *) bdesc_2arg_spe;
5721 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5722 if (d->code == fcode)
5723 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5725 d = (struct builtin_description *) bdesc_spe_predicates;
5726 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5727 if (d->code == fcode)
5728 return spe_expand_predicate_builtin (d->icode, arglist, target);
5730 d = (struct builtin_description *) bdesc_spe_evsel;
5731 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5732 if (d->code == fcode)
5733 return spe_expand_evsel_builtin (d->icode, arglist, target);
5737 case SPE_BUILTIN_EVSTDDX:
5738 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5739 case SPE_BUILTIN_EVSTDHX:
5740 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5741 case SPE_BUILTIN_EVSTDWX:
5742 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5743 case SPE_BUILTIN_EVSTWHEX:
5744 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5745 case SPE_BUILTIN_EVSTWHOX:
5746 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5747 case SPE_BUILTIN_EVSTWWEX:
5748 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5749 case SPE_BUILTIN_EVSTWWOX:
5750 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5751 case SPE_BUILTIN_EVSTDD:
5752 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5753 case SPE_BUILTIN_EVSTDH:
5754 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5755 case SPE_BUILTIN_EVSTDW:
5756 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5757 case SPE_BUILTIN_EVSTWHE:
5758 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5759 case SPE_BUILTIN_EVSTWHO:
5760 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5761 case SPE_BUILTIN_EVSTWWE:
5762 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5763 case SPE_BUILTIN_EVSTWWO:
5764 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5765 case SPE_BUILTIN_MFSPEFSCR:
5766 icode = CODE_FOR_spe_mfspefscr;
5767 tmode = insn_data[icode].operand[0].mode;
5770 || GET_MODE (target) != tmode
5771 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5772 target = gen_reg_rtx (tmode);
5774 pat = GEN_FCN (icode) (target);
5779 case SPE_BUILTIN_MTSPEFSCR:
5780 icode = CODE_FOR_spe_mtspefscr;
5781 arg0 = TREE_VALUE (arglist);
5782 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5783 mode0 = insn_data[icode].operand[0].mode;
5785 if (arg0 == error_mark_node)
5788 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5789 op0 = copy_to_mode_reg (mode0, op0);
5791 pat = GEN_FCN (icode) (op0);
5804 spe_expand_predicate_builtin (icode, arglist, target)
5805 enum insn_code icode;
5809 rtx pat, scratch, tmp;
5810 tree form = TREE_VALUE (arglist);
5811 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5812 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5813 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5814 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5815 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5816 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5820 if (TREE_CODE (form) != INTEGER_CST)
5822 error ("argument 1 of __builtin_spe_predicate must be a constant");
5826 form_int = TREE_INT_CST_LOW (form);
5831 if (arg0 == error_mark_node || arg1 == error_mark_node)
5835 || GET_MODE (target) != SImode
5836 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5837 target = gen_reg_rtx (SImode);
5839 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5840 op0 = copy_to_mode_reg (mode0, op0);
5841 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5842 op1 = copy_to_mode_reg (mode1, op1);
5844 scratch = gen_reg_rtx (CCmode);
5846 pat = GEN_FCN (icode) (scratch, op0, op1);
5851 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5852 _lower_. We use one compare, but look in different bits of the
5853 CR for each variant.
5855 There are 2 elements in each SPE simd type (upper/lower). The CR
5856 bits are set as follows:
5858 BIT0 | BIT 1 | BIT 2 | BIT 3
5859 U | L | (U | L) | (U & L)
5861 So, for an "all" relationship, BIT 3 would be set.
5862 For an "any" relationship, BIT 2 would be set. Etc.
5864 Following traditional nomenclature, these bits map to:
5866 BIT0 | BIT 1 | BIT 2 | BIT 3
5869 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5874 /* All variant. OV bit. */
5876 /* We need to get to the OV bit, which is the ORDERED bit. We
5877 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5878 that's ugly and will trigger a validate_condition_mode abort.
5879 So let's just use another pattern. */
5880 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5882 /* Any variant. EQ bit. */
5886 /* Upper variant. LT bit. */
5890 /* Lower variant. GT bit. */
5895 error ("argument 1 of __builtin_spe_predicate is out of range");
5899 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5900 emit_move_insn (target, tmp);
5905 /* The evsel builtins look like this:
5907 e = __builtin_spe_evsel_OP (a, b, c, d);
5911 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5912 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5916 spe_expand_evsel_builtin (icode, arglist, target)
5917 enum insn_code icode;
5922 tree arg0 = TREE_VALUE (arglist);
5923 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5924 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5925 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5926 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5927 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5928 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5929 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5930 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5931 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5936 if (arg0 == error_mark_node || arg1 == error_mark_node
5937 || arg2 == error_mark_node || arg3 == error_mark_node)
5941 || GET_MODE (target) != mode0
5942 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5943 target = gen_reg_rtx (mode0);
5945 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5946 op0 = copy_to_mode_reg (mode0, op0);
5947 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5948 op1 = copy_to_mode_reg (mode0, op1);
5949 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5950 op2 = copy_to_mode_reg (mode0, op2);
5951 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5952 op3 = copy_to_mode_reg (mode0, op3);
5954 /* Generate the compare. */
5955 scratch = gen_reg_rtx (CCmode);
5956 pat = GEN_FCN (icode) (scratch, op0, op1);
5961 if (mode0 == V2SImode)
5962 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5964 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5969 /* Expand an expression EXP that calls a built-in function,
5970 with result going to TARGET if that's convenient
5971 (and in mode MODE if that's convenient).
5972 SUBTARGET may be used as the target for computing one of EXP's operands.
5973 IGNORE is nonzero if the value is to be ignored. */
5976 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5979 rtx subtarget ATTRIBUTE_UNUSED;
5980 enum machine_mode mode ATTRIBUTE_UNUSED;
5981 int ignore ATTRIBUTE_UNUSED;
5983 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5984 tree arglist = TREE_OPERAND (exp, 1);
5985 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5986 struct builtin_description *d;
5993 ret = altivec_expand_builtin (exp, target, &success);
6000 ret = spe_expand_builtin (exp, target, &success);
6006 if (TARGET_ALTIVEC || TARGET_SPE)
6008 /* Handle simple unary operations. */
6009 d = (struct builtin_description *) bdesc_1arg;
6010 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6011 if (d->code == fcode)
6012 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6014 /* Handle simple binary operations. */
6015 d = (struct builtin_description *) bdesc_2arg;
6016 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6017 if (d->code == fcode)
6018 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6020 /* Handle simple ternary operations. */
6021 d = (struct builtin_description *) bdesc_3arg;
6022 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6023 if (d->code == fcode)
6024 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6032 rs6000_init_builtins ()
6034 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6035 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6036 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6039 spe_init_builtins ();
6041 altivec_init_builtins ();
6042 if (TARGET_ALTIVEC || TARGET_SPE)
6043 rs6000_common_init_builtins ();
6046 /* Search through a set of builtins and enable the mask bits.
6047 DESC is an array of builtins.
6048 SIZE is the total number of builtins.
6049 START is the builtin enum at which to start.
6050 END is the builtin enum at which to end. */
6052 enable_mask_for_builtins (desc, size, start, end)
6053 struct builtin_description *desc;
6055 enum rs6000_builtins start, end;
6059 for (i = 0; i < size; ++i)
6060 if (desc[i].code == start)
6066 for (; i < size; ++i)
6068 /* Flip all the bits on. */
6069 desc[i].mask = target_flags;
6070 if (desc[i].code == end)
6076 spe_init_builtins ()
6078 tree endlink = void_list_node;
6079 tree puint_type_node = build_pointer_type (unsigned_type_node);
6080 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6081 struct builtin_description *d;
6084 tree v2si_ftype_4_v2si
6085 = build_function_type
6086 (opaque_V2SI_type_node,
6087 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6088 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6089 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6090 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6093 tree v2sf_ftype_4_v2sf
6094 = build_function_type
6095 (opaque_V2SF_type_node,
6096 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6097 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6098 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6099 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6102 tree int_ftype_int_v2si_v2si
6103 = build_function_type
6105 tree_cons (NULL_TREE, integer_type_node,
6106 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6107 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6110 tree int_ftype_int_v2sf_v2sf
6111 = build_function_type
6113 tree_cons (NULL_TREE, integer_type_node,
6114 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6115 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6118 tree void_ftype_v2si_puint_int
6119 = build_function_type (void_type_node,
6120 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6121 tree_cons (NULL_TREE, puint_type_node,
6122 tree_cons (NULL_TREE,
6126 tree void_ftype_v2si_puint_char
6127 = build_function_type (void_type_node,
6128 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6129 tree_cons (NULL_TREE, puint_type_node,
6130 tree_cons (NULL_TREE,
6134 tree void_ftype_v2si_pv2si_int
6135 = build_function_type (void_type_node,
6136 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6137 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6138 tree_cons (NULL_TREE,
6142 tree void_ftype_v2si_pv2si_char
6143 = build_function_type (void_type_node,
6144 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6145 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6146 tree_cons (NULL_TREE,
6151 = build_function_type (void_type_node,
6152 tree_cons (NULL_TREE, integer_type_node, endlink));
6155 = build_function_type (integer_type_node, endlink);
6157 tree v2si_ftype_pv2si_int
6158 = build_function_type (opaque_V2SI_type_node,
6159 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6160 tree_cons (NULL_TREE, integer_type_node,
6163 tree v2si_ftype_puint_int
6164 = build_function_type (opaque_V2SI_type_node,
6165 tree_cons (NULL_TREE, puint_type_node,
6166 tree_cons (NULL_TREE, integer_type_node,
6169 tree v2si_ftype_pushort_int
6170 = build_function_type (opaque_V2SI_type_node,
6171 tree_cons (NULL_TREE, pushort_type_node,
6172 tree_cons (NULL_TREE, integer_type_node,
6175 /* The initialization of the simple binary and unary builtins is
6176 done in rs6000_common_init_builtins, but we have to enable the
6177 mask bits here manually because we have run out of `target_flags'
6178 bits. We really need to redesign this mask business. */
6180 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6181 ARRAY_SIZE (bdesc_2arg),
6184 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6185 ARRAY_SIZE (bdesc_1arg),
6187 SPE_BUILTIN_EVSUBFUSIAAW);
6188 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6189 ARRAY_SIZE (bdesc_spe_predicates),
6190 SPE_BUILTIN_EVCMPEQ,
6191 SPE_BUILTIN_EVFSTSTLT);
6192 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6193 ARRAY_SIZE (bdesc_spe_evsel),
6194 SPE_BUILTIN_EVSEL_CMPGTS,
6195 SPE_BUILTIN_EVSEL_FSTSTEQ);
6197 /* Initialize irregular SPE builtins. */
6199 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6200 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6201 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6202 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6203 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6204 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6205 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6206 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6207 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6208 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6209 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6210 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6211 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6212 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6213 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6214 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6217 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6218 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6219 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6220 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6221 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6222 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6223 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6224 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6225 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6226 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6227 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6228 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6229 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6230 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6231 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6232 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6233 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6234 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6235 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6236 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6237 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6238 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6241 d = (struct builtin_description *) bdesc_spe_predicates;
6242 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6246 switch (insn_data[d->icode].operand[1].mode)
6249 type = int_ftype_int_v2si_v2si;
6252 type = int_ftype_int_v2sf_v2sf;
6258 def_builtin (d->mask, d->name, type, d->code);
6261 /* Evsel predicates. */
6262 d = (struct builtin_description *) bdesc_spe_evsel;
6263 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6267 switch (insn_data[d->icode].operand[1].mode)
6270 type = v2si_ftype_4_v2si;
6273 type = v2sf_ftype_4_v2sf;
6279 def_builtin (d->mask, d->name, type, d->code);
6284 altivec_init_builtins ()
6286 struct builtin_description *d;
6287 struct builtin_description_predicates *dp;
6289 tree pfloat_type_node = build_pointer_type (float_type_node);
6290 tree pint_type_node = build_pointer_type (integer_type_node);
6291 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6292 tree pchar_type_node = build_pointer_type (char_type_node);
6294 tree pvoid_type_node = build_pointer_type (void_type_node);
6296 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6297 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6298 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6299 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6301 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6303 tree int_ftype_int_v4si_v4si
6304 = build_function_type_list (integer_type_node,
6305 integer_type_node, V4SI_type_node,
6306 V4SI_type_node, NULL_TREE);
6307 tree v4sf_ftype_pcfloat
6308 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6309 tree void_ftype_pfloat_v4sf
6310 = build_function_type_list (void_type_node,
6311 pfloat_type_node, V4SF_type_node, NULL_TREE);
6312 tree v4si_ftype_pcint
6313 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6314 tree void_ftype_pint_v4si
6315 = build_function_type_list (void_type_node,
6316 pint_type_node, V4SI_type_node, NULL_TREE);
6317 tree v8hi_ftype_pcshort
6318 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6319 tree void_ftype_pshort_v8hi
6320 = build_function_type_list (void_type_node,
6321 pshort_type_node, V8HI_type_node, NULL_TREE);
6322 tree v16qi_ftype_pcchar
6323 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6324 tree void_ftype_pchar_v16qi
6325 = build_function_type_list (void_type_node,
6326 pchar_type_node, V16QI_type_node, NULL_TREE);
6327 tree void_ftype_v4si
6328 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6329 tree v8hi_ftype_void
6330 = build_function_type (V8HI_type_node, void_list_node);
6331 tree void_ftype_void
6332 = build_function_type (void_type_node, void_list_node);
6334 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6336 tree v16qi_ftype_int_pcvoid
6337 = build_function_type_list (V16QI_type_node,
6338 integer_type_node, pcvoid_type_node, NULL_TREE);
6339 tree v8hi_ftype_int_pcvoid
6340 = build_function_type_list (V8HI_type_node,
6341 integer_type_node, pcvoid_type_node, NULL_TREE);
6342 tree v4si_ftype_int_pcvoid
6343 = build_function_type_list (V4SI_type_node,
6344 integer_type_node, pcvoid_type_node, NULL_TREE);
6346 tree void_ftype_v4si_int_pvoid
6347 = build_function_type_list (void_type_node,
6348 V4SI_type_node, integer_type_node,
6349 pvoid_type_node, NULL_TREE);
6350 tree void_ftype_v16qi_int_pvoid
6351 = build_function_type_list (void_type_node,
6352 V16QI_type_node, integer_type_node,
6353 pvoid_type_node, NULL_TREE);
6354 tree void_ftype_v8hi_int_pvoid
6355 = build_function_type_list (void_type_node,
6356 V8HI_type_node, integer_type_node,
6357 pvoid_type_node, NULL_TREE);
6358 tree int_ftype_int_v8hi_v8hi
6359 = build_function_type_list (integer_type_node,
6360 integer_type_node, V8HI_type_node,
6361 V8HI_type_node, NULL_TREE);
6362 tree int_ftype_int_v16qi_v16qi
6363 = build_function_type_list (integer_type_node,
6364 integer_type_node, V16QI_type_node,
6365 V16QI_type_node, NULL_TREE);
6366 tree int_ftype_int_v4sf_v4sf
6367 = build_function_type_list (integer_type_node,
6368 integer_type_node, V4SF_type_node,
6369 V4SF_type_node, NULL_TREE);
6370 tree v4si_ftype_v4si
6371 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6372 tree v8hi_ftype_v8hi
6373 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6374 tree v16qi_ftype_v16qi
6375 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6376 tree v4sf_ftype_v4sf
6377 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6378 tree void_ftype_pcvoid_int_char
6379 = build_function_type_list (void_type_node,
6380 pcvoid_type_node, integer_type_node,
6381 char_type_node, NULL_TREE);
6383 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6384 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6385 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6386 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6387 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6388 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6389 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6390 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6391 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6392 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6393 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6394 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6395 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6396 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6397 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6398 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6399 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6400 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6401 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6402 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6403 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6404 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6405 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6406 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6407 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6408 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6409 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6410 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6411 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6412 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6413 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6414 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6416 /* Add the DST variants. */
6417 d = (struct builtin_description *) bdesc_dst;
6418 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6419 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6421 /* Initialize the predicates. */
6422 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6423 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6425 enum machine_mode mode1;
6428 mode1 = insn_data[dp->icode].operand[1].mode;
6433 type = int_ftype_int_v4si_v4si;
6436 type = int_ftype_int_v8hi_v8hi;
6439 type = int_ftype_int_v16qi_v16qi;
6442 type = int_ftype_int_v4sf_v4sf;
6448 def_builtin (dp->mask, dp->name, type, dp->code);
6451 /* Initialize the abs* operators. */
6452 d = (struct builtin_description *) bdesc_abs;
6453 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6455 enum machine_mode mode0;
6458 mode0 = insn_data[d->icode].operand[0].mode;
6463 type = v4si_ftype_v4si;
6466 type = v8hi_ftype_v8hi;
6469 type = v16qi_ftype_v16qi;
6472 type = v4sf_ftype_v4sf;
6478 def_builtin (d->mask, d->name, type, d->code);
6483 rs6000_common_init_builtins ()
6485 struct builtin_description *d;
6488 tree v4sf_ftype_v4sf_v4sf_v16qi
6489 = build_function_type_list (V4SF_type_node,
6490 V4SF_type_node, V4SF_type_node,
6491 V16QI_type_node, NULL_TREE);
6492 tree v4si_ftype_v4si_v4si_v16qi
6493 = build_function_type_list (V4SI_type_node,
6494 V4SI_type_node, V4SI_type_node,
6495 V16QI_type_node, NULL_TREE);
6496 tree v8hi_ftype_v8hi_v8hi_v16qi
6497 = build_function_type_list (V8HI_type_node,
6498 V8HI_type_node, V8HI_type_node,
6499 V16QI_type_node, NULL_TREE);
6500 tree v16qi_ftype_v16qi_v16qi_v16qi
6501 = build_function_type_list (V16QI_type_node,
6502 V16QI_type_node, V16QI_type_node,
6503 V16QI_type_node, NULL_TREE);
6504 tree v4si_ftype_char
6505 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6506 tree v8hi_ftype_char
6507 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6508 tree v16qi_ftype_char
6509 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6510 tree v8hi_ftype_v16qi
6511 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6512 tree v4sf_ftype_v4sf
6513 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6515 tree v2si_ftype_v2si_v2si
6516 = build_function_type_list (opaque_V2SI_type_node,
6517 opaque_V2SI_type_node,
6518 opaque_V2SI_type_node, NULL_TREE);
6520 tree v2sf_ftype_v2sf_v2sf
6521 = build_function_type_list (opaque_V2SF_type_node,
6522 opaque_V2SF_type_node,
6523 opaque_V2SF_type_node, NULL_TREE);
6525 tree v2si_ftype_int_int
6526 = build_function_type_list (opaque_V2SI_type_node,
6527 integer_type_node, integer_type_node,
6530 tree v2si_ftype_v2si
6531 = build_function_type_list (opaque_V2SI_type_node,
6532 opaque_V2SI_type_node, NULL_TREE);
6534 tree v2sf_ftype_v2sf
6535 = build_function_type_list (opaque_V2SF_type_node,
6536 opaque_V2SF_type_node, NULL_TREE);
6538 tree v2sf_ftype_v2si
6539 = build_function_type_list (opaque_V2SF_type_node,
6540 opaque_V2SI_type_node, NULL_TREE);
6542 tree v2si_ftype_v2sf
6543 = build_function_type_list (opaque_V2SI_type_node,
6544 opaque_V2SF_type_node, NULL_TREE);
6546 tree v2si_ftype_v2si_char
6547 = build_function_type_list (opaque_V2SI_type_node,
6548 opaque_V2SI_type_node,
6549 char_type_node, NULL_TREE);
6551 tree v2si_ftype_int_char
6552 = build_function_type_list (opaque_V2SI_type_node,
6553 integer_type_node, char_type_node, NULL_TREE);
6555 tree v2si_ftype_char
6556 = build_function_type_list (opaque_V2SI_type_node,
6557 char_type_node, NULL_TREE);
6559 tree int_ftype_int_int
6560 = build_function_type_list (integer_type_node,
6561 integer_type_node, integer_type_node,
6564 tree v4si_ftype_v4si_v4si
6565 = build_function_type_list (V4SI_type_node,
6566 V4SI_type_node, V4SI_type_node, NULL_TREE);
6567 tree v4sf_ftype_v4si_char
6568 = build_function_type_list (V4SF_type_node,
6569 V4SI_type_node, char_type_node, NULL_TREE);
6570 tree v4si_ftype_v4sf_char
6571 = build_function_type_list (V4SI_type_node,
6572 V4SF_type_node, char_type_node, NULL_TREE);
6573 tree v4si_ftype_v4si_char
6574 = build_function_type_list (V4SI_type_node,
6575 V4SI_type_node, char_type_node, NULL_TREE);
6576 tree v8hi_ftype_v8hi_char
6577 = build_function_type_list (V8HI_type_node,
6578 V8HI_type_node, char_type_node, NULL_TREE);
6579 tree v16qi_ftype_v16qi_char
6580 = build_function_type_list (V16QI_type_node,
6581 V16QI_type_node, char_type_node, NULL_TREE);
6582 tree v16qi_ftype_v16qi_v16qi_char
6583 = build_function_type_list (V16QI_type_node,
6584 V16QI_type_node, V16QI_type_node,
6585 char_type_node, NULL_TREE);
6586 tree v8hi_ftype_v8hi_v8hi_char
6587 = build_function_type_list (V8HI_type_node,
6588 V8HI_type_node, V8HI_type_node,
6589 char_type_node, NULL_TREE);
6590 tree v4si_ftype_v4si_v4si_char
6591 = build_function_type_list (V4SI_type_node,
6592 V4SI_type_node, V4SI_type_node,
6593 char_type_node, NULL_TREE);
6594 tree v4sf_ftype_v4sf_v4sf_char
6595 = build_function_type_list (V4SF_type_node,
6596 V4SF_type_node, V4SF_type_node,
6597 char_type_node, NULL_TREE);
6598 tree v4sf_ftype_v4sf_v4sf
6599 = build_function_type_list (V4SF_type_node,
6600 V4SF_type_node, V4SF_type_node, NULL_TREE);
6601 tree v4sf_ftype_v4sf_v4sf_v4si
6602 = build_function_type_list (V4SF_type_node,
6603 V4SF_type_node, V4SF_type_node,
6604 V4SI_type_node, NULL_TREE);
6605 tree v4sf_ftype_v4sf_v4sf_v4sf
6606 = build_function_type_list (V4SF_type_node,
6607 V4SF_type_node, V4SF_type_node,
6608 V4SF_type_node, NULL_TREE);
6609 tree v4si_ftype_v4si_v4si_v4si
6610 = build_function_type_list (V4SI_type_node,
6611 V4SI_type_node, V4SI_type_node,
6612 V4SI_type_node, NULL_TREE);
6613 tree v8hi_ftype_v8hi_v8hi
6614 = build_function_type_list (V8HI_type_node,
6615 V8HI_type_node, V8HI_type_node, NULL_TREE);
6616 tree v8hi_ftype_v8hi_v8hi_v8hi
6617 = build_function_type_list (V8HI_type_node,
6618 V8HI_type_node, V8HI_type_node,
6619 V8HI_type_node, NULL_TREE);
6620 tree v4si_ftype_v8hi_v8hi_v4si
6621 = build_function_type_list (V4SI_type_node,
6622 V8HI_type_node, V8HI_type_node,
6623 V4SI_type_node, NULL_TREE);
6624 tree v4si_ftype_v16qi_v16qi_v4si
6625 = build_function_type_list (V4SI_type_node,
6626 V16QI_type_node, V16QI_type_node,
6627 V4SI_type_node, NULL_TREE);
6628 tree v16qi_ftype_v16qi_v16qi
6629 = build_function_type_list (V16QI_type_node,
6630 V16QI_type_node, V16QI_type_node, NULL_TREE);
6631 tree v4si_ftype_v4sf_v4sf
6632 = build_function_type_list (V4SI_type_node,
6633 V4SF_type_node, V4SF_type_node, NULL_TREE);
6634 tree v8hi_ftype_v16qi_v16qi
6635 = build_function_type_list (V8HI_type_node,
6636 V16QI_type_node, V16QI_type_node, NULL_TREE);
6637 tree v4si_ftype_v8hi_v8hi
6638 = build_function_type_list (V4SI_type_node,
6639 V8HI_type_node, V8HI_type_node, NULL_TREE);
6640 tree v8hi_ftype_v4si_v4si
6641 = build_function_type_list (V8HI_type_node,
6642 V4SI_type_node, V4SI_type_node, NULL_TREE);
6643 tree v16qi_ftype_v8hi_v8hi
6644 = build_function_type_list (V16QI_type_node,
6645 V8HI_type_node, V8HI_type_node, NULL_TREE);
6646 tree v4si_ftype_v16qi_v4si
6647 = build_function_type_list (V4SI_type_node,
6648 V16QI_type_node, V4SI_type_node, NULL_TREE);
6649 tree v4si_ftype_v16qi_v16qi
6650 = build_function_type_list (V4SI_type_node,
6651 V16QI_type_node, V16QI_type_node, NULL_TREE);
6652 tree v4si_ftype_v8hi_v4si
6653 = build_function_type_list (V4SI_type_node,
6654 V8HI_type_node, V4SI_type_node, NULL_TREE);
6655 tree v4si_ftype_v8hi
6656 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6657 tree int_ftype_v4si_v4si
6658 = build_function_type_list (integer_type_node,
6659 V4SI_type_node, V4SI_type_node, NULL_TREE);
6660 tree int_ftype_v4sf_v4sf
6661 = build_function_type_list (integer_type_node,
6662 V4SF_type_node, V4SF_type_node, NULL_TREE);
6663 tree int_ftype_v16qi_v16qi
6664 = build_function_type_list (integer_type_node,
6665 V16QI_type_node, V16QI_type_node, NULL_TREE);
6666 tree int_ftype_v8hi_v8hi
6667 = build_function_type_list (integer_type_node,
6668 V8HI_type_node, V8HI_type_node, NULL_TREE);
6670 /* Add the simple ternary operators. */
6671 d = (struct builtin_description *) bdesc_3arg;
6672 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6675 enum machine_mode mode0, mode1, mode2, mode3;
6678 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6681 mode0 = insn_data[d->icode].operand[0].mode;
6682 mode1 = insn_data[d->icode].operand[1].mode;
6683 mode2 = insn_data[d->icode].operand[2].mode;
6684 mode3 = insn_data[d->icode].operand[3].mode;
6686 /* When all four are of the same mode. */
6687 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6692 type = v4si_ftype_v4si_v4si_v4si;
6695 type = v4sf_ftype_v4sf_v4sf_v4sf;
6698 type = v8hi_ftype_v8hi_v8hi_v8hi;
6701 type = v16qi_ftype_v16qi_v16qi_v16qi;
6707 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6712 type = v4si_ftype_v4si_v4si_v16qi;
6715 type = v4sf_ftype_v4sf_v4sf_v16qi;
6718 type = v8hi_ftype_v8hi_v8hi_v16qi;
6721 type = v16qi_ftype_v16qi_v16qi_v16qi;
6727 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6728 && mode3 == V4SImode)
6729 type = v4si_ftype_v16qi_v16qi_v4si;
6730 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6731 && mode3 == V4SImode)
6732 type = v4si_ftype_v8hi_v8hi_v4si;
6733 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6734 && mode3 == V4SImode)
6735 type = v4sf_ftype_v4sf_v4sf_v4si;
6737 /* vchar, vchar, vchar, 4 bit literal. */
6738 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6740 type = v16qi_ftype_v16qi_v16qi_char;
6742 /* vshort, vshort, vshort, 4 bit literal. */
6743 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6745 type = v8hi_ftype_v8hi_v8hi_char;
6747 /* vint, vint, vint, 4 bit literal. */
6748 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6750 type = v4si_ftype_v4si_v4si_char;
6752 /* vfloat, vfloat, vfloat, 4 bit literal. */
6753 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6755 type = v4sf_ftype_v4sf_v4sf_char;
6760 def_builtin (d->mask, d->name, type, d->code);
6763 /* Add the simple binary operators. */
6764 d = (struct builtin_description *) bdesc_2arg;
6765 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6767 enum machine_mode mode0, mode1, mode2;
6770 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6773 mode0 = insn_data[d->icode].operand[0].mode;
6774 mode1 = insn_data[d->icode].operand[1].mode;
6775 mode2 = insn_data[d->icode].operand[2].mode;
6777 /* When all three operands are of the same mode. */
6778 if (mode0 == mode1 && mode1 == mode2)
6783 type = v4sf_ftype_v4sf_v4sf;
6786 type = v4si_ftype_v4si_v4si;
6789 type = v16qi_ftype_v16qi_v16qi;
6792 type = v8hi_ftype_v8hi_v8hi;
6795 type = v2si_ftype_v2si_v2si;
6798 type = v2sf_ftype_v2sf_v2sf;
6801 type = int_ftype_int_int;
6808 /* A few other combos we really don't want to do manually. */
6810 /* vint, vfloat, vfloat. */
6811 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6812 type = v4si_ftype_v4sf_v4sf;
6814 /* vshort, vchar, vchar. */
6815 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6816 type = v8hi_ftype_v16qi_v16qi;
6818 /* vint, vshort, vshort. */
6819 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6820 type = v4si_ftype_v8hi_v8hi;
6822 /* vshort, vint, vint. */
6823 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6824 type = v8hi_ftype_v4si_v4si;
6826 /* vchar, vshort, vshort. */
6827 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6828 type = v16qi_ftype_v8hi_v8hi;
6830 /* vint, vchar, vint. */
6831 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6832 type = v4si_ftype_v16qi_v4si;
6834 /* vint, vchar, vchar. */
6835 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6836 type = v4si_ftype_v16qi_v16qi;
6838 /* vint, vshort, vint. */
6839 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6840 type = v4si_ftype_v8hi_v4si;
6842 /* vint, vint, 5 bit literal. */
6843 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6844 type = v4si_ftype_v4si_char;
6846 /* vshort, vshort, 5 bit literal. */
6847 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6848 type = v8hi_ftype_v8hi_char;
6850 /* vchar, vchar, 5 bit literal. */
6851 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6852 type = v16qi_ftype_v16qi_char;
6854 /* vfloat, vint, 5 bit literal. */
6855 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6856 type = v4sf_ftype_v4si_char;
6858 /* vint, vfloat, 5 bit literal. */
6859 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6860 type = v4si_ftype_v4sf_char;
6862 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6863 type = v2si_ftype_int_int;
6865 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6866 type = v2si_ftype_v2si_char;
6868 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6869 type = v2si_ftype_int_char;
6872 else if (mode0 == SImode)
6877 type = int_ftype_v4si_v4si;
6880 type = int_ftype_v4sf_v4sf;
6883 type = int_ftype_v16qi_v16qi;
6886 type = int_ftype_v8hi_v8hi;
6896 def_builtin (d->mask, d->name, type, d->code);
6899 /* Add the simple unary operators. */
6900 d = (struct builtin_description *) bdesc_1arg;
6901 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6903 enum machine_mode mode0, mode1;
6906 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6909 mode0 = insn_data[d->icode].operand[0].mode;
6910 mode1 = insn_data[d->icode].operand[1].mode;
6912 if (mode0 == V4SImode && mode1 == QImode)
6913 type = v4si_ftype_char;
6914 else if (mode0 == V8HImode && mode1 == QImode)
6915 type = v8hi_ftype_char;
6916 else if (mode0 == V16QImode && mode1 == QImode)
6917 type = v16qi_ftype_char;
6918 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6919 type = v4sf_ftype_v4sf;
6920 else if (mode0 == V8HImode && mode1 == V16QImode)
6921 type = v8hi_ftype_v16qi;
6922 else if (mode0 == V4SImode && mode1 == V8HImode)
6923 type = v4si_ftype_v8hi;
6924 else if (mode0 == V2SImode && mode1 == V2SImode)
6925 type = v2si_ftype_v2si;
6926 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6927 type = v2sf_ftype_v2sf;
6928 else if (mode0 == V2SFmode && mode1 == V2SImode)
6929 type = v2sf_ftype_v2si;
6930 else if (mode0 == V2SImode && mode1 == V2SFmode)
6931 type = v2si_ftype_v2sf;
6932 else if (mode0 == V2SImode && mode1 == QImode)
6933 type = v2si_ftype_char;
6937 def_builtin (d->mask, d->name, type, d->code);
6942 /* Expand a block move operation, and return 1 if successful. Return 0
6943 if we should let the compiler generate normal code.
6945 operands[0] is the destination
6946 operands[1] is the source
6947 operands[2] is the length
6948 operands[3] is the alignment */
6950 #define MAX_MOVE_REG 4
6953 expand_block_move (operands)
6956 rtx orig_dest = operands[0];
6957 rtx orig_src = operands[1];
6958 rtx bytes_rtx = operands[2];
6959 rtx align_rtx = operands[3];
6960 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6965 rtx stores[MAX_MOVE_REG];
6968 /* If this is not a fixed size move, just call memcpy */
6972 /* If this is not a fixed size alignment, abort */
6973 if (GET_CODE (align_rtx) != CONST_INT)
6975 align = INTVAL (align_rtx);
6977 /* Anything to move? */
6978 bytes = INTVAL (bytes_rtx);
6982 /* store_one_arg depends on expand_block_move to handle at least the size of
6983 reg_parm_stack_space. */
6984 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6987 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6990 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6991 rtx (*mov) PARAMS ((rtx, rtx));
6993 enum machine_mode mode = BLKmode;
6997 && bytes > 24 /* move up to 32 bytes at a time */
7005 && ! fixed_regs[12])
7007 move_bytes = (bytes > 32) ? 32 : bytes;
7008 gen_func.movstrsi = gen_movstrsi_8reg;
7010 else if (TARGET_STRING
7011 && bytes > 16 /* move up to 24 bytes at a time */
7017 && ! fixed_regs[10])
7019 move_bytes = (bytes > 24) ? 24 : bytes;
7020 gen_func.movstrsi = gen_movstrsi_6reg;
7022 else if (TARGET_STRING
7023 && bytes > 8 /* move up to 16 bytes at a time */
7029 move_bytes = (bytes > 16) ? 16 : bytes;
7030 gen_func.movstrsi = gen_movstrsi_4reg;
7032 else if (bytes >= 8 && TARGET_POWERPC64
7033 /* 64-bit loads and stores require word-aligned
7035 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7039 gen_func.mov = gen_movdi;
7041 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7042 { /* move up to 8 bytes at a time */
7043 move_bytes = (bytes > 8) ? 8 : bytes;
7044 gen_func.movstrsi = gen_movstrsi_2reg;
7046 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7047 { /* move 4 bytes */
7050 gen_func.mov = gen_movsi;
7052 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7053 { /* move 2 bytes */
7056 gen_func.mov = gen_movhi;
7058 else if (TARGET_STRING && bytes > 1)
7059 { /* move up to 4 bytes at a time */
7060 move_bytes = (bytes > 4) ? 4 : bytes;
7061 gen_func.movstrsi = gen_movstrsi_1reg;
7063 else /* move 1 byte at a time */
7067 gen_func.mov = gen_movqi;
7070 src = adjust_address (orig_src, mode, offset);
7071 dest = adjust_address (orig_dest, mode, offset);
7073 if (mode != BLKmode)
7075 rtx tmp_reg = gen_reg_rtx (mode);
7077 emit_insn ((*gen_func.mov) (tmp_reg, src));
7078 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7081 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7084 for (i = 0; i < num_reg; i++)
7085 emit_insn (stores[i]);
7089 if (mode == BLKmode)
7091 /* Move the address into scratch registers. The movstrsi
7092 patterns require zero offset. */
7093 if (!REG_P (XEXP (src, 0)))
7095 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7096 src = replace_equiv_address (src, src_reg);
7098 set_mem_size (src, GEN_INT (move_bytes));
7100 if (!REG_P (XEXP (dest, 0)))
7102 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7103 dest = replace_equiv_address (dest, dest_reg);
7105 set_mem_size (dest, GEN_INT (move_bytes));
7107 emit_insn ((*gen_func.movstrsi) (dest, src,
7108 GEN_INT (move_bytes & 31),
7117 /* Return 1 if OP is a load multiple operation. It is known to be a
7118 PARALLEL and the first section will be tested. */
7121 load_multiple_operation (op, mode)
7123 enum machine_mode mode ATTRIBUTE_UNUSED;
7125 int count = XVECLEN (op, 0);
7126 unsigned int dest_regno;
7130 /* Perform a quick check so we don't blow up below. */
7132 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7133 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7134 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7137 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7138 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7140 for (i = 1; i < count; i++)
7142 rtx elt = XVECEXP (op, 0, i);
7144 if (GET_CODE (elt) != SET
7145 || GET_CODE (SET_DEST (elt)) != REG
7146 || GET_MODE (SET_DEST (elt)) != SImode
7147 || REGNO (SET_DEST (elt)) != dest_regno + i
7148 || GET_CODE (SET_SRC (elt)) != MEM
7149 || GET_MODE (SET_SRC (elt)) != SImode
7150 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7151 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7152 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7153 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7160 /* Similar, but tests for store multiple. Here, the second vector element
7161 is a CLOBBER. It will be tested later. */
7164 store_multiple_operation (op, mode)
7166 enum machine_mode mode ATTRIBUTE_UNUSED;
7168 int count = XVECLEN (op, 0) - 1;
7169 unsigned int src_regno;
7173 /* Perform a quick check so we don't blow up below. */
7175 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7176 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7177 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7180 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7181 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7183 for (i = 1; i < count; i++)
7185 rtx elt = XVECEXP (op, 0, i + 1);
7187 if (GET_CODE (elt) != SET
7188 || GET_CODE (SET_SRC (elt)) != REG
7189 || GET_MODE (SET_SRC (elt)) != SImode
7190 || REGNO (SET_SRC (elt)) != src_regno + i
7191 || GET_CODE (SET_DEST (elt)) != MEM
7192 || GET_MODE (SET_DEST (elt)) != SImode
7193 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7194 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7195 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7196 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7203 /* Return a string to perform a load_multiple operation.
7204 operands[0] is the vector.
7205 operands[1] is the source address.
7206 operands[2] is the first destination register. */
7209 rs6000_output_load_multiple (operands)
7212 /* We have to handle the case where the pseudo used to contain the address
7213 is assigned to one of the output registers. */
7215 int words = XVECLEN (operands[0], 0);
7218 if (XVECLEN (operands[0], 0) == 1)
7219 return "{l|lwz} %2,0(%1)";
7221 for (i = 0; i < words; i++)
7222 if (refers_to_regno_p (REGNO (operands[2]) + i,
7223 REGNO (operands[2]) + i + 1, operands[1], 0))
7227 xop[0] = GEN_INT (4 * (words-1));
7228 xop[1] = operands[1];
7229 xop[2] = operands[2];
7230 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7235 xop[0] = GEN_INT (4 * (words-1));
7236 xop[1] = operands[1];
7237 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7238 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7243 for (j = 0; j < words; j++)
7246 xop[0] = GEN_INT (j * 4);
7247 xop[1] = operands[1];
7248 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7249 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7251 xop[0] = GEN_INT (i * 4);
7252 xop[1] = operands[1];
7253 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7258 return "{lsi|lswi} %2,%1,%N0";
7261 /* Return 1 for a parallel vrsave operation. */
7264 vrsave_operation (op, mode)
7266 enum machine_mode mode ATTRIBUTE_UNUSED;
7268 int count = XVECLEN (op, 0);
7269 unsigned int dest_regno, src_regno;
7273 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7274 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7275 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7278 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7279 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7281 if (dest_regno != VRSAVE_REGNO
7282 && src_regno != VRSAVE_REGNO)
7285 for (i = 1; i < count; i++)
7287 rtx elt = XVECEXP (op, 0, i);
7289 if (GET_CODE (elt) != CLOBBER
7290 && GET_CODE (elt) != SET)
7297 /* Return 1 for an PARALLEL suitable for mtcrf. */
7300 mtcrf_operation (op, mode)
7302 enum machine_mode mode ATTRIBUTE_UNUSED;
7304 int count = XVECLEN (op, 0);
7308 /* Perform a quick check so we don't blow up below. */
7310 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7311 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7312 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7314 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7316 if (GET_CODE (src_reg) != REG
7317 || GET_MODE (src_reg) != SImode
7318 || ! INT_REGNO_P (REGNO (src_reg)))
7321 for (i = 0; i < count; i++)
7323 rtx exp = XVECEXP (op, 0, i);
7327 if (GET_CODE (exp) != SET
7328 || GET_CODE (SET_DEST (exp)) != REG
7329 || GET_MODE (SET_DEST (exp)) != CCmode
7330 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7332 unspec = SET_SRC (exp);
7333 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7335 if (GET_CODE (unspec) != UNSPEC
7336 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7337 || XVECLEN (unspec, 0) != 2
7338 || XVECEXP (unspec, 0, 0) != src_reg
7339 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7340 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7346 /* Return 1 for an PARALLEL suitable for lmw. */
7349 lmw_operation (op, mode)
7351 enum machine_mode mode ATTRIBUTE_UNUSED;
7353 int count = XVECLEN (op, 0);
7354 unsigned int dest_regno;
7356 unsigned int base_regno;
7357 HOST_WIDE_INT offset;
7360 /* Perform a quick check so we don't blow up below. */
7362 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7363 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7364 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7367 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7368 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7371 || count != 32 - (int) dest_regno)
7374 if (legitimate_indirect_address_p (src_addr, 0))
7377 base_regno = REGNO (src_addr);
7378 if (base_regno == 0)
7381 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7383 offset = INTVAL (XEXP (src_addr, 1));
7384 base_regno = REGNO (XEXP (src_addr, 0));
7389 for (i = 0; i < count; i++)
7391 rtx elt = XVECEXP (op, 0, i);
7394 HOST_WIDE_INT newoffset;
7396 if (GET_CODE (elt) != SET
7397 || GET_CODE (SET_DEST (elt)) != REG
7398 || GET_MODE (SET_DEST (elt)) != SImode
7399 || REGNO (SET_DEST (elt)) != dest_regno + i
7400 || GET_CODE (SET_SRC (elt)) != MEM
7401 || GET_MODE (SET_SRC (elt)) != SImode)
7403 newaddr = XEXP (SET_SRC (elt), 0);
7404 if (legitimate_indirect_address_p (newaddr, 0))
7409 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7411 addr_reg = XEXP (newaddr, 0);
7412 newoffset = INTVAL (XEXP (newaddr, 1));
7416 if (REGNO (addr_reg) != base_regno
7417 || newoffset != offset + 4 * i)
7424 /* Return 1 for an PARALLEL suitable for stmw. */
7427 stmw_operation (op, mode)
7429 enum machine_mode mode ATTRIBUTE_UNUSED;
7431 int count = XVECLEN (op, 0);
7432 unsigned int src_regno;
7434 unsigned int base_regno;
7435 HOST_WIDE_INT offset;
7438 /* Perform a quick check so we don't blow up below. */
7440 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7441 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7442 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7445 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7446 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7449 || count != 32 - (int) src_regno)
7452 if (legitimate_indirect_address_p (dest_addr, 0))
7455 base_regno = REGNO (dest_addr);
7456 if (base_regno == 0)
7459 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7461 offset = INTVAL (XEXP (dest_addr, 1));
7462 base_regno = REGNO (XEXP (dest_addr, 0));
7467 for (i = 0; i < count; i++)
7469 rtx elt = XVECEXP (op, 0, i);
7472 HOST_WIDE_INT newoffset;
7474 if (GET_CODE (elt) != SET
7475 || GET_CODE (SET_SRC (elt)) != REG
7476 || GET_MODE (SET_SRC (elt)) != SImode
7477 || REGNO (SET_SRC (elt)) != src_regno + i
7478 || GET_CODE (SET_DEST (elt)) != MEM
7479 || GET_MODE (SET_DEST (elt)) != SImode)
7481 newaddr = XEXP (SET_DEST (elt), 0);
7482 if (legitimate_indirect_address_p (newaddr, 0))
7487 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7489 addr_reg = XEXP (newaddr, 0);
7490 newoffset = INTVAL (XEXP (newaddr, 1));
7494 if (REGNO (addr_reg) != base_regno
7495 || newoffset != offset + 4 * i)
7502 /* A validation routine: say whether CODE, a condition code, and MODE
7503 match. The other alternatives either don't make sense or should
7504 never be generated. */
7507 validate_condition_mode (code, mode)
7509 enum machine_mode mode;
7511 if (GET_RTX_CLASS (code) != '<'
7512 || GET_MODE_CLASS (mode) != MODE_CC)
7515 /* These don't make sense. */
7516 if ((code == GT || code == LT || code == GE || code == LE)
7517 && mode == CCUNSmode)
7520 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7521 && mode != CCUNSmode)
7524 if (mode != CCFPmode
7525 && (code == ORDERED || code == UNORDERED
7526 || code == UNEQ || code == LTGT
7527 || code == UNGT || code == UNLT
7528 || code == UNGE || code == UNLE))
7531 /* These should never be generated except for
7532 flag_finite_math_only. */
7533 if (mode == CCFPmode
7534 && ! flag_finite_math_only
7535 && (code == LE || code == GE
7536 || code == UNEQ || code == LTGT
7537 || code == UNGT || code == UNLT))
7540 /* These are invalid; the information is not there. */
7541 if (mode == CCEQmode
7542 && code != EQ && code != NE)
7546 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7547 We only check the opcode against the mode of the CC value here. */
7550 branch_comparison_operator (op, mode)
7552 enum machine_mode mode ATTRIBUTE_UNUSED;
7554 enum rtx_code code = GET_CODE (op);
7555 enum machine_mode cc_mode;
7557 if (GET_RTX_CLASS (code) != '<')
7560 cc_mode = GET_MODE (XEXP (op, 0));
7561 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7564 validate_condition_mode (code, cc_mode);
7569 /* Return 1 if OP is a comparison operation that is valid for a branch
7570 insn and which is true if the corresponding bit in the CC register
7574 branch_positive_comparison_operator (op, mode)
7576 enum machine_mode mode;
7580 if (! branch_comparison_operator (op, mode))
7583 code = GET_CODE (op);
7584 return (code == EQ || code == LT || code == GT
7585 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7586 || code == LTU || code == GTU
7587 || code == UNORDERED);
7590 /* Return 1 if OP is a comparison operation that is valid for an scc
7591 insn: it must be a positive comparison. */
7594 scc_comparison_operator (op, mode)
7596 enum machine_mode mode;
7598 return branch_positive_comparison_operator (op, mode);
7602 trap_comparison_operator (op, mode)
7604 enum machine_mode mode;
7606 if (mode != VOIDmode && mode != GET_MODE (op))
7608 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7612 boolean_operator (op, mode)
7614 enum machine_mode mode ATTRIBUTE_UNUSED;
7616 enum rtx_code code = GET_CODE (op);
7617 return (code == AND || code == IOR || code == XOR);
7621 boolean_or_operator (op, mode)
7623 enum machine_mode mode ATTRIBUTE_UNUSED;
7625 enum rtx_code code = GET_CODE (op);
7626 return (code == IOR || code == XOR);
7630 min_max_operator (op, mode)
7632 enum machine_mode mode ATTRIBUTE_UNUSED;
7634 enum rtx_code code = GET_CODE (op);
7635 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7638 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7639 mask required to convert the result of a rotate insn into a shift
7640 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7643 includes_lshift_p (shiftop, andop)
7647 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7649 shift_mask <<= INTVAL (shiftop);
7651 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7654 /* Similar, but for right shift. */
7657 includes_rshift_p (shiftop, andop)
7661 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7663 shift_mask >>= INTVAL (shiftop);
7665 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7668 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7669 to perform a left shift. It must have exactly SHIFTOP least
7670 significant 0's, then one or more 1's, then zero or more 0's. */
7673 includes_rldic_lshift_p (shiftop, andop)
7677 if (GET_CODE (andop) == CONST_INT)
7679 HOST_WIDE_INT c, lsb, shift_mask;
7682 if (c == 0 || c == ~0)
7686 shift_mask <<= INTVAL (shiftop);
7688 /* Find the least significant one bit. */
7691 /* It must coincide with the LSB of the shift mask. */
7692 if (-lsb != shift_mask)
7695 /* Invert to look for the next transition (if any). */
7698 /* Remove the low group of ones (originally low group of zeros). */
7701 /* Again find the lsb, and check we have all 1's above. */
7705 else if (GET_CODE (andop) == CONST_DOUBLE
7706 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7708 HOST_WIDE_INT low, high, lsb;
7709 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7711 low = CONST_DOUBLE_LOW (andop);
7712 if (HOST_BITS_PER_WIDE_INT < 64)
7713 high = CONST_DOUBLE_HIGH (andop);
7715 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7716 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7719 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7721 shift_mask_high = ~0;
7722 if (INTVAL (shiftop) > 32)
7723 shift_mask_high <<= INTVAL (shiftop) - 32;
7727 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7734 return high == -lsb;
7737 shift_mask_low = ~0;
7738 shift_mask_low <<= INTVAL (shiftop);
7742 if (-lsb != shift_mask_low)
7745 if (HOST_BITS_PER_WIDE_INT < 64)
7750 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7753 return high == -lsb;
7757 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7763 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7764 to perform a left shift. It must have SHIFTOP or more least
7765 significant 0's, with the remainder of the word 1's. */
7768 includes_rldicr_lshift_p (shiftop, andop)
7772 if (GET_CODE (andop) == CONST_INT)
7774 HOST_WIDE_INT c, lsb, shift_mask;
7777 shift_mask <<= INTVAL (shiftop);
7780 /* Find the least significant one bit. */
7783 /* It must be covered by the shift mask.
7784 This test also rejects c == 0. */
7785 if ((lsb & shift_mask) == 0)
7788 /* Check we have all 1's above the transition, and reject all 1's. */
7789 return c == -lsb && lsb != 1;
7791 else if (GET_CODE (andop) == CONST_DOUBLE
7792 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7794 HOST_WIDE_INT low, lsb, shift_mask_low;
7796 low = CONST_DOUBLE_LOW (andop);
7798 if (HOST_BITS_PER_WIDE_INT < 64)
7800 HOST_WIDE_INT high, shift_mask_high;
7802 high = CONST_DOUBLE_HIGH (andop);
7806 shift_mask_high = ~0;
7807 if (INTVAL (shiftop) > 32)
7808 shift_mask_high <<= INTVAL (shiftop) - 32;
7812 if ((lsb & shift_mask_high) == 0)
7815 return high == -lsb;
7821 shift_mask_low = ~0;
7822 shift_mask_low <<= INTVAL (shiftop);
7826 if ((lsb & shift_mask_low) == 0)
7829 return low == -lsb && lsb != 1;
7835 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7836 for lfq and stfq insns.
7838 Note reg1 and reg2 *must* be hard registers. To be sure we will
7839 abort if we are passed pseudo registers. */
7842 registers_ok_for_quad_peep (reg1, reg2)
7845 /* We might have been passed a SUBREG. */
7846 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7849 return (REGNO (reg1) == REGNO (reg2) - 1);
7852 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7853 addr1 and addr2 must be in consecutive memory locations
7854 (addr2 == addr1 + 8). */
7857 addrs_ok_for_quad_peep (addr1, addr2)
7864 /* Extract an offset (if used) from the first addr. */
7865 if (GET_CODE (addr1) == PLUS)
7867 /* If not a REG, return zero. */
7868 if (GET_CODE (XEXP (addr1, 0)) != REG)
7872 reg1 = REGNO (XEXP (addr1, 0));
7873 /* The offset must be constant! */
7874 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7876 offset1 = INTVAL (XEXP (addr1, 1));
7879 else if (GET_CODE (addr1) != REG)
7883 reg1 = REGNO (addr1);
7884 /* This was a simple (mem (reg)) expression. Offset is 0. */
7888 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7889 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7890 register as addr1. */
7891 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7893 if (GET_CODE (addr2) != PLUS)
7896 if (GET_CODE (XEXP (addr2, 0)) != REG
7897 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7900 if (reg1 != REGNO (XEXP (addr2, 0)))
7903 /* The offset for the second addr must be 8 more than the first addr. */
7904 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7907 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7912 /* Return the register class of a scratch register needed to copy IN into
7913 or out of a register in CLASS in MODE. If it can be done directly,
7914 NO_REGS is returned. */
7917 secondary_reload_class (class, mode, in)
7918 enum reg_class class;
7919 enum machine_mode mode ATTRIBUTE_UNUSED;
7924 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7926 && MACHOPIC_INDIRECT
7930 /* We cannot copy a symbolic operand directly into anything
7931 other than BASE_REGS for TARGET_ELF. So indicate that a
7932 register from BASE_REGS is needed as an intermediate
7935 On Darwin, pic addresses require a load from memory, which
7936 needs a base register. */
7937 if (class != BASE_REGS
7938 && (GET_CODE (in) == SYMBOL_REF
7939 || GET_CODE (in) == HIGH
7940 || GET_CODE (in) == LABEL_REF
7941 || GET_CODE (in) == CONST))
7945 if (GET_CODE (in) == REG)
7948 if (regno >= FIRST_PSEUDO_REGISTER)
7950 regno = true_regnum (in);
7951 if (regno >= FIRST_PSEUDO_REGISTER)
7955 else if (GET_CODE (in) == SUBREG)
7957 regno = true_regnum (in);
7958 if (regno >= FIRST_PSEUDO_REGISTER)
7964 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7966 if (class == GENERAL_REGS || class == BASE_REGS
7967 || (regno >= 0 && INT_REGNO_P (regno)))
7970 /* Constants, memory, and FP registers can go into FP registers. */
7971 if ((regno == -1 || FP_REGNO_P (regno))
7972 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7975 /* Memory, and AltiVec registers can go into AltiVec registers. */
7976 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7977 && class == ALTIVEC_REGS)
7980 /* We can copy among the CR registers. */
7981 if ((class == CR_REGS || class == CR0_REGS)
7982 && regno >= 0 && CR_REGNO_P (regno))
7985 /* Otherwise, we need GENERAL_REGS. */
7986 return GENERAL_REGS;
7989 /* Given a comparison operation, return the bit number in CCR to test. We
7990 know this is a valid comparison.
7992 SCC_P is 1 if this is for an scc. That means that %D will have been
7993 used instead of %C, so the bits will be in different places.
7995 Return -1 if OP isn't a valid comparison for some reason. */
8002 enum rtx_code code = GET_CODE (op);
8003 enum machine_mode cc_mode;
8008 if (GET_RTX_CLASS (code) != '<')
8013 if (GET_CODE (reg) != REG
8014 || ! CR_REGNO_P (REGNO (reg)))
8017 cc_mode = GET_MODE (reg);
8018 cc_regnum = REGNO (reg);
8019 base_bit = 4 * (cc_regnum - CR0_REGNO);
8021 validate_condition_mode (code, cc_mode);
8023 /* When generating a sCOND operation, only positive conditions are
8025 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8026 && code != GTU && code != LTU)
8032 if (TARGET_E500 && !TARGET_FPRS
8033 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8034 return base_bit + 1;
8035 return scc_p ? base_bit + 3 : base_bit + 2;
8037 if (TARGET_E500 && !TARGET_FPRS
8038 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8039 return base_bit + 1;
8040 return base_bit + 2;
8041 case GT: case GTU: case UNLE:
8042 return base_bit + 1;
8043 case LT: case LTU: case UNGE:
8045 case ORDERED: case UNORDERED:
8046 return base_bit + 3;
8049 /* If scc, we will have done a cror to put the bit in the
8050 unordered position. So test that bit. For integer, this is ! LT
8051 unless this is an scc insn. */
8052 return scc_p ? base_bit + 3 : base_bit;
8055 return scc_p ? base_bit + 3 : base_bit + 1;
8062 /* Return the GOT register. */
8065 rs6000_got_register (value)
8066 rtx value ATTRIBUTE_UNUSED;
8068 /* The second flow pass currently (June 1999) can't update
8069 regs_ever_live without disturbing other parts of the compiler, so
8070 update it here to make the prolog/epilogue code happy. */
8071 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8072 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8074 current_function_uses_pic_offset_table = 1;
8076 return pic_offset_table_rtx;
8079 /* Function to init struct machine_function.
8080 This will be called, via a pointer variable,
8081 from push_function_context. */
8083 static struct machine_function *
8084 rs6000_init_machine_status ()
8086 return ggc_alloc_cleared (sizeof (machine_function));
8089 /* These macros test for integers and extract the low-order bits. */
8091 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8092 && GET_MODE (X) == VOIDmode)
8094 #define INT_LOWPART(X) \
8095 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8102 unsigned long val = INT_LOWPART (op);
8104 /* If the high bit is zero, the value is the first 1 bit we find
8106 if ((val & 0x80000000) == 0)
8108 if ((val & 0xffffffff) == 0)
8112 while (((val <<= 1) & 0x80000000) == 0)
8117 /* If the high bit is set and the low bit is not, or the mask is all
8118 1's, the value is zero. */
8119 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8122 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8125 while (((val >>= 1) & 1) != 0)
8136 unsigned long val = INT_LOWPART (op);
8138 /* If the low bit is zero, the value is the first 1 bit we find from
8142 if ((val & 0xffffffff) == 0)
8146 while (((val >>= 1) & 1) == 0)
8152 /* If the low bit is set and the high bit is not, or the mask is all
8153 1's, the value is 31. */
8154 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8157 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8160 while (((val <<= 1) & 0x80000000) != 0)
8166 /* Locate some local-dynamic symbol still in use by this function
8167 so that we can print its name in some tls_ld pattern. */
8170 rs6000_get_some_local_dynamic_name ()
8174 if (cfun->machine->some_ld_name)
8175 return cfun->machine->some_ld_name;
8177 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8179 && for_each_rtx (&PATTERN (insn),
8180 rs6000_get_some_local_dynamic_name_1, 0))
8181 return cfun->machine->some_ld_name;
8186 /* Helper function for rs6000_get_some_local_dynamic_name. */
8189 rs6000_get_some_local_dynamic_name_1 (px, data)
8191 void *data ATTRIBUTE_UNUSED;
8195 if (GET_CODE (x) == SYMBOL_REF)
8197 const char *str = XSTR (x, 0);
8198 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8200 cfun->machine->some_ld_name = str;
8208 /* Print an operand. Recognize special options, documented below. */
8211 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8212 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8214 #define SMALL_DATA_RELOC "sda21"
8215 #define SMALL_DATA_REG 0
8219 print_operand (file, x, code)
8226 unsigned HOST_WIDE_INT uval;
8231 /* Write out an instruction after the call which may be replaced
8232 with glue code by the loader. This depends on the AIX version. */
8233 asm_fprintf (file, RS6000_CALL_GLUE);
8236 /* %a is output_address. */
8239 /* If X is a constant integer whose low-order 5 bits are zero,
8240 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8241 in the AIX assembler where "sri" with a zero shift count
8242 writes a trash instruction. */
8243 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8250 /* If constant, low-order 16 bits of constant, unsigned.
8251 Otherwise, write normally. */
8253 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8255 print_operand (file, x, 0);
8259 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8260 for 64-bit mask direction. */
8261 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8264 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8268 /* X is a CR register. Print the number of the EQ bit of the CR */
8269 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8270 output_operand_lossage ("invalid %%E value");
8272 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8276 /* X is a CR register. Print the shift count needed to move it
8277 to the high-order four bits. */
8278 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8279 output_operand_lossage ("invalid %%f value");
8281 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8285 /* Similar, but print the count for the rotate in the opposite
8287 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8288 output_operand_lossage ("invalid %%F value");
8290 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8294 /* X is a constant integer. If it is negative, print "m",
8295 otherwise print "z". This is to make an aze or ame insn. */
8296 if (GET_CODE (x) != CONST_INT)
8297 output_operand_lossage ("invalid %%G value");
8298 else if (INTVAL (x) >= 0)
8305 /* If constant, output low-order five bits. Otherwise, write
8308 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8310 print_operand (file, x, 0);
8314 /* If constant, output low-order six bits. Otherwise, write
8317 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8319 print_operand (file, x, 0);
8323 /* Print `i' if this is a constant, else nothing. */
8329 /* Write the bit number in CCR for jump. */
8332 output_operand_lossage ("invalid %%j code");
8334 fprintf (file, "%d", i);
8338 /* Similar, but add one for shift count in rlinm for scc and pass
8339 scc flag to `ccr_bit'. */
8342 output_operand_lossage ("invalid %%J code");
8344 /* If we want bit 31, write a shift count of zero, not 32. */
8345 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8349 /* X must be a constant. Write the 1's complement of the
8352 output_operand_lossage ("invalid %%k value");
8354 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8358 /* X must be a symbolic constant on ELF. Write an
8359 expression suitable for an 'addi' that adds in the low 16
8361 if (GET_CODE (x) != CONST)
8363 print_operand_address (file, x);
8368 if (GET_CODE (XEXP (x, 0)) != PLUS
8369 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8370 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8371 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8372 output_operand_lossage ("invalid %%K value");
8373 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8375 /* For GNU as, there must be a non-alphanumeric character
8376 between 'l' and the number. The '-' is added by
8377 print_operand() already. */
8378 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8380 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8384 /* %l is output_asm_label. */
8387 /* Write second word of DImode or DFmode reference. Works on register
8388 or non-indexed memory only. */
8389 if (GET_CODE (x) == REG)
8390 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8391 else if (GET_CODE (x) == MEM)
8393 /* Handle possible auto-increment. Since it is pre-increment and
8394 we have already done it, we can just use an offset of word. */
8395 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8396 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8397 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8400 output_address (XEXP (adjust_address_nv (x, SImode,
8404 if (small_data_operand (x, GET_MODE (x)))
8405 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8406 reg_names[SMALL_DATA_REG]);
8411 /* MB value for a mask operand. */
8412 if (! mask_operand (x, SImode))
8413 output_operand_lossage ("invalid %%m value");
8415 fprintf (file, "%d", extract_MB (x));
8419 /* ME value for a mask operand. */
8420 if (! mask_operand (x, SImode))
8421 output_operand_lossage ("invalid %%M value");
8423 fprintf (file, "%d", extract_ME (x));
8426 /* %n outputs the negative of its operand. */
8429 /* Write the number of elements in the vector times 4. */
8430 if (GET_CODE (x) != PARALLEL)
8431 output_operand_lossage ("invalid %%N value");
8433 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8437 /* Similar, but subtract 1 first. */
8438 if (GET_CODE (x) != PARALLEL)
8439 output_operand_lossage ("invalid %%O value");
8441 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8445 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8447 || INT_LOWPART (x) < 0
8448 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8449 output_operand_lossage ("invalid %%p value");
8451 fprintf (file, "%d", i);
8455 /* The operand must be an indirect memory reference. The result
8456 is the register number. */
8457 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8458 || REGNO (XEXP (x, 0)) >= 32)
8459 output_operand_lossage ("invalid %%P value");
8461 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8465 /* This outputs the logical code corresponding to a boolean
8466 expression. The expression may have one or both operands
8467 negated (if one, only the first one). For condition register
8468 logical operations, it will also treat the negated
8469 CR codes as NOTs, but not handle NOTs of them. */
8471 const char *const *t = 0;
8473 enum rtx_code code = GET_CODE (x);
8474 static const char * const tbl[3][3] = {
8475 { "and", "andc", "nor" },
8476 { "or", "orc", "nand" },
8477 { "xor", "eqv", "xor" } };
8481 else if (code == IOR)
8483 else if (code == XOR)
8486 output_operand_lossage ("invalid %%q value");
8488 if (GET_CODE (XEXP (x, 0)) != NOT)
8492 if (GET_CODE (XEXP (x, 1)) == NOT)
8503 /* X is a CR register. Print the mask for `mtcrf'. */
8504 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8505 output_operand_lossage ("invalid %%R value");
8507 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8511 /* Low 5 bits of 32 - value */
8513 output_operand_lossage ("invalid %%s value");
8515 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8519 /* PowerPC64 mask position. All 0's is excluded.
8520 CONST_INT 32-bit mask is considered sign-extended so any
8521 transition must occur within the CONST_INT, not on the boundary. */
8522 if (! mask64_operand (x, DImode))
8523 output_operand_lossage ("invalid %%S value");
8525 uval = INT_LOWPART (x);
8527 if (uval & 1) /* Clear Left */
8529 #if HOST_BITS_PER_WIDE_INT > 64
8530 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8534 else /* Clear Right */
8537 #if HOST_BITS_PER_WIDE_INT > 64
8538 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8546 fprintf (file, "%d", i);
8550 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8551 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8554 /* Bit 3 is OV bit. */
8555 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8557 /* If we want bit 31, write a shift count of zero, not 32. */
8558 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8562 /* Print the symbolic name of a branch target register. */
8563 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8564 && REGNO (x) != COUNT_REGISTER_REGNUM))
8565 output_operand_lossage ("invalid %%T value");
8566 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8567 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8569 fputs ("ctr", file);
8573 /* High-order 16 bits of constant for use in unsigned operand. */
8575 output_operand_lossage ("invalid %%u value");
8577 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8578 (INT_LOWPART (x) >> 16) & 0xffff);
8582 /* High-order 16 bits of constant for use in signed operand. */
8584 output_operand_lossage ("invalid %%v value");
8586 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8587 (INT_LOWPART (x) >> 16) & 0xffff);
8591 /* Print `u' if this has an auto-increment or auto-decrement. */
8592 if (GET_CODE (x) == MEM
8593 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8594 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8599 /* Print the trap code for this operand. */
8600 switch (GET_CODE (x))
8603 fputs ("eq", file); /* 4 */
8606 fputs ("ne", file); /* 24 */
8609 fputs ("lt", file); /* 16 */
8612 fputs ("le", file); /* 20 */
8615 fputs ("gt", file); /* 8 */
8618 fputs ("ge", file); /* 12 */
8621 fputs ("llt", file); /* 2 */
8624 fputs ("lle", file); /* 6 */
8627 fputs ("lgt", file); /* 1 */
8630 fputs ("lge", file); /* 5 */
8638 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8641 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8642 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8644 print_operand (file, x, 0);
8648 /* MB value for a PowerPC64 rldic operand. */
8649 val = (GET_CODE (x) == CONST_INT
8650 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8655 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8656 if ((val <<= 1) < 0)
8659 #if HOST_BITS_PER_WIDE_INT == 32
8660 if (GET_CODE (x) == CONST_INT && i >= 0)
8661 i += 32; /* zero-extend high-part was all 0's */
8662 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8664 val = CONST_DOUBLE_LOW (x);
8671 for ( ; i < 64; i++)
8672 if ((val <<= 1) < 0)
8677 fprintf (file, "%d", i + 1);
8681 if (GET_CODE (x) == MEM
8682 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8687 /* Like 'L', for third word of TImode */
8688 if (GET_CODE (x) == REG)
8689 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8690 else if (GET_CODE (x) == MEM)
8692 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8693 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8694 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8696 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8697 if (small_data_operand (x, GET_MODE (x)))
8698 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8699 reg_names[SMALL_DATA_REG]);
8704 /* X is a SYMBOL_REF. Write out the name preceded by a
8705 period and without any trailing data in brackets. Used for function
8706 names. If we are configured for System V (or the embedded ABI) on
8707 the PowerPC, do not emit the period, since those systems do not use
8708 TOCs and the like. */
8709 if (GET_CODE (x) != SYMBOL_REF)
8712 if (XSTR (x, 0)[0] != '.')
8714 switch (DEFAULT_ABI)
8729 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8731 assemble_name (file, XSTR (x, 0));
8735 /* Like 'L', for last word of TImode. */
8736 if (GET_CODE (x) == REG)
8737 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8738 else if (GET_CODE (x) == MEM)
8740 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8741 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8742 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8744 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8745 if (small_data_operand (x, GET_MODE (x)))
8746 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8747 reg_names[SMALL_DATA_REG]);
8751 /* Print AltiVec or SPE memory operand. */
8756 if (GET_CODE (x) != MEM)
8764 if (GET_CODE (tmp) == REG)
8766 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8769 /* Handle [reg+UIMM]. */
8770 else if (GET_CODE (tmp) == PLUS &&
8771 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8775 if (GET_CODE (XEXP (tmp, 0)) != REG)
8778 x = INTVAL (XEXP (tmp, 1));
8779 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8783 /* Fall through. Must be [reg+reg]. */
8785 if (GET_CODE (tmp) == REG)
8786 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8787 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8789 if (REGNO (XEXP (tmp, 0)) == 0)
8790 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8791 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8793 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8794 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8802 if (GET_CODE (x) == REG)
8803 fprintf (file, "%s", reg_names[REGNO (x)]);
8804 else if (GET_CODE (x) == MEM)
8806 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8807 know the width from the mode. */
8808 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8809 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8810 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8811 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8812 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8813 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8815 output_address (XEXP (x, 0));
8818 output_addr_const (file, x);
8822 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8826 output_operand_lossage ("invalid %%xn code");
8830 /* Print the address of an operand. */
8833 print_operand_address (file, x)
8837 if (GET_CODE (x) == REG)
8838 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8839 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8840 || GET_CODE (x) == LABEL_REF)
8842 output_addr_const (file, x);
8843 if (small_data_operand (x, GET_MODE (x)))
8844 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8845 reg_names[SMALL_DATA_REG]);
8846 else if (TARGET_TOC)
8849 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8851 if (REGNO (XEXP (x, 0)) == 0)
8852 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8853 reg_names[ REGNO (XEXP (x, 0)) ]);
8855 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8856 reg_names[ REGNO (XEXP (x, 1)) ]);
8858 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8859 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8860 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8862 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8863 && CONSTANT_P (XEXP (x, 1)))
8865 output_addr_const (file, XEXP (x, 1));
8866 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8870 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8871 && CONSTANT_P (XEXP (x, 1)))
8873 fprintf (file, "lo16(");
8874 output_addr_const (file, XEXP (x, 1));
8875 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8878 else if (legitimate_constant_pool_address_p (x))
8880 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8882 rtx contains_minus = XEXP (x, 1);
8886 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8887 turn it into (sym) for output_addr_const. */
8888 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8889 contains_minus = XEXP (contains_minus, 0);
8891 minus = XEXP (contains_minus, 0);
8892 symref = XEXP (minus, 0);
8893 XEXP (contains_minus, 0) = symref;
8898 name = XSTR (symref, 0);
8899 newname = alloca (strlen (name) + sizeof ("@toc"));
8900 strcpy (newname, name);
8901 strcat (newname, "@toc");
8902 XSTR (symref, 0) = newname;
8904 output_addr_const (file, XEXP (x, 1));
8906 XSTR (symref, 0) = name;
8907 XEXP (contains_minus, 0) = minus;
8910 output_addr_const (file, XEXP (x, 1));
8912 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8918 /* Target hook for assembling integer objects. The PowerPC version has
8919 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8920 is defined. It also needs to handle DI-mode objects on 64-bit
8924 rs6000_assemble_integer (x, size, aligned_p)
8929 #ifdef RELOCATABLE_NEEDS_FIXUP
8930 /* Special handling for SI values. */
8931 if (size == 4 && aligned_p)
8933 extern int in_toc_section PARAMS ((void));
8934 static int recurse = 0;
8936 /* For -mrelocatable, we mark all addresses that need to be fixed up
8937 in the .fixup section. */
8938 if (TARGET_RELOCATABLE
8939 && !in_toc_section ()
8940 && !in_text_section ()
8942 && GET_CODE (x) != CONST_INT
8943 && GET_CODE (x) != CONST_DOUBLE
8949 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8951 ASM_OUTPUT_LABEL (asm_out_file, buf);
8952 fprintf (asm_out_file, "\t.long\t(");
8953 output_addr_const (asm_out_file, x);
8954 fprintf (asm_out_file, ")@fixup\n");
8955 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8956 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8957 fprintf (asm_out_file, "\t.long\t");
8958 assemble_name (asm_out_file, buf);
8959 fprintf (asm_out_file, "\n\t.previous\n");
8963 /* Remove initial .'s to turn a -mcall-aixdesc function
8964 address into the address of the descriptor, not the function
8966 else if (GET_CODE (x) == SYMBOL_REF
8967 && XSTR (x, 0)[0] == '.'
8968 && DEFAULT_ABI == ABI_AIX)
8970 const char *name = XSTR (x, 0);
8971 while (*name == '.')
8974 fprintf (asm_out_file, "\t.long\t%s\n", name);
8978 #endif /* RELOCATABLE_NEEDS_FIXUP */
8979 return default_assemble_integer (x, size, aligned_p);
8982 #ifdef HAVE_GAS_HIDDEN
8983 /* Emit an assembler directive to set symbol visibility for DECL to
8987 rs6000_assemble_visibility (decl, vis)
8991 /* Functions need to have their entry point symbol visibility set as
8992 well as their descriptor symbol visibility. */
8993 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8995 static const char * const visibility_types[] = {
8996 NULL, "internal", "hidden", "protected"
8999 const char *name, *type;
9001 name = ((* targetm.strip_name_encoding)
9002 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9003 type = visibility_types[vis];
9005 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9006 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9009 default_assemble_visibility (decl, vis);
9014 rs6000_reverse_condition (mode, code)
9015 enum machine_mode mode;
9018 /* Reversal of FP compares takes care -- an ordered compare
9019 becomes an unordered compare and vice versa. */
9020 if (mode == CCFPmode
9021 && (!flag_finite_math_only
9022 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9023 || code == UNEQ || code == LTGT))
9024 return reverse_condition_maybe_unordered (code);
9026 return reverse_condition (code);
9029 /* Generate a compare for CODE. Return a brand-new rtx that
9030 represents the result of the compare. */
9033 rs6000_generate_compare (code)
9036 enum machine_mode comp_mode;
9039 if (rs6000_compare_fp_p)
9040 comp_mode = CCFPmode;
9041 else if (code == GTU || code == LTU
9042 || code == GEU || code == LEU)
9043 comp_mode = CCUNSmode;
9047 /* First, the compare. */
9048 compare_result = gen_reg_rtx (comp_mode);
9050 /* SPE FP compare instructions on the GPRs. Yuck! */
9051 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9052 && rs6000_compare_fp_p)
9054 rtx cmp, or1, or2, or_result, compare_result2;
9062 cmp = flag_finite_math_only
9063 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9065 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9066 rs6000_compare_op1);
9074 cmp = flag_finite_math_only
9075 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9077 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9078 rs6000_compare_op1);
9086 cmp = flag_finite_math_only
9087 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9089 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9090 rs6000_compare_op1);
9096 /* Synthesize LE and GE from LT/GT || EQ. */
9097 if (code == LE || code == GE || code == LEU || code == GEU)
9099 /* Synthesize GE/LE frome GT/LT || EQ. */
9105 case LE: code = LT; break;
9106 case GE: code = GT; break;
9107 case LEU: code = LT; break;
9108 case GEU: code = GT; break;
9112 or1 = gen_reg_rtx (SImode);
9113 or2 = gen_reg_rtx (SImode);
9114 or_result = gen_reg_rtx (CCEQmode);
9115 compare_result2 = gen_reg_rtx (CCFPmode);
9118 cmp = flag_finite_math_only
9119 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9121 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9122 rs6000_compare_op1);
9125 /* The MC8540 FP compare instructions set the CR bits
9126 differently than other PPC compare instructions. For
9127 that matter, there is no generic test instruction, but a
9128 testgt, testlt, and testeq. For a true condition, bit 2
9129 is set (x1xx) in the CR. Following the traditional CR
9135 ... bit 2 would be a GT CR alias, so later on we
9136 look in the GT bits for the branch instructions.
9137 However, we must be careful to emit correct RTL in
9138 the meantime, so optimizations don't get confused. */
9140 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9141 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9143 /* OR them together. */
9144 cmp = gen_rtx_SET (VOIDmode, or_result,
9145 gen_rtx_COMPARE (CCEQmode,
9146 gen_rtx_IOR (SImode, or1, or2),
9148 compare_result = or_result;
9153 /* We only care about 1 bit (x1xx), so map everything to NE to
9154 maintain rtl sanity. We'll get to the right bit (x1xx) at
9155 code output time. */
9156 if (code == NE || code == LTGT)
9157 /* Do the inverse here because we have no cmpne
9158 instruction. We use the cmpeq instruction and expect
9159 to get a 0 instead. */
9168 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9169 gen_rtx_COMPARE (comp_mode,
9171 rs6000_compare_op1)));
9173 /* Some kinds of FP comparisons need an OR operation;
9174 under flag_finite_math_only we don't bother. */
9175 if (rs6000_compare_fp_p
9176 && ! flag_finite_math_only
9177 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9178 && (code == LE || code == GE
9179 || code == UNEQ || code == LTGT
9180 || code == UNGT || code == UNLT))
9182 enum rtx_code or1, or2;
9183 rtx or1_rtx, or2_rtx, compare2_rtx;
9184 rtx or_result = gen_reg_rtx (CCEQmode);
9188 case LE: or1 = LT; or2 = EQ; break;
9189 case GE: or1 = GT; or2 = EQ; break;
9190 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9191 case LTGT: or1 = LT; or2 = GT; break;
9192 case UNGT: or1 = UNORDERED; or2 = GT; break;
9193 case UNLT: or1 = UNORDERED; or2 = LT; break;
9196 validate_condition_mode (or1, comp_mode);
9197 validate_condition_mode (or2, comp_mode);
9198 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9199 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9200 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9201 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9203 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9205 compare_result = or_result;
9209 validate_condition_mode (code, GET_MODE (compare_result));
9211 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9215 /* Emit the RTL for an sCOND pattern. */
9218 rs6000_emit_sCOND (code, result)
9223 enum machine_mode op_mode;
9224 enum rtx_code cond_code;
9226 condition_rtx = rs6000_generate_compare (code);
9227 cond_code = GET_CODE (condition_rtx);
9230 || cond_code == GE || cond_code == LE
9231 || cond_code == GEU || cond_code == LEU
9232 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9234 rtx not_result = gen_reg_rtx (CCEQmode);
9235 rtx not_op, rev_cond_rtx;
9236 enum machine_mode cc_mode;
9238 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9240 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9241 SImode, XEXP (condition_rtx, 0), const0_rtx);
9242 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9243 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9244 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9247 op_mode = GET_MODE (rs6000_compare_op0);
9248 if (op_mode == VOIDmode)
9249 op_mode = GET_MODE (rs6000_compare_op1);
9251 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9253 PUT_MODE (condition_rtx, DImode);
9254 convert_move (result, condition_rtx, 0);
9258 PUT_MODE (condition_rtx, SImode);
9259 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9263 /* Emit a branch of kind CODE to location LOC. */
9266 rs6000_emit_cbranch (code, loc)
9270 rtx condition_rtx, loc_ref;
9272 condition_rtx = rs6000_generate_compare (code);
9273 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9274 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9275 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9279 /* Return the string to output a conditional branch to LABEL, which is
9280 the operand number of the label, or -1 if the branch is really a
9283 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9284 condition code register and its mode specifies what kind of
9287 REVERSED is nonzero if we should reverse the sense of the comparison.
9289 INSN is the insn. */
9292 output_cbranch (op, label, reversed, insn)
9298 static char string[64];
9299 enum rtx_code code = GET_CODE (op);
9300 rtx cc_reg = XEXP (op, 0);
9301 enum machine_mode mode = GET_MODE (cc_reg);
9302 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9303 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9304 int really_reversed = reversed ^ need_longbranch;
9310 validate_condition_mode (code, mode);
9312 /* Work out which way this really branches. We could use
9313 reverse_condition_maybe_unordered here always but this
9314 makes the resulting assembler clearer. */
9315 if (really_reversed)
9317 /* Reversal of FP compares takes care -- an ordered compare
9318 becomes an unordered compare and vice versa. */
9319 if (mode == CCFPmode)
9320 code = reverse_condition_maybe_unordered (code);
9322 code = reverse_condition (code);
9325 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9327 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9330 /* Opposite of GT. */
9332 else if (code == NE)
9340 /* Not all of these are actually distinct opcodes, but
9341 we distinguish them for clarity of the resulting assembler. */
9343 ccode = "ne"; break;
9345 ccode = "eq"; break;
9347 ccode = "ge"; break;
9348 case GT: case GTU: case UNGT:
9349 ccode = "gt"; break;
9351 ccode = "le"; break;
9352 case LT: case LTU: case UNLT:
9353 ccode = "lt"; break;
9354 case UNORDERED: ccode = "un"; break;
9355 case ORDERED: ccode = "nu"; break;
9356 case UNGE: ccode = "nl"; break;
9357 case UNLE: ccode = "ng"; break;
9362 /* Maybe we have a guess as to how likely the branch is.
9363 The old mnemonics don't have a way to specify this information. */
9365 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9366 if (note != NULL_RTX)
9368 /* PROB is the difference from 50%. */
9369 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9370 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9372 /* Only hint for highly probable/improbable branches on newer
9373 cpus as static prediction overrides processor dynamic
9374 prediction. For older cpus we may as well always hint, but
9375 assume not taken for branches that are very close to 50% as a
9376 mispredicted taken branch is more expensive than a
9377 mispredicted not-taken branch. */
9379 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9381 if (abs (prob) > REG_BR_PROB_BASE / 20
9382 && ((prob > 0) ^ need_longbranch))
9390 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9392 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9394 /* We need to escape any '%' characters in the reg_names string.
9395 Assume they'd only be the first character... */
9396 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9398 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9402 /* If the branch distance was too far, we may have to use an
9403 unconditional branch to go the distance. */
9404 if (need_longbranch)
9405 s += sprintf (s, ",$+8\n\tb %s", label);
9407 s += sprintf (s, ",%s", label);
9413 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9414 operands of the last comparison is nonzero/true, FALSE_COND if it
9415 is zero/false. Return 0 if the hardware has no such operation. */
9418 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9424 enum rtx_code code = GET_CODE (op);
9425 rtx op0 = rs6000_compare_op0;
9426 rtx op1 = rs6000_compare_op1;
9428 enum machine_mode compare_mode = GET_MODE (op0);
9429 enum machine_mode result_mode = GET_MODE (dest);
9432 /* These modes should always match. */
9433 if (GET_MODE (op1) != compare_mode
9434 /* In the isel case however, we can use a compare immediate, so
9435 op1 may be a small constant. */
9436 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9438 if (GET_MODE (true_cond) != result_mode)
9440 if (GET_MODE (false_cond) != result_mode)
9443 /* First, work out if the hardware can do this at all, or
9444 if it's too slow... */
9445 if (! rs6000_compare_fp_p)
9448 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9452 /* Eliminate half of the comparisons by switching operands, this
9453 makes the remaining code simpler. */
9454 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9455 || code == LTGT || code == LT || code == UNLE)
9457 code = reverse_condition_maybe_unordered (code);
9459 true_cond = false_cond;
9463 /* UNEQ and LTGT take four instructions for a comparison with zero,
9464 it'll probably be faster to use a branch here too. */
9465 if (code == UNEQ && HONOR_NANS (compare_mode))
9468 if (GET_CODE (op1) == CONST_DOUBLE)
9469 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9471 /* We're going to try to implement comparisons by performing
9472 a subtract, then comparing against zero. Unfortunately,
9473 Inf - Inf is NaN which is not zero, and so if we don't
9474 know that the operand is finite and the comparison
9475 would treat EQ different to UNORDERED, we can't do it. */
9476 if (HONOR_INFINITIES (compare_mode)
9477 && code != GT && code != UNGE
9478 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9479 /* Constructs of the form (a OP b ? a : b) are safe. */
9480 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9481 || (! rtx_equal_p (op0, true_cond)
9482 && ! rtx_equal_p (op1, true_cond))))
9484 /* At this point we know we can use fsel. */
9486 /* Reduce the comparison to a comparison against zero. */
9487 temp = gen_reg_rtx (compare_mode);
9488 emit_insn (gen_rtx_SET (VOIDmode, temp,
9489 gen_rtx_MINUS (compare_mode, op0, op1)));
9491 op1 = CONST0_RTX (compare_mode);
9493 /* If we don't care about NaNs we can reduce some of the comparisons
9494 down to faster ones. */
9495 if (! HONOR_NANS (compare_mode))
9501 true_cond = false_cond;
9514 /* Now, reduce everything down to a GE. */
9521 temp = gen_reg_rtx (compare_mode);
9522 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9527 temp = gen_reg_rtx (compare_mode);
9528 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9533 temp = gen_reg_rtx (compare_mode);
9534 emit_insn (gen_rtx_SET (VOIDmode, temp,
9535 gen_rtx_NEG (compare_mode,
9536 gen_rtx_ABS (compare_mode, op0))));
9541 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9542 temp = gen_reg_rtx (result_mode);
9543 emit_insn (gen_rtx_SET (VOIDmode, temp,
9544 gen_rtx_IF_THEN_ELSE (result_mode,
9545 gen_rtx_GE (VOIDmode,
9547 true_cond, false_cond)));
9548 false_cond = true_cond;
9551 temp = gen_reg_rtx (compare_mode);
9552 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9557 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9558 temp = gen_reg_rtx (result_mode);
9559 emit_insn (gen_rtx_SET (VOIDmode, temp,
9560 gen_rtx_IF_THEN_ELSE (result_mode,
9561 gen_rtx_GE (VOIDmode,
9563 true_cond, false_cond)));
9564 true_cond = false_cond;
9567 temp = gen_reg_rtx (compare_mode);
9568 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9576 emit_insn (gen_rtx_SET (VOIDmode, dest,
9577 gen_rtx_IF_THEN_ELSE (result_mode,
9578 gen_rtx_GE (VOIDmode,
9580 true_cond, false_cond)));
9584 /* Same as above, but for ints (isel). */
9587 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9593 rtx condition_rtx, cr;
9595 /* All isel implementations thus far are 32-bits. */
9596 if (GET_MODE (rs6000_compare_op0) != SImode)
9599 /* We still have to do the compare, because isel doesn't do a
9600 compare, it just looks at the CRx bits set by a previous compare
9602 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9603 cr = XEXP (condition_rtx, 0);
9605 if (GET_MODE (cr) == CCmode)
9606 emit_insn (gen_isel_signed (dest, condition_rtx,
9607 true_cond, false_cond, cr));
9609 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9610 true_cond, false_cond, cr));
9616 output_isel (operands)
9621 code = GET_CODE (operands[1]);
9622 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9624 PUT_CODE (operands[1], reverse_condition (code));
9625 return "isel %0,%3,%2,%j1";
9628 return "isel %0,%2,%3,%j1";
9632 rs6000_emit_minmax (dest, code, op0, op1)
9638 enum machine_mode mode = GET_MODE (op0);
9642 if (code == SMAX || code == SMIN)
9647 if (code == SMAX || code == UMAX)
9648 target = emit_conditional_move (dest, c, op0, op1, mode,
9651 target = emit_conditional_move (dest, c, op0, op1, mode,
9653 if (target == NULL_RTX)
9656 emit_move_insn (dest, target);
9659 /* Called by altivec splitter.
9661 operands[0] : Destination of move
9662 operands[1] : Source of move
9663 noperands : Size of operands vector
9665 operands[2-5] ([2-3] in 64 bit) : Destination slots
9666 operands[6-9] ([4-5] in 64 bit) : Source slots
9668 Splits the move of operands[1] to operands[0].
9669 This is done, if GPRs are one of the operands. In this case
9670 a sequence of simple move insns has to be issued. The sequence of these
9671 move insns has to be done in correct order to avoid early clobber of the
9672 base register or destructive overlap of registers.
9676 rs6000_split_altivec_in_gprs (rtx *operands)
9678 int nregs, reg, i, j;
9679 enum machine_mode mode;
9681 /* Calculate number to move (2/4 for 32/64 bit mode). */
9683 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9684 mode = GET_MODE (operands[0]);
9685 nregs = HARD_REGNO_NREGS (reg, mode);
9687 if (REG_P (operands[1])
9688 && REG_P (operands[0])
9689 && (REGNO (operands[1]) < REGNO (operands[0])))
9691 /* Move register range backwards, if we have destructive overlap. */
9694 for (i = 0; i < nregs; i++)
9697 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9698 operands[i + 2 + nregs] =
9699 operand_subword (operands[1], j, 0, mode);
9706 if (GET_CODE (operands[1]) == MEM)
9709 /* We have offsettable addresses only. If we use one of the
9710 registers to address memory, we have change that register last. */
9711 breg = GET_CODE (XEXP (operands[1], 0)) == PLUS ?
9712 XEXP (XEXP (operands[1], 0), 0) :
9713 XEXP (operands[1], 0);
9715 if (REGNO (breg) >= REGNO (operands[0])
9716 && REGNO (breg) < REGNO (operands[0]) + nregs)
9717 j = REGNO (breg) - REGNO (operands[0]);
9720 for (i = 0; i < nregs; i++)
9722 /* Calculate index to next subword. */
9727 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9728 operands[i + 2 + nregs] =
9729 operand_subword (operands[1], j, 0, mode);
9736 /* This page contains routines that are used to determine what the
9737 function prologue and epilogue code will do and write them out. */
9739 /* Return the first fixed-point register that is required to be
9740 saved. 32 if none. */
9743 first_reg_to_save ()
9747 /* Find lowest numbered live register. */
9748 for (first_reg = 13; first_reg <= 31; first_reg++)
9749 if (regs_ever_live[first_reg]
9750 && (! call_used_regs[first_reg]
9751 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9752 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9753 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9758 && current_function_uses_pic_offset_table
9759 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9760 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9766 /* Similar, for FP regs. */
9769 first_fp_reg_to_save ()
9773 /* Find lowest numbered live register. */
9774 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9775 if (regs_ever_live[first_reg])
9781 /* Similar, for AltiVec regs. */
9784 first_altivec_reg_to_save ()
9788 /* Stack frame remains as is unless we are in AltiVec ABI. */
9789 if (! TARGET_ALTIVEC_ABI)
9790 return LAST_ALTIVEC_REGNO + 1;
9792 /* Find lowest numbered live register. */
9793 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9794 if (regs_ever_live[i])
9800 /* Return a 32-bit mask of the AltiVec registers we need to set in
9801 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9802 the 32-bit word is 0. */
9805 compute_vrsave_mask ()
9807 unsigned int i, mask = 0;
9809 /* First, find out if we use _any_ altivec registers. */
9810 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9811 if (regs_ever_live[i])
9812 mask |= ALTIVEC_REG_BIT (i);
9817 /* Next, remove the argument registers from the set. These must
9818 be in the VRSAVE mask set by the caller, so we don't need to add
9819 them in again. More importantly, the mask we compute here is
9820 used to generate CLOBBERs in the set_vrsave insn, and we do not
9821 wish the argument registers to die. */
9822 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9823 mask &= ~ALTIVEC_REG_BIT (i);
9825 /* Similarly, remove the return value from the set. */
9828 diddle_return_value (is_altivec_return_reg, &yes);
9830 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9837 is_altivec_return_reg (reg, xyes)
9841 bool *yes = (bool *) xyes;
9842 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9847 /* Calculate the stack information for the current function. This is
9848 complicated by having two separate calling sequences, the AIX calling
9849 sequence and the V.4 calling sequence.
9851 AIX (and Darwin/Mac OS X) stack frames look like:
9853 SP----> +---------------------------------------+
9854 | back chain to caller | 0 0
9855 +---------------------------------------+
9856 | saved CR | 4 8 (8-11)
9857 +---------------------------------------+
9859 +---------------------------------------+
9860 | reserved for compilers | 12 24
9861 +---------------------------------------+
9862 | reserved for binders | 16 32
9863 +---------------------------------------+
9864 | saved TOC pointer | 20 40
9865 +---------------------------------------+
9866 | Parameter save area (P) | 24 48
9867 +---------------------------------------+
9868 | Alloca space (A) | 24+P etc.
9869 +---------------------------------------+
9870 | Local variable space (L) | 24+P+A
9871 +---------------------------------------+
9872 | Float/int conversion temporary (X) | 24+P+A+L
9873 +---------------------------------------+
9874 | Save area for AltiVec registers (W) | 24+P+A+L+X
9875 +---------------------------------------+
9876 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9877 +---------------------------------------+
9878 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9879 +---------------------------------------+
9880 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9881 +---------------------------------------+
9882 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9883 +---------------------------------------+
9884 old SP->| back chain to caller's caller |
9885 +---------------------------------------+
9887 The required alignment for AIX configurations is two words (i.e., 8
9891 V.4 stack frames look like:
9893 SP----> +---------------------------------------+
9894 | back chain to caller | 0
9895 +---------------------------------------+
9896 | caller's saved LR | 4
9897 +---------------------------------------+
9898 | Parameter save area (P) | 8
9899 +---------------------------------------+
9900 | Alloca space (A) | 8+P
9901 +---------------------------------------+
9902 | Varargs save area (V) | 8+P+A
9903 +---------------------------------------+
9904 | Local variable space (L) | 8+P+A+V
9905 +---------------------------------------+
9906 | Float/int conversion temporary (X) | 8+P+A+V+L
9907 +---------------------------------------+
9908 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9909 +---------------------------------------+
9910 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9911 +---------------------------------------+
9912 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9913 +---------------------------------------+
9914 | SPE: area for 64-bit GP registers |
9915 +---------------------------------------+
9916 | SPE alignment padding |
9917 +---------------------------------------+
9918 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9919 +---------------------------------------+
9920 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9921 +---------------------------------------+
9922 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9923 +---------------------------------------+
9924 old SP->| back chain to caller's caller |
9925 +---------------------------------------+
9927 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9928 given. (But note below and in sysv4.h that we require only 8 and
9929 may round up the size of our stack frame anyways. The historical
9930 reason is early versions of powerpc-linux which didn't properly
9931 align the stack at program startup. A happy side-effect is that
9932 -mno-eabi libraries can be used with -meabi programs.)
9934 The EABI configuration defaults to the V.4 layout. However,
9935 the stack alignment requirements may differ. If -mno-eabi is not
9936 given, the required stack alignment is 8 bytes; if -mno-eabi is
9937 given, the required alignment is 16 bytes. (But see V.4 comment
9940 #ifndef ABI_STACK_BOUNDARY
9941 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9945 rs6000_stack_info ()
9947 static rs6000_stack_t info, zero_info;
9948 rs6000_stack_t *info_ptr = &info;
9949 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9953 /* Zero all fields portably. */
9958 /* Cache value so we don't rescan instruction chain over and over. */
9959 if (cfun->machine->insn_chain_scanned_p == 0)
9961 cfun->machine->insn_chain_scanned_p = 1;
9962 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9966 /* Select which calling sequence. */
9967 info_ptr->abi = DEFAULT_ABI;
9969 /* Calculate which registers need to be saved & save area size. */
9970 info_ptr->first_gp_reg_save = first_reg_to_save ();
9971 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9972 even if it currently looks like we won't. */
9973 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9974 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9975 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9976 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9977 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9979 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9981 /* For the SPE, we have an additional upper 32-bits on each GPR.
9982 Ideally we should save the entire 64-bits only when the upper
9983 half is used in SIMD instructions. Since we only record
9984 registers live (not the size they are used in), this proves
9985 difficult because we'd have to traverse the instruction chain at
9986 the right time, taking reload into account. This is a real pain,
9987 so we opt to save the GPRs in 64-bits always if but one register
9988 gets used in 64-bits. Otherwise, all the registers in the frame
9989 get saved in 32-bits.
9991 So... since when we save all GPRs (except the SP) in 64-bits, the
9992 traditional GP save area will be empty. */
9993 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9994 info_ptr->gp_size = 0;
9996 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9997 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9999 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10000 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10001 - info_ptr->first_altivec_reg_save);
10003 /* Does this function call anything? */
10004 info_ptr->calls_p = (! current_function_is_leaf
10005 || cfun->machine->ra_needs_full_frame);
10007 /* Determine if we need to save the link register. */
10008 if (rs6000_ra_ever_killed ()
10009 || (DEFAULT_ABI == ABI_AIX
10010 && current_function_profile
10011 && !TARGET_PROFILE_KERNEL)
10012 #ifdef TARGET_RELOCATABLE
10013 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10015 || (info_ptr->first_fp_reg_save != 64
10016 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10017 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10018 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10019 || (DEFAULT_ABI == ABI_DARWIN
10021 && current_function_uses_pic_offset_table)
10022 || info_ptr->calls_p)
10024 info_ptr->lr_save_p = 1;
10025 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10028 /* Determine if we need to save the condition code registers. */
10029 if (regs_ever_live[CR2_REGNO]
10030 || regs_ever_live[CR3_REGNO]
10031 || regs_ever_live[CR4_REGNO])
10033 info_ptr->cr_save_p = 1;
10034 if (DEFAULT_ABI == ABI_V4)
10035 info_ptr->cr_size = reg_size;
10038 /* If the current function calls __builtin_eh_return, then we need
10039 to allocate stack space for registers that will hold data for
10040 the exception handler. */
10041 if (current_function_calls_eh_return)
10044 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10047 /* SPE saves EH registers in 64-bits. */
10048 ehrd_size = i * (TARGET_SPE_ABI
10049 && info_ptr->spe_64bit_regs_used != 0
10050 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10055 /* Determine various sizes. */
10056 info_ptr->reg_size = reg_size;
10057 info_ptr->fixed_size = RS6000_SAVE_AREA;
10058 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10059 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10060 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10063 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10064 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10066 info_ptr->spe_gp_size = 0;
10068 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10070 info_ptr->vrsave_mask = compute_vrsave_mask ();
10071 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10075 info_ptr->vrsave_mask = 0;
10076 info_ptr->vrsave_size = 0;
10079 /* Calculate the offsets. */
10080 switch (DEFAULT_ABI)
10088 info_ptr->fp_save_offset = - info_ptr->fp_size;
10089 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10091 if (TARGET_ALTIVEC_ABI)
10093 info_ptr->vrsave_save_offset
10094 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10096 /* Align stack so vector save area is on a quadword boundary. */
10097 if (info_ptr->altivec_size != 0)
10098 info_ptr->altivec_padding_size
10099 = 16 - (-info_ptr->vrsave_save_offset % 16);
10101 info_ptr->altivec_padding_size = 0;
10103 info_ptr->altivec_save_offset
10104 = info_ptr->vrsave_save_offset
10105 - info_ptr->altivec_padding_size
10106 - info_ptr->altivec_size;
10108 /* Adjust for AltiVec case. */
10109 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10112 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10113 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10114 info_ptr->lr_save_offset = 2*reg_size;
10118 info_ptr->fp_save_offset = - info_ptr->fp_size;
10119 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10120 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10122 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10124 /* Align stack so SPE GPR save area is aligned on a
10125 double-word boundary. */
10126 if (info_ptr->spe_gp_size != 0)
10127 info_ptr->spe_padding_size
10128 = 8 - (-info_ptr->cr_save_offset % 8);
10130 info_ptr->spe_padding_size = 0;
10132 info_ptr->spe_gp_save_offset
10133 = info_ptr->cr_save_offset
10134 - info_ptr->spe_padding_size
10135 - info_ptr->spe_gp_size;
10137 /* Adjust for SPE case. */
10138 info_ptr->toc_save_offset
10139 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10141 else if (TARGET_ALTIVEC_ABI)
10143 info_ptr->vrsave_save_offset
10144 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10146 /* Align stack so vector save area is on a quadword boundary. */
10147 if (info_ptr->altivec_size != 0)
10148 info_ptr->altivec_padding_size
10149 = 16 - (-info_ptr->vrsave_save_offset % 16);
10151 info_ptr->altivec_padding_size = 0;
10153 info_ptr->altivec_save_offset
10154 = info_ptr->vrsave_save_offset
10155 - info_ptr->altivec_padding_size
10156 - info_ptr->altivec_size;
10158 /* Adjust for AltiVec case. */
10159 info_ptr->toc_save_offset
10160 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10163 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10164 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10165 info_ptr->lr_save_offset = reg_size;
10169 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10170 + info_ptr->gp_size
10171 + info_ptr->altivec_size
10172 + info_ptr->altivec_padding_size
10173 + info_ptr->spe_gp_size
10174 + info_ptr->spe_padding_size
10176 + info_ptr->cr_size
10177 + info_ptr->lr_size
10178 + info_ptr->vrsave_size
10179 + info_ptr->toc_size,
10180 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10183 total_raw_size = (info_ptr->vars_size
10184 + info_ptr->parm_size
10185 + info_ptr->save_size
10186 + info_ptr->varargs_size
10187 + info_ptr->fixed_size);
10189 info_ptr->total_size =
10190 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10192 /* Determine if we need to allocate any stack frame:
10194 For AIX we need to push the stack if a frame pointer is needed
10195 (because the stack might be dynamically adjusted), if we are
10196 debugging, if we make calls, or if the sum of fp_save, gp_save,
10197 and local variables are more than the space needed to save all
10198 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10199 + 18*8 = 288 (GPR13 reserved).
10201 For V.4 we don't have the stack cushion that AIX uses, but assume
10202 that the debugger can handle stackless frames. */
10204 if (info_ptr->calls_p)
10205 info_ptr->push_p = 1;
10207 else if (DEFAULT_ABI == ABI_V4)
10208 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10210 else if (frame_pointer_needed)
10211 info_ptr->push_p = 1;
10213 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10214 info_ptr->push_p = 1;
10218 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10220 /* Zero offsets if we're not saving those registers. */
10221 if (info_ptr->fp_size == 0)
10222 info_ptr->fp_save_offset = 0;
10224 if (info_ptr->gp_size == 0)
10225 info_ptr->gp_save_offset = 0;
10227 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10228 info_ptr->altivec_save_offset = 0;
10230 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10231 info_ptr->vrsave_save_offset = 0;
10233 if (! TARGET_SPE_ABI
10234 || info_ptr->spe_64bit_regs_used == 0
10235 || info_ptr->spe_gp_size == 0)
10236 info_ptr->spe_gp_save_offset = 0;
10238 if (! info_ptr->lr_save_p)
10239 info_ptr->lr_save_offset = 0;
10241 if (! info_ptr->cr_save_p)
10242 info_ptr->cr_save_offset = 0;
10244 if (! info_ptr->toc_save_p)
10245 info_ptr->toc_save_offset = 0;
10250 /* Return true if the current function uses any GPRs in 64-bit SIMD
10254 spe_func_has_64bit_regs_p ()
10258 /* Functions that save and restore all the call-saved registers will
10259 need to save/restore the registers in 64-bits. */
10260 if (current_function_calls_eh_return
10261 || current_function_calls_setjmp
10262 || current_function_has_nonlocal_goto)
10265 insns = get_insns ();
10267 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10273 i = PATTERN (insn);
10274 if (GET_CODE (i) == SET
10275 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10284 debug_stack_info (info)
10285 rs6000_stack_t *info;
10287 const char *abi_string;
10290 info = rs6000_stack_info ();
10292 fprintf (stderr, "\nStack information for function %s:\n",
10293 ((current_function_decl && DECL_NAME (current_function_decl))
10294 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10299 default: abi_string = "Unknown"; break;
10300 case ABI_NONE: abi_string = "NONE"; break;
10301 case ABI_AIX: abi_string = "AIX"; break;
10302 case ABI_DARWIN: abi_string = "Darwin"; break;
10303 case ABI_V4: abi_string = "V.4"; break;
10306 fprintf (stderr, "\tABI = %5s\n", abi_string);
10308 if (TARGET_ALTIVEC_ABI)
10309 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10311 if (TARGET_SPE_ABI)
10312 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10314 if (info->first_gp_reg_save != 32)
10315 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10317 if (info->first_fp_reg_save != 64)
10318 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10320 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10321 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10322 info->first_altivec_reg_save);
10324 if (info->lr_save_p)
10325 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10327 if (info->cr_save_p)
10328 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10330 if (info->toc_save_p)
10331 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10333 if (info->vrsave_mask)
10334 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10337 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10340 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10342 if (info->gp_save_offset)
10343 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10345 if (info->fp_save_offset)
10346 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10348 if (info->altivec_save_offset)
10349 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10350 info->altivec_save_offset);
10352 if (info->spe_gp_save_offset)
10353 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10354 info->spe_gp_save_offset);
10356 if (info->vrsave_save_offset)
10357 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10358 info->vrsave_save_offset);
10360 if (info->lr_save_offset)
10361 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10363 if (info->cr_save_offset)
10364 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10366 if (info->toc_save_offset)
10367 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10369 if (info->varargs_save_offset)
10370 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10372 if (info->total_size)
10373 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10375 if (info->varargs_size)
10376 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10378 if (info->vars_size)
10379 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10381 if (info->parm_size)
10382 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10384 if (info->fixed_size)
10385 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10388 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10390 if (info->spe_gp_size)
10391 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10394 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10396 if (info->altivec_size)
10397 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10399 if (info->vrsave_size)
10400 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10402 if (info->altivec_padding_size)
10403 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10404 info->altivec_padding_size);
10406 if (info->spe_padding_size)
10407 fprintf (stderr, "\tspe_padding_size = %5d\n",
10408 info->spe_padding_size);
10411 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10414 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10416 if (info->toc_size)
10417 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10419 if (info->save_size)
10420 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10422 if (info->reg_size != 4)
10423 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10425 fprintf (stderr, "\n");
10429 rs6000_return_addr (count, frame)
10433 /* Currently we don't optimize very well between prolog and body
10434 code and for PIC code the code can be actually quite bad, so
10435 don't try to be too clever here. */
10436 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10438 cfun->machine->ra_needs_full_frame = 1;
10445 plus_constant (copy_to_reg
10446 (gen_rtx_MEM (Pmode,
10447 memory_address (Pmode, frame))),
10448 RETURN_ADDRESS_OFFSET)));
10451 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10454 /* Say whether a function is a candidate for sibcall handling or not.
10455 We do not allow indirect calls to be optimized into sibling calls.
10456 Also, we can't do it if there are any vector parameters; there's
10457 nowhere to put the VRsave code so it works; note that functions with
10458 vector parameters are required to have a prototype, so the argument
10459 type info must be available here. (The tail recursion case can work
10460 with vector parameters, but there's no way to distinguish here.) */
10462 rs6000_function_ok_for_sibcall (decl, exp)
10464 tree exp ATTRIBUTE_UNUSED;
10469 if (TARGET_ALTIVEC_VRSAVE)
10471 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10472 type; type = TREE_CHAIN (type))
10474 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10478 if (DEFAULT_ABI == ABI_DARWIN
10479 || (*targetm.binds_local_p) (decl))
10481 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10483 if (!lookup_attribute ("longcall", attr_list)
10484 || lookup_attribute ("shortcall", attr_list))
10492 rs6000_ra_ever_killed ()
10498 /* Irritatingly, there are two kinds of thunks -- those created with
10499 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10500 through the regular part of the compiler. This is a very hacky
10501 way to tell them apart. */
10502 if (current_function_is_thunk && !no_new_pseudos)
10505 /* regs_ever_live has LR marked as used if any sibcalls are present,
10506 but this should not force saving and restoring in the
10507 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10508 clobbers LR, so that is inappropriate. */
10510 /* Also, the prologue can generate a store into LR that
10511 doesn't really count, like this:
10514 bcl to set PIC register
10518 When we're called from the epilogue, we need to avoid counting
10519 this as a store. */
10521 push_topmost_sequence ();
10522 top = get_insns ();
10523 pop_topmost_sequence ();
10524 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10526 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10530 if (FIND_REG_INC_NOTE (insn, reg))
10532 else if (GET_CODE (insn) == CALL_INSN
10533 && !SIBLING_CALL_P (insn))
10535 else if (set_of (reg, insn) != NULL_RTX
10536 && !prologue_epilogue_contains (insn))
10543 /* Add a REG_MAYBE_DEAD note to the insn. */
10545 rs6000_maybe_dead (insn)
10548 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10553 /* Emit instructions needed to load the TOC register.
10554 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10555 a constant pool; or for SVR4 -fpic. */
10558 rs6000_emit_load_toc_table (fromprolog)
10562 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10564 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10566 rtx temp = (fromprolog
10567 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10568 : gen_reg_rtx (Pmode));
10569 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10571 rs6000_maybe_dead (insn);
10572 insn = emit_move_insn (dest, temp);
10574 rs6000_maybe_dead (insn);
10576 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10579 rtx tempLR = (fromprolog
10580 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10581 : gen_reg_rtx (Pmode));
10582 rtx temp0 = (fromprolog
10583 ? gen_rtx_REG (Pmode, 0)
10584 : gen_reg_rtx (Pmode));
10587 /* possibly create the toc section */
10588 if (! toc_initialized)
10591 function_section (current_function_decl);
10598 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10599 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10601 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10602 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10604 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10606 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10607 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10614 static int reload_toc_labelno = 0;
10616 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10618 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10619 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10621 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10622 emit_move_insn (dest, tempLR);
10623 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10625 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10627 rs6000_maybe_dead (insn);
10629 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10631 /* This is for AIX code running in non-PIC ELF32. */
10634 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10635 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10637 insn = emit_insn (gen_elf_high (dest, realsym));
10639 rs6000_maybe_dead (insn);
10640 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10642 rs6000_maybe_dead (insn);
10644 else if (DEFAULT_ABI == ABI_AIX)
10647 insn = emit_insn (gen_load_toc_aix_si (dest));
10649 insn = emit_insn (gen_load_toc_aix_di (dest));
10651 rs6000_maybe_dead (insn);
10658 get_TOC_alias_set ()
10660 static int set = -1;
10662 set = new_alias_set ();
10666 /* This returns nonzero if the current function uses the TOC. This is
10667 determined by the presence of (unspec ... UNSPEC_TOC) or
10668 use (unspec ... UNSPEC_TOC), which are generated by the various
10669 load_toc_* patterns. */
10676 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10679 rtx pat = PATTERN (insn);
10682 if (GET_CODE (pat) == PARALLEL)
10683 for (i = 0; i < XVECLEN (pat, 0); i++)
10685 rtx sub = XVECEXP (pat, 0, i);
10686 if (GET_CODE (sub) == USE)
10688 sub = XEXP (sub, 0);
10689 if (GET_CODE (sub) == UNSPEC
10690 && XINT (sub, 1) == UNSPEC_TOC)
10699 create_TOC_reference (symbol)
10702 return gen_rtx_PLUS (Pmode,
10703 gen_rtx_REG (Pmode, TOC_REGISTER),
10704 gen_rtx_CONST (Pmode,
10705 gen_rtx_MINUS (Pmode, symbol,
10706 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10709 /* __throw will restore its own return address to be the same as the
10710 return address of the function that the throw is being made to.
10711 This is unfortunate, because we want to check the original
10712 return address to see if we need to restore the TOC.
10713 So we have to squirrel it away here.
10714 This is used only in compiling __throw and __rethrow.
10716 Most of this code should be removed by CSE. */
10717 static rtx insn_after_throw;
10719 /* This does the saving... */
10721 rs6000_aix_emit_builtin_unwind_init ()
10724 rtx stack_top = gen_reg_rtx (Pmode);
10725 rtx opcode_addr = gen_reg_rtx (Pmode);
10727 insn_after_throw = gen_reg_rtx (SImode);
10729 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10730 emit_move_insn (stack_top, mem);
10732 mem = gen_rtx_MEM (Pmode,
10733 gen_rtx_PLUS (Pmode, stack_top,
10734 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10735 emit_move_insn (opcode_addr, mem);
10736 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10739 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10740 in _eh.o). Only used on AIX.
10742 The idea is that on AIX, function calls look like this:
10743 bl somefunction-trampoline
10747 somefunction-trampoline:
10749 ... load function address in the count register ...
10751 or like this, if the linker determines that this is not a cross-module call
10752 and so the TOC need not be restored:
10755 or like this, if the compiler could determine that this is not a
10758 now, the tricky bit here is that register 2 is saved and restored
10759 by the _linker_, so we can't readily generate debugging information
10760 for it. So we need to go back up the call chain looking at the
10761 insns at return addresses to see which calls saved the TOC register
10762 and so see where it gets restored from.
10764 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10765 just before the actual epilogue.
10767 On the bright side, this incurs no space or time overhead unless an
10768 exception is thrown, except for the extra code in libgcc.a.
10770 The parameter STACKSIZE is a register containing (at runtime)
10771 the amount to be popped off the stack in addition to the stack frame
10772 of this routine (which will be __throw or __rethrow, and so is
10773 guaranteed to have a stack frame). */
10776 rs6000_emit_eh_toc_restore (stacksize)
10780 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10781 rtx tocompare = gen_reg_rtx (SImode);
10782 rtx opcode = gen_reg_rtx (SImode);
10783 rtx opcode_addr = gen_reg_rtx (Pmode);
10785 rtx loop_start = gen_label_rtx ();
10786 rtx no_toc_restore_needed = gen_label_rtx ();
10787 rtx loop_exit = gen_label_rtx ();
10789 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10790 set_mem_alias_set (mem, rs6000_sr_alias_set);
10791 emit_move_insn (bottom_of_stack, mem);
10793 top_of_stack = expand_binop (Pmode, add_optab,
10794 bottom_of_stack, stacksize,
10795 NULL_RTX, 1, OPTAB_WIDEN);
10797 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10798 : 0xE8410028, SImode));
10800 if (insn_after_throw == NULL_RTX)
10802 emit_move_insn (opcode, insn_after_throw);
10804 emit_note (NOTE_INSN_LOOP_BEG);
10805 emit_label (loop_start);
10807 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10808 SImode, NULL_RTX, NULL_RTX,
10809 no_toc_restore_needed);
10811 mem = gen_rtx_MEM (Pmode,
10812 gen_rtx_PLUS (Pmode, bottom_of_stack,
10813 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10814 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10816 emit_label (no_toc_restore_needed);
10817 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10818 Pmode, NULL_RTX, NULL_RTX,
10821 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10822 set_mem_alias_set (mem, rs6000_sr_alias_set);
10823 emit_move_insn (bottom_of_stack, mem);
10825 mem = gen_rtx_MEM (Pmode,
10826 gen_rtx_PLUS (Pmode, bottom_of_stack,
10827 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10828 emit_move_insn (opcode_addr, mem);
10829 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10831 emit_note (NOTE_INSN_LOOP_CONT);
10832 emit_jump (loop_start);
10833 emit_note (NOTE_INSN_LOOP_END);
10834 emit_label (loop_exit);
10837 /* This ties together stack memory (MEM with an alias set of
10838 rs6000_sr_alias_set) and the change to the stack pointer. */
10841 rs6000_emit_stack_tie ()
10843 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10845 set_mem_alias_set (mem, rs6000_sr_alias_set);
10846 emit_insn (gen_stack_tie (mem));
10849 /* Emit the correct code for allocating stack space, as insns.
10850 If COPY_R12, make sure a copy of the old frame is left in r12.
10851 The generated code may use hard register 0 as a temporary. */
10854 rs6000_emit_allocate_stack (size, copy_r12)
10855 HOST_WIDE_INT size;
10859 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10860 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10861 rtx todec = GEN_INT (-size);
10863 if (current_function_limit_stack)
10865 if (REG_P (stack_limit_rtx)
10866 && REGNO (stack_limit_rtx) > 1
10867 && REGNO (stack_limit_rtx) <= 31)
10869 emit_insn (TARGET_32BIT
10870 ? gen_addsi3 (tmp_reg,
10873 : gen_adddi3 (tmp_reg,
10877 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10880 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10882 && DEFAULT_ABI == ABI_V4)
10884 rtx toload = gen_rtx_CONST (VOIDmode,
10885 gen_rtx_PLUS (Pmode,
10889 emit_insn (gen_elf_high (tmp_reg, toload));
10890 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10891 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10895 warning ("stack limit expression is not supported");
10898 if (copy_r12 || ! TARGET_UPDATE)
10899 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10905 /* Need a note here so that try_split doesn't get confused. */
10906 if (get_last_insn() == NULL_RTX)
10907 emit_note (NOTE_INSN_DELETED);
10908 insn = emit_move_insn (tmp_reg, todec);
10909 try_split (PATTERN (insn), insn, 0);
10913 insn = emit_insn (TARGET_32BIT
10914 ? gen_movsi_update (stack_reg, stack_reg,
10916 : gen_movdi_update (stack_reg, stack_reg,
10917 todec, stack_reg));
10921 insn = emit_insn (TARGET_32BIT
10922 ? gen_addsi3 (stack_reg, stack_reg, todec)
10923 : gen_adddi3 (stack_reg, stack_reg, todec));
10924 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10925 gen_rtx_REG (Pmode, 12));
10928 RTX_FRAME_RELATED_P (insn) = 1;
10930 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10931 gen_rtx_SET (VOIDmode, stack_reg,
10932 gen_rtx_PLUS (Pmode, stack_reg,
10937 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10938 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10939 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10940 deduce these equivalences by itself so it wasn't necessary to hold
10941 its hand so much. */
10944 rs6000_frame_related (insn, reg, val, reg2, rreg)
10953 /* copy_rtx will not make unique copies of registers, so we need to
10954 ensure we don't have unwanted sharing here. */
10956 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10959 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10961 real = copy_rtx (PATTERN (insn));
10963 if (reg2 != NULL_RTX)
10964 real = replace_rtx (real, reg2, rreg);
10966 real = replace_rtx (real, reg,
10967 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10968 STACK_POINTER_REGNUM),
10971 /* We expect that 'real' is either a SET or a PARALLEL containing
10972 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10973 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10975 if (GET_CODE (real) == SET)
10979 temp = simplify_rtx (SET_SRC (set));
10981 SET_SRC (set) = temp;
10982 temp = simplify_rtx (SET_DEST (set));
10984 SET_DEST (set) = temp;
10985 if (GET_CODE (SET_DEST (set)) == MEM)
10987 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10989 XEXP (SET_DEST (set), 0) = temp;
10992 else if (GET_CODE (real) == PARALLEL)
10995 for (i = 0; i < XVECLEN (real, 0); i++)
10996 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10998 rtx set = XVECEXP (real, 0, i);
11000 temp = simplify_rtx (SET_SRC (set));
11002 SET_SRC (set) = temp;
11003 temp = simplify_rtx (SET_DEST (set));
11005 SET_DEST (set) = temp;
11006 if (GET_CODE (SET_DEST (set)) == MEM)
11008 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11010 XEXP (SET_DEST (set), 0) = temp;
11012 RTX_FRAME_RELATED_P (set) = 1;
11019 real = spe_synthesize_frame_save (real);
11021 RTX_FRAME_RELATED_P (insn) = 1;
11022 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11027 /* Given an SPE frame note, return a PARALLEL of SETs with the
11028 original note, plus a synthetic register save. */
11031 spe_synthesize_frame_save (real)
11034 rtx synth, offset, reg, real2;
11036 if (GET_CODE (real) != SET
11037 || GET_MODE (SET_SRC (real)) != V2SImode)
11040 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11041 frame related note. The parallel contains a set of the register
11042 being saved, and another set to a synthetic register (n+1200).
11043 This is so we can differentiate between 64-bit and 32-bit saves.
11044 Words cannot describe this nastiness. */
11046 if (GET_CODE (SET_DEST (real)) != MEM
11047 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11048 || GET_CODE (SET_SRC (real)) != REG)
11052 (set (mem (plus (reg x) (const y)))
11055 (set (mem (plus (reg x) (const y+4)))
11059 real2 = copy_rtx (real);
11060 PUT_MODE (SET_DEST (real2), SImode);
11061 reg = SET_SRC (real2);
11062 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11063 synth = copy_rtx (real2);
11065 if (BYTES_BIG_ENDIAN)
11067 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11068 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11071 reg = SET_SRC (synth);
11073 synth = replace_rtx (synth, reg,
11074 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11076 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11077 synth = replace_rtx (synth, offset,
11078 GEN_INT (INTVAL (offset)
11079 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11081 RTX_FRAME_RELATED_P (synth) = 1;
11082 RTX_FRAME_RELATED_P (real2) = 1;
11083 if (BYTES_BIG_ENDIAN)
11084 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11086 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11091 /* Returns an insn that has a vrsave set operation with the
11092 appropriate CLOBBERs. */
11095 generate_set_vrsave (reg, info, epiloguep)
11097 rs6000_stack_t *info;
11101 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11102 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11105 = gen_rtx_SET (VOIDmode,
11107 gen_rtx_UNSPEC_VOLATILE (SImode,
11108 gen_rtvec (2, reg, vrsave),
11113 /* We need to clobber the registers in the mask so the scheduler
11114 does not move sets to VRSAVE before sets of AltiVec registers.
11116 However, if the function receives nonlocal gotos, reload will set
11117 all call saved registers live. We will end up with:
11119 (set (reg 999) (mem))
11120 (parallel [ (set (reg vrsave) (unspec blah))
11121 (clobber (reg 999))])
11123 The clobber will cause the store into reg 999 to be dead, and
11124 flow will attempt to delete an epilogue insn. In this case, we
11125 need an unspec use/set of the register. */
11127 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11128 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11130 if (!epiloguep || call_used_regs [i])
11131 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11132 gen_rtx_REG (V4SImode, i));
11135 rtx reg = gen_rtx_REG (V4SImode, i);
11138 = gen_rtx_SET (VOIDmode,
11140 gen_rtx_UNSPEC (V4SImode,
11141 gen_rtvec (1, reg), 27));
11145 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11147 for (i = 0; i < nclobs; ++i)
11148 XVECEXP (insn, 0, i) = clobs[i];
11153 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11154 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11157 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11160 enum machine_mode mode;
11161 unsigned int regno;
11165 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11166 rtx replacea, replaceb;
11168 int_rtx = GEN_INT (offset);
11170 /* Some cases that need register indexed addressing. */
11171 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11173 && SPE_VECTOR_MODE (mode)
11174 && !SPE_CONST_OFFSET_OK (offset)))
11176 /* Whomever calls us must make sure r11 is available in the
11177 flow path of instructions in the prologue. */
11178 offset_rtx = gen_rtx_REG (Pmode, 11);
11179 emit_move_insn (offset_rtx, int_rtx);
11181 replacea = offset_rtx;
11182 replaceb = int_rtx;
11186 offset_rtx = int_rtx;
11187 replacea = NULL_RTX;
11188 replaceb = NULL_RTX;
11191 reg = gen_rtx_REG (mode, regno);
11192 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11193 mem = gen_rtx_MEM (mode, addr);
11194 set_mem_alias_set (mem, rs6000_sr_alias_set);
11196 insn = emit_move_insn (mem, reg);
11198 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11201 /* Emit an offset memory reference suitable for a frame store, while
11202 converting to a valid addressing mode. */
11205 gen_frame_mem_offset (mode, reg, offset)
11206 enum machine_mode mode;
11210 rtx int_rtx, offset_rtx;
11212 int_rtx = GEN_INT (offset);
11214 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11216 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11217 emit_move_insn (offset_rtx, int_rtx);
11220 offset_rtx = int_rtx;
11222 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11225 /* Emit function prologue as insns. */
11228 rs6000_emit_prologue ()
11230 rs6000_stack_t *info = rs6000_stack_info ();
11231 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11232 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11233 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11234 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11235 rtx frame_reg_rtx = sp_reg_rtx;
11236 rtx cr_save_rtx = NULL;
11238 int saving_FPRs_inline;
11239 int using_store_multiple;
11240 HOST_WIDE_INT sp_offset = 0;
11242 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11244 reg_mode = V2SImode;
11248 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11249 && (!TARGET_SPE_ABI
11250 || info->spe_64bit_regs_used == 0)
11251 && info->first_gp_reg_save < 31);
11252 saving_FPRs_inline = (info->first_fp_reg_save == 64
11253 || FP_SAVE_INLINE (info->first_fp_reg_save));
11255 /* For V.4, update stack before we do any saving and set back pointer. */
11256 if (info->push_p && DEFAULT_ABI == ABI_V4)
11258 if (info->total_size < 32767)
11259 sp_offset = info->total_size;
11261 frame_reg_rtx = frame_ptr_rtx;
11262 rs6000_emit_allocate_stack (info->total_size,
11263 (frame_reg_rtx != sp_reg_rtx
11264 && (info->cr_save_p
11266 || info->first_fp_reg_save < 64
11267 || info->first_gp_reg_save < 32
11269 if (frame_reg_rtx != sp_reg_rtx)
11270 rs6000_emit_stack_tie ();
11273 /* Save AltiVec registers if needed. */
11274 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11278 /* There should be a non inline version of this, for when we
11279 are saving lots of vector registers. */
11280 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11281 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11283 rtx areg, savereg, mem;
11286 offset = info->altivec_save_offset + sp_offset
11287 + 16 * (i - info->first_altivec_reg_save);
11289 savereg = gen_rtx_REG (V4SImode, i);
11291 areg = gen_rtx_REG (Pmode, 0);
11292 emit_move_insn (areg, GEN_INT (offset));
11294 /* AltiVec addressing mode is [reg+reg]. */
11295 mem = gen_rtx_MEM (V4SImode,
11296 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11298 set_mem_alias_set (mem, rs6000_sr_alias_set);
11300 insn = emit_move_insn (mem, savereg);
11302 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11303 areg, GEN_INT (offset));
11307 /* VRSAVE is a bit vector representing which AltiVec registers
11308 are used. The OS uses this to determine which vector
11309 registers to save on a context switch. We need to save
11310 VRSAVE on the stack frame, add whatever AltiVec registers we
11311 used in this function, and do the corresponding magic in the
11314 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11316 rtx reg, mem, vrsave;
11319 /* Get VRSAVE onto a GPR. */
11320 reg = gen_rtx_REG (SImode, 12);
11321 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11323 emit_insn (gen_get_vrsave_internal (reg));
11325 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11328 offset = info->vrsave_save_offset + sp_offset;
11330 = gen_rtx_MEM (SImode,
11331 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11332 set_mem_alias_set (mem, rs6000_sr_alias_set);
11333 insn = emit_move_insn (mem, reg);
11335 /* Include the registers in the mask. */
11336 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11338 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11341 /* If we use the link register, get it into r0. */
11342 if (info->lr_save_p)
11343 emit_move_insn (gen_rtx_REG (Pmode, 0),
11344 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11346 /* If we need to save CR, put it into r12. */
11347 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11349 cr_save_rtx = gen_rtx_REG (SImode, 12);
11350 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11353 /* Do any required saving of fpr's. If only one or two to save, do
11354 it ourselves. Otherwise, call function. */
11355 if (saving_FPRs_inline)
11358 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11359 if ((regs_ever_live[info->first_fp_reg_save+i]
11360 && ! call_used_regs[info->first_fp_reg_save+i]))
11361 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11362 info->first_fp_reg_save + i,
11363 info->fp_save_offset + sp_offset + 8 * i,
11366 else if (info->first_fp_reg_save != 64)
11370 const char *alloc_rname;
11372 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11374 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11375 gen_rtx_REG (Pmode,
11376 LINK_REGISTER_REGNUM));
11377 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11378 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11379 alloc_rname = ggc_strdup (rname);
11380 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11381 gen_rtx_SYMBOL_REF (Pmode,
11383 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11385 rtx addr, reg, mem;
11386 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11387 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11388 GEN_INT (info->fp_save_offset
11389 + sp_offset + 8*i));
11390 mem = gen_rtx_MEM (DFmode, addr);
11391 set_mem_alias_set (mem, rs6000_sr_alias_set);
11393 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11395 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11396 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11397 NULL_RTX, NULL_RTX);
11400 /* Save GPRs. This is done as a PARALLEL if we are using
11401 the store-multiple instructions. */
11402 if (using_store_multiple)
11406 p = rtvec_alloc (32 - info->first_gp_reg_save);
11407 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11409 rtx addr, reg, mem;
11410 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11411 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11412 GEN_INT (info->gp_save_offset
11415 mem = gen_rtx_MEM (reg_mode, addr);
11416 set_mem_alias_set (mem, rs6000_sr_alias_set);
11418 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11420 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11421 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11422 NULL_RTX, NULL_RTX);
11427 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11428 if ((regs_ever_live[info->first_gp_reg_save+i]
11429 && ! call_used_regs[info->first_gp_reg_save+i])
11430 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11431 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11432 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11434 rtx addr, reg, mem;
11435 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11437 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11439 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11442 if (!SPE_CONST_OFFSET_OK (offset))
11444 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11445 emit_move_insn (b, GEN_INT (offset));
11448 b = GEN_INT (offset);
11450 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11451 mem = gen_rtx_MEM (V2SImode, addr);
11452 set_mem_alias_set (mem, rs6000_sr_alias_set);
11453 insn = emit_move_insn (mem, reg);
11455 if (GET_CODE (b) == CONST_INT)
11456 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11457 NULL_RTX, NULL_RTX);
11459 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11460 b, GEN_INT (offset));
11464 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11465 GEN_INT (info->gp_save_offset
11468 mem = gen_rtx_MEM (reg_mode, addr);
11469 set_mem_alias_set (mem, rs6000_sr_alias_set);
11471 insn = emit_move_insn (mem, reg);
11472 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11473 NULL_RTX, NULL_RTX);
11478 /* ??? There's no need to emit actual instructions here, but it's the
11479 easiest way to get the frame unwind information emitted. */
11480 if (current_function_calls_eh_return)
11482 unsigned int i, regno;
11486 regno = EH_RETURN_DATA_REGNO (i);
11487 if (regno == INVALID_REGNUM)
11490 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11491 info->ehrd_offset + sp_offset
11492 + reg_size * (int) i,
11497 /* Save lr if we used it. */
11498 if (info->lr_save_p)
11500 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11501 GEN_INT (info->lr_save_offset + sp_offset));
11502 rtx reg = gen_rtx_REG (Pmode, 0);
11503 rtx mem = gen_rtx_MEM (Pmode, addr);
11504 /* This should not be of rs6000_sr_alias_set, because of
11505 __builtin_return_address. */
11507 insn = emit_move_insn (mem, reg);
11508 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11509 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11512 /* Save CR if we use any that must be preserved. */
11513 if (info->cr_save_p)
11515 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11516 GEN_INT (info->cr_save_offset + sp_offset));
11517 rtx mem = gen_rtx_MEM (SImode, addr);
11519 set_mem_alias_set (mem, rs6000_sr_alias_set);
11521 /* If r12 was used to hold the original sp, copy cr into r0 now
11523 if (REGNO (frame_reg_rtx) == 12)
11525 cr_save_rtx = gen_rtx_REG (SImode, 0);
11526 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11528 insn = emit_move_insn (mem, cr_save_rtx);
11530 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11531 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11532 But that's OK. All we have to do is specify that _one_ condition
11533 code register is saved in this stack slot. The thrower's epilogue
11534 will then restore all the call-saved registers.
11535 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11536 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11537 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11540 /* Update stack and set back pointer unless this is V.4,
11541 for which it was done previously. */
11542 if (info->push_p && DEFAULT_ABI != ABI_V4)
11543 rs6000_emit_allocate_stack (info->total_size, FALSE);
11545 /* Set frame pointer, if needed. */
11546 if (frame_pointer_needed)
11548 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11550 RTX_FRAME_RELATED_P (insn) = 1;
11553 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11554 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11555 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11556 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11558 /* If emit_load_toc_table will use the link register, we need to save
11559 it. We use R12 for this purpose because emit_load_toc_table
11560 can use register 0. This allows us to use a plain 'blr' to return
11561 from the procedure more often. */
11562 int save_LR_around_toc_setup = (TARGET_ELF
11563 && DEFAULT_ABI != ABI_AIX
11565 && ! info->lr_save_p
11566 && EXIT_BLOCK_PTR->pred != NULL);
11567 if (save_LR_around_toc_setup)
11569 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11570 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11571 rs6000_emit_load_toc_table (TRUE);
11572 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11575 rs6000_emit_load_toc_table (TRUE);
11579 if (DEFAULT_ABI == ABI_DARWIN
11580 && flag_pic && current_function_uses_pic_offset_table)
11582 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11583 const char *picbase = machopic_function_base_name ();
11584 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11586 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11588 rs6000_maybe_dead (
11589 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11590 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11595 /* Write function prologue. */
11598 rs6000_output_function_prologue (file, size)
11600 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11602 rs6000_stack_t *info = rs6000_stack_info ();
11604 if (TARGET_DEBUG_STACK)
11605 debug_stack_info (info);
11607 /* Write .extern for any function we will call to save and restore
11609 if (info->first_fp_reg_save < 64
11610 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11611 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11612 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11613 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11614 RESTORE_FP_SUFFIX);
11616 /* Write .extern for AIX common mode routines, if needed. */
11617 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11619 fputs ("\t.extern __mulh\n", file);
11620 fputs ("\t.extern __mull\n", file);
11621 fputs ("\t.extern __divss\n", file);
11622 fputs ("\t.extern __divus\n", file);
11623 fputs ("\t.extern __quoss\n", file);
11624 fputs ("\t.extern __quous\n", file);
11625 common_mode_defined = 1;
11628 if (! HAVE_prologue)
11632 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11633 the "toplevel" insn chain. */
11634 emit_note (NOTE_INSN_DELETED);
11635 rs6000_emit_prologue ();
11636 emit_note (NOTE_INSN_DELETED);
11638 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11642 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11644 INSN_ADDRESSES_NEW (insn, addr);
11649 if (TARGET_DEBUG_STACK)
11650 debug_rtx_list (get_insns (), 100);
11651 final (get_insns (), file, FALSE, FALSE);
11655 rs6000_pic_labelno++;
11658 /* Emit function epilogue as insns.
11660 At present, dwarf2out_frame_debug_expr doesn't understand
11661 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11662 anywhere in the epilogue. Most of the insns below would in any case
11663 need special notes to explain where r11 is in relation to the stack. */
11666 rs6000_emit_epilogue (sibcall)
11669 rs6000_stack_t *info;
11670 int restoring_FPRs_inline;
11671 int using_load_multiple;
11672 int using_mfcr_multiple;
11673 int use_backchain_to_restore_sp;
11675 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11676 rtx frame_reg_rtx = sp_reg_rtx;
11677 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11678 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11681 info = rs6000_stack_info ();
11683 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11685 reg_mode = V2SImode;
11689 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11690 && (!TARGET_SPE_ABI
11691 || info->spe_64bit_regs_used == 0)
11692 && info->first_gp_reg_save < 31);
11693 restoring_FPRs_inline = (sibcall
11694 || current_function_calls_eh_return
11695 || info->first_fp_reg_save == 64
11696 || FP_SAVE_INLINE (info->first_fp_reg_save));
11697 use_backchain_to_restore_sp = (frame_pointer_needed
11698 || current_function_calls_alloca
11699 || info->total_size > 32767);
11700 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11701 || rs6000_cpu == PROCESSOR_PPC603
11702 || rs6000_cpu == PROCESSOR_PPC750
11705 /* If we have a frame pointer, a call to alloca, or a large stack
11706 frame, restore the old stack pointer using the backchain. Otherwise,
11707 we know what size to update it with. */
11708 if (use_backchain_to_restore_sp)
11710 /* Under V.4, don't reset the stack pointer until after we're done
11711 loading the saved registers. */
11712 if (DEFAULT_ABI == ABI_V4)
11713 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11715 emit_move_insn (frame_reg_rtx,
11716 gen_rtx_MEM (Pmode, sp_reg_rtx));
11719 else if (info->push_p)
11721 if (DEFAULT_ABI == ABI_V4)
11722 sp_offset = info->total_size;
11725 emit_insn (TARGET_32BIT
11726 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11727 GEN_INT (info->total_size))
11728 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11729 GEN_INT (info->total_size)));
11733 /* Restore AltiVec registers if needed. */
11734 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11738 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11739 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11741 rtx addr, areg, mem;
11743 areg = gen_rtx_REG (Pmode, 0);
11745 (areg, GEN_INT (info->altivec_save_offset
11747 + 16 * (i - info->first_altivec_reg_save)));
11749 /* AltiVec addressing mode is [reg+reg]. */
11750 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11751 mem = gen_rtx_MEM (V4SImode, addr);
11752 set_mem_alias_set (mem, rs6000_sr_alias_set);
11754 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11758 /* Restore VRSAVE if needed. */
11759 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11761 rtx addr, mem, reg;
11763 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11764 GEN_INT (info->vrsave_save_offset + sp_offset));
11765 mem = gen_rtx_MEM (SImode, addr);
11766 set_mem_alias_set (mem, rs6000_sr_alias_set);
11767 reg = gen_rtx_REG (SImode, 12);
11768 emit_move_insn (reg, mem);
11770 emit_insn (generate_set_vrsave (reg, info, 1));
11773 /* Get the old lr if we saved it. */
11774 if (info->lr_save_p)
11776 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11777 info->lr_save_offset + sp_offset);
11779 set_mem_alias_set (mem, rs6000_sr_alias_set);
11781 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11784 /* Get the old cr if we saved it. */
11785 if (info->cr_save_p)
11787 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11788 GEN_INT (info->cr_save_offset + sp_offset));
11789 rtx mem = gen_rtx_MEM (SImode, addr);
11791 set_mem_alias_set (mem, rs6000_sr_alias_set);
11793 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11796 /* Set LR here to try to overlap restores below. */
11797 if (info->lr_save_p)
11798 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11799 gen_rtx_REG (Pmode, 0));
11801 /* Load exception handler data registers, if needed. */
11802 if (current_function_calls_eh_return)
11804 unsigned int i, regno;
11810 regno = EH_RETURN_DATA_REGNO (i);
11811 if (regno == INVALID_REGNUM)
11814 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11815 info->ehrd_offset + sp_offset
11816 + reg_size * (int) i);
11817 set_mem_alias_set (mem, rs6000_sr_alias_set);
11819 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11823 /* Restore GPRs. This is done as a PARALLEL if we are using
11824 the load-multiple instructions. */
11825 if (using_load_multiple)
11828 p = rtvec_alloc (32 - info->first_gp_reg_save);
11829 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11831 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11832 GEN_INT (info->gp_save_offset
11835 rtx mem = gen_rtx_MEM (reg_mode, addr);
11837 set_mem_alias_set (mem, rs6000_sr_alias_set);
11840 gen_rtx_SET (VOIDmode,
11841 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11844 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11847 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11848 if ((regs_ever_live[info->first_gp_reg_save+i]
11849 && ! call_used_regs[info->first_gp_reg_save+i])
11850 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11851 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11852 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11854 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11855 GEN_INT (info->gp_save_offset
11858 rtx mem = gen_rtx_MEM (reg_mode, addr);
11860 /* Restore 64-bit quantities for SPE. */
11861 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11863 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11866 if (!SPE_CONST_OFFSET_OK (offset))
11868 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11869 emit_move_insn (b, GEN_INT (offset));
11872 b = GEN_INT (offset);
11874 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11875 mem = gen_rtx_MEM (V2SImode, addr);
11878 set_mem_alias_set (mem, rs6000_sr_alias_set);
11880 emit_move_insn (gen_rtx_REG (reg_mode,
11881 info->first_gp_reg_save + i), mem);
11884 /* Restore fpr's if we need to do it without calling a function. */
11885 if (restoring_FPRs_inline)
11886 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11887 if ((regs_ever_live[info->first_fp_reg_save+i]
11888 && ! call_used_regs[info->first_fp_reg_save+i]))
11891 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11892 GEN_INT (info->fp_save_offset
11895 mem = gen_rtx_MEM (DFmode, addr);
11896 set_mem_alias_set (mem, rs6000_sr_alias_set);
11898 emit_move_insn (gen_rtx_REG (DFmode,
11899 info->first_fp_reg_save + i),
11903 /* If we saved cr, restore it here. Just those that were used. */
11904 if (info->cr_save_p)
11906 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11909 if (using_mfcr_multiple)
11911 for (i = 0; i < 8; i++)
11912 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11918 if (using_mfcr_multiple && count > 1)
11923 p = rtvec_alloc (count);
11926 for (i = 0; i < 8; i++)
11927 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11929 rtvec r = rtvec_alloc (2);
11930 RTVEC_ELT (r, 0) = r12_rtx;
11931 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11932 RTVEC_ELT (p, ndx) =
11933 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11934 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11937 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11942 for (i = 0; i < 8; i++)
11943 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11945 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11951 /* If this is V.4, unwind the stack pointer after all of the loads
11952 have been done. We need to emit a block here so that sched
11953 doesn't decide to move the sp change before the register restores
11954 (which may not have any obvious dependency on the stack). This
11955 doesn't hurt performance, because there is no scheduling that can
11956 be done after this point. */
11957 if (DEFAULT_ABI == ABI_V4)
11959 if (frame_reg_rtx != sp_reg_rtx)
11960 rs6000_emit_stack_tie ();
11962 if (use_backchain_to_restore_sp)
11964 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11966 else if (sp_offset != 0)
11968 emit_insn (TARGET_32BIT
11969 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11970 GEN_INT (sp_offset))
11971 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11972 GEN_INT (sp_offset)));
11976 if (current_function_calls_eh_return)
11978 rtx sa = EH_RETURN_STACKADJ_RTX;
11979 emit_insn (TARGET_32BIT
11980 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11981 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11987 if (! restoring_FPRs_inline)
11988 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11990 p = rtvec_alloc (2);
11992 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11993 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11994 gen_rtx_REG (Pmode,
11995 LINK_REGISTER_REGNUM));
11997 /* If we have to restore more than two FP registers, branch to the
11998 restore function. It will return to our caller. */
11999 if (! restoring_FPRs_inline)
12003 const char *alloc_rname;
12005 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12006 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12007 alloc_rname = ggc_strdup (rname);
12008 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12009 gen_rtx_SYMBOL_REF (Pmode,
12012 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12015 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12016 GEN_INT (info->fp_save_offset + 8*i));
12017 mem = gen_rtx_MEM (DFmode, addr);
12018 set_mem_alias_set (mem, rs6000_sr_alias_set);
12020 RTVEC_ELT (p, i+3) =
12021 gen_rtx_SET (VOIDmode,
12022 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12027 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12031 /* Write function epilogue. */
12034 rs6000_output_function_epilogue (file, size)
12036 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
12038 rs6000_stack_t *info = rs6000_stack_info ();
12040 if (! HAVE_epilogue)
12042 rtx insn = get_last_insn ();
12043 /* If the last insn was a BARRIER, we don't have to write anything except
12044 the trace table. */
12045 if (GET_CODE (insn) == NOTE)
12046 insn = prev_nonnote_insn (insn);
12047 if (insn == 0 || GET_CODE (insn) != BARRIER)
12049 /* This is slightly ugly, but at least we don't have two
12050 copies of the epilogue-emitting code. */
12053 /* A NOTE_INSN_DELETED is supposed to be at the start
12054 and end of the "toplevel" insn chain. */
12055 emit_note (NOTE_INSN_DELETED);
12056 rs6000_emit_epilogue (FALSE);
12057 emit_note (NOTE_INSN_DELETED);
12059 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12063 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12065 INSN_ADDRESSES_NEW (insn, addr);
12070 if (TARGET_DEBUG_STACK)
12071 debug_rtx_list (get_insns (), 100);
12072 final (get_insns (), file, FALSE, FALSE);
12077 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12080 We don't output a traceback table if -finhibit-size-directive was
12081 used. The documentation for -finhibit-size-directive reads
12082 ``don't output a @code{.size} assembler directive, or anything
12083 else that would cause trouble if the function is split in the
12084 middle, and the two halves are placed at locations far apart in
12085 memory.'' The traceback table has this property, since it
12086 includes the offset from the start of the function to the
12087 traceback table itself.
12089 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12090 different traceback table. */
12091 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12092 && rs6000_traceback != traceback_none)
12094 const char *fname = NULL;
12095 const char *language_string = lang_hooks.name;
12096 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12098 int optional_tbtab;
12100 if (rs6000_traceback == traceback_full)
12101 optional_tbtab = 1;
12102 else if (rs6000_traceback == traceback_part)
12103 optional_tbtab = 0;
12105 optional_tbtab = !optimize_size && !TARGET_ELF;
12107 if (optional_tbtab)
12109 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12110 while (*fname == '.') /* V.4 encodes . in the name */
12113 /* Need label immediately before tbtab, so we can compute
12114 its offset from the function start. */
12115 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12116 ASM_OUTPUT_LABEL (file, fname);
12119 /* The .tbtab pseudo-op can only be used for the first eight
12120 expressions, since it can't handle the possibly variable
12121 length fields that follow. However, if you omit the optional
12122 fields, the assembler outputs zeros for all optional fields
12123 anyways, giving each variable length field is minimum length
12124 (as defined in sys/debug.h). Thus we can not use the .tbtab
12125 pseudo-op at all. */
12127 /* An all-zero word flags the start of the tbtab, for debuggers
12128 that have to find it by searching forward from the entry
12129 point or from the current pc. */
12130 fputs ("\t.long 0\n", file);
12132 /* Tbtab format type. Use format type 0. */
12133 fputs ("\t.byte 0,", file);
12135 /* Language type. Unfortunately, there doesn't seem to be any
12136 official way to get this info, so we use language_string. C
12137 is 0. C++ is 9. No number defined for Obj-C, so use the
12138 value for C for now. There is no official value for Java,
12139 although IBM appears to be using 13. There is no official value
12140 for Chill, so we've chosen 44 pseudo-randomly. */
12141 if (! strcmp (language_string, "GNU C")
12142 || ! strcmp (language_string, "GNU Objective-C"))
12144 else if (! strcmp (language_string, "GNU F77"))
12146 else if (! strcmp (language_string, "GNU Ada"))
12148 else if (! strcmp (language_string, "GNU Pascal"))
12150 else if (! strcmp (language_string, "GNU C++"))
12152 else if (! strcmp (language_string, "GNU Java"))
12154 else if (! strcmp (language_string, "GNU CHILL"))
12158 fprintf (file, "%d,", i);
12160 /* 8 single bit fields: global linkage (not set for C extern linkage,
12161 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12162 from start of procedure stored in tbtab, internal function, function
12163 has controlled storage, function has no toc, function uses fp,
12164 function logs/aborts fp operations. */
12165 /* Assume that fp operations are used if any fp reg must be saved. */
12166 fprintf (file, "%d,",
12167 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12169 /* 6 bitfields: function is interrupt handler, name present in
12170 proc table, function calls alloca, on condition directives
12171 (controls stack walks, 3 bits), saves condition reg, saves
12173 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12174 set up as a frame pointer, even when there is no alloca call. */
12175 fprintf (file, "%d,",
12176 ((optional_tbtab << 6)
12177 | ((optional_tbtab & frame_pointer_needed) << 5)
12178 | (info->cr_save_p << 1)
12179 | (info->lr_save_p)));
12181 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12183 fprintf (file, "%d,",
12184 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12186 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12187 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12189 if (optional_tbtab)
12191 /* Compute the parameter info from the function decl argument
12194 int next_parm_info_bit = 31;
12196 for (decl = DECL_ARGUMENTS (current_function_decl);
12197 decl; decl = TREE_CHAIN (decl))
12199 rtx parameter = DECL_INCOMING_RTL (decl);
12200 enum machine_mode mode = GET_MODE (parameter);
12202 if (GET_CODE (parameter) == REG)
12204 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12210 if (mode == SFmode)
12212 else if (mode == DFmode || mode == TFmode)
12217 /* If only one bit will fit, don't or in this entry. */
12218 if (next_parm_info_bit > 0)
12219 parm_info |= (bits << (next_parm_info_bit - 1));
12220 next_parm_info_bit -= 2;
12224 fixed_parms += ((GET_MODE_SIZE (mode)
12225 + (UNITS_PER_WORD - 1))
12227 next_parm_info_bit -= 1;
12233 /* Number of fixed point parameters. */
12234 /* This is actually the number of words of fixed point parameters; thus
12235 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12236 fprintf (file, "%d,", fixed_parms);
12238 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12240 /* This is actually the number of fp registers that hold parameters;
12241 and thus the maximum value is 13. */
12242 /* Set parameters on stack bit if parameters are not in their original
12243 registers, regardless of whether they are on the stack? Xlc
12244 seems to set the bit when not optimizing. */
12245 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12247 if (! optional_tbtab)
12250 /* Optional fields follow. Some are variable length. */
12252 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12253 11 double float. */
12254 /* There is an entry for each parameter in a register, in the order that
12255 they occur in the parameter list. Any intervening arguments on the
12256 stack are ignored. If the list overflows a long (max possible length
12257 34 bits) then completely leave off all elements that don't fit. */
12258 /* Only emit this long if there was at least one parameter. */
12259 if (fixed_parms || float_parms)
12260 fprintf (file, "\t.long %d\n", parm_info);
12262 /* Offset from start of code to tb table. */
12263 fputs ("\t.long ", file);
12264 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12266 RS6000_OUTPUT_BASENAME (file, fname);
12268 assemble_name (file, fname);
12270 fputs ("-.", file);
12272 RS6000_OUTPUT_BASENAME (file, fname);
12274 assemble_name (file, fname);
12278 /* Interrupt handler mask. */
12279 /* Omit this long, since we never set the interrupt handler bit
12282 /* Number of CTL (controlled storage) anchors. */
12283 /* Omit this long, since the has_ctl bit is never set above. */
12285 /* Displacement into stack of each CTL anchor. */
12286 /* Omit this list of longs, because there are no CTL anchors. */
12288 /* Length of function name. */
12291 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12293 /* Function name. */
12294 assemble_string (fname, strlen (fname));
12296 /* Register for alloca automatic storage; this is always reg 31.
12297 Only emit this if the alloca bit was set above. */
12298 if (frame_pointer_needed)
12299 fputs ("\t.byte 31\n", file);
12301 fputs ("\t.align 2\n", file);
12305 /* A C compound statement that outputs the assembler code for a thunk
12306 function, used to implement C++ virtual function calls with
12307 multiple inheritance. The thunk acts as a wrapper around a virtual
12308 function, adjusting the implicit object parameter before handing
12309 control off to the real function.
12311 First, emit code to add the integer DELTA to the location that
12312 contains the incoming first argument. Assume that this argument
12313 contains a pointer, and is the one used to pass the `this' pointer
12314 in C++. This is the incoming argument *before* the function
12315 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12316 values of all other incoming arguments.
12318 After the addition, emit code to jump to FUNCTION, which is a
12319 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12320 not touch the return address. Hence returning from FUNCTION will
12321 return to whoever called the current `thunk'.
12323 The effect must be as if FUNCTION had been called directly with the
12324 adjusted first argument. This macro is responsible for emitting
12325 all of the code for a thunk function; output_function_prologue()
12326 and output_function_epilogue() are not invoked.
12328 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12329 been extracted from it.) It might possibly be useful on some
12330 targets, but probably not.
12332 If you do not define this macro, the target-independent code in the
12333 C++ frontend will generate a less efficient heavyweight thunk that
12334 calls FUNCTION instead of jumping to it. The generic approach does
12335 not support varargs. */
12338 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
12340 tree thunk_fndecl ATTRIBUTE_UNUSED;
12341 HOST_WIDE_INT delta;
12342 HOST_WIDE_INT vcall_offset;
12345 rtx this, insn, funexp;
12347 reload_completed = 1;
12348 epilogue_completed = 1;
12349 no_new_pseudos = 1;
12351 /* Mark the end of the (empty) prologue. */
12352 emit_note (NOTE_INSN_PROLOGUE_END);
12354 /* Find the "this" pointer. If the function returns a structure,
12355 the structure return pointer is in r3. */
12356 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
12357 this = gen_rtx_REG (Pmode, 4);
12359 this = gen_rtx_REG (Pmode, 3);
12361 /* Apply the constant offset, if required. */
12364 rtx delta_rtx = GEN_INT (delta);
12365 emit_insn (TARGET_32BIT
12366 ? gen_addsi3 (this, this, delta_rtx)
12367 : gen_adddi3 (this, this, delta_rtx));
12370 /* Apply the offset from the vtable, if required. */
12373 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12374 rtx tmp = gen_rtx_REG (Pmode, 12);
12376 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12377 emit_insn (TARGET_32BIT
12378 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12379 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12380 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12381 emit_insn (TARGET_32BIT
12382 ? gen_addsi3 (this, this, tmp)
12383 : gen_adddi3 (this, this, tmp));
12386 /* Generate a tail call to the target function. */
12387 if (!TREE_USED (function))
12389 assemble_external (function);
12390 TREE_USED (function) = 1;
12392 funexp = XEXP (DECL_RTL (function), 0);
12393 SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
12394 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12397 if (MACHOPIC_INDIRECT)
12398 funexp = machopic_indirect_call_target (funexp);
12401 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12402 generate sibcall RTL explicitly to avoid constraint abort. */
12403 insn = emit_call_insn (
12404 gen_rtx_PARALLEL (VOIDmode,
12406 gen_rtx_CALL (VOIDmode,
12407 funexp, const0_rtx),
12408 gen_rtx_USE (VOIDmode, const0_rtx),
12409 gen_rtx_USE (VOIDmode,
12410 gen_rtx_REG (SImode,
12411 LINK_REGISTER_REGNUM)),
12412 gen_rtx_RETURN (VOIDmode))));
12413 SIBLING_CALL_P (insn) = 1;
12416 /* Run just enough of rest_of_compilation to get the insns emitted.
12417 There's not really enough bulk here to make other passes such as
12418 instruction scheduling worth while. Note that use_thunk calls
12419 assemble_start_function and assemble_end_function. */
12420 insn = get_insns ();
12421 insn_locators_initialize ();
12422 shorten_branches (insn);
12423 final_start_function (insn, file, 1);
12424 final (insn, file, 1, 0);
12425 final_end_function ();
12427 reload_completed = 0;
12428 epilogue_completed = 0;
12429 no_new_pseudos = 0;
12432 /* A quick summary of the various types of 'constant-pool tables'
12435 Target Flags Name One table per
12436 AIX (none) AIX TOC object file
12437 AIX -mfull-toc AIX TOC object file
12438 AIX -mminimal-toc AIX minimal TOC translation unit
12439 SVR4/EABI (none) SVR4 SDATA object file
12440 SVR4/EABI -fpic SVR4 pic object file
12441 SVR4/EABI -fPIC SVR4 PIC translation unit
12442 SVR4/EABI -mrelocatable EABI TOC function
12443 SVR4/EABI -maix AIX TOC object file
12444 SVR4/EABI -maix -mminimal-toc
12445 AIX minimal TOC translation unit
12447 Name Reg. Set by entries contains:
12448 made by addrs? fp? sum?
12450 AIX TOC 2 crt0 as Y option option
12451 AIX minimal TOC 30 prolog gcc Y Y option
12452 SVR4 SDATA 13 crt0 gcc N Y N
12453 SVR4 pic 30 prolog ld Y not yet N
12454 SVR4 PIC 30 prolog gcc Y option option
12455 EABI TOC 30 prolog gcc Y option option
12459 /* Hash functions for the hash table. */
12462 rs6000_hash_constant (k)
12465 enum rtx_code code = GET_CODE (k);
12466 enum machine_mode mode = GET_MODE (k);
12467 unsigned result = (code << 3) ^ mode;
12468 const char *format;
12471 format = GET_RTX_FORMAT (code);
12472 flen = strlen (format);
12478 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12481 if (mode != VOIDmode)
12482 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12494 for (; fidx < flen; fidx++)
12495 switch (format[fidx])
12500 const char *str = XSTR (k, fidx);
12501 len = strlen (str);
12502 result = result * 613 + len;
12503 for (i = 0; i < len; i++)
12504 result = result * 613 + (unsigned) str[i];
12509 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12513 result = result * 613 + (unsigned) XINT (k, fidx);
12516 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12517 result = result * 613 + (unsigned) XWINT (k, fidx);
12521 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12522 result = result * 613 + (unsigned) (XWINT (k, fidx)
12536 toc_hash_function (hash_entry)
12537 const void * hash_entry;
12539 const struct toc_hash_struct *thc =
12540 (const struct toc_hash_struct *) hash_entry;
12541 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12544 /* Compare H1 and H2 for equivalence. */
12547 toc_hash_eq (h1, h2)
12551 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12552 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12554 if (((const struct toc_hash_struct *) h1)->key_mode
12555 != ((const struct toc_hash_struct *) h2)->key_mode)
12558 return rtx_equal_p (r1, r2);
12561 /* These are the names given by the C++ front-end to vtables, and
12562 vtable-like objects. Ideally, this logic should not be here;
12563 instead, there should be some programmatic way of inquiring as
12564 to whether or not an object is a vtable. */
12566 #define VTABLE_NAME_P(NAME) \
12567 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12568 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12569 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12570 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12573 rs6000_output_symbol_ref (file, x)
12577 /* Currently C++ toc references to vtables can be emitted before it
12578 is decided whether the vtable is public or private. If this is
12579 the case, then the linker will eventually complain that there is
12580 a reference to an unknown section. Thus, for vtables only,
12581 we emit the TOC reference to reference the symbol and not the
12583 const char *name = XSTR (x, 0);
12585 if (VTABLE_NAME_P (name))
12587 RS6000_OUTPUT_BASENAME (file, name);
12590 assemble_name (file, name);
12593 /* Output a TOC entry. We derive the entry name from what is being
12597 output_toc (file, x, labelno, mode)
12601 enum machine_mode mode;
12604 const char *name = buf;
12605 const char *real_name;
12612 /* When the linker won't eliminate them, don't output duplicate
12613 TOC entries (this happens on AIX if there is any kind of TOC,
12614 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12616 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12618 struct toc_hash_struct *h;
12621 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12622 time because GGC is not initialised at that point. */
12623 if (toc_hash_table == NULL)
12624 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12625 toc_hash_eq, NULL);
12627 h = ggc_alloc (sizeof (*h));
12629 h->key_mode = mode;
12630 h->labelno = labelno;
12632 found = htab_find_slot (toc_hash_table, h, 1);
12633 if (*found == NULL)
12635 else /* This is indeed a duplicate.
12636 Set this label equal to that label. */
12638 fputs ("\t.set ", file);
12639 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12640 fprintf (file, "%d,", labelno);
12641 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12642 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12648 /* If we're going to put a double constant in the TOC, make sure it's
12649 aligned properly when strict alignment is on. */
12650 if (GET_CODE (x) == CONST_DOUBLE
12651 && STRICT_ALIGNMENT
12652 && GET_MODE_BITSIZE (mode) >= 64
12653 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12654 ASM_OUTPUT_ALIGN (file, 3);
12657 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12659 /* Handle FP constants specially. Note that if we have a minimal
12660 TOC, things we put here aren't actually in the TOC, so we can allow
12662 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12664 REAL_VALUE_TYPE rv;
12667 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12668 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12672 if (TARGET_MINIMAL_TOC)
12673 fputs (DOUBLE_INT_ASM_OP, file);
12675 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12676 k[0] & 0xffffffff, k[1] & 0xffffffff,
12677 k[2] & 0xffffffff, k[3] & 0xffffffff);
12678 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12679 k[0] & 0xffffffff, k[1] & 0xffffffff,
12680 k[2] & 0xffffffff, k[3] & 0xffffffff);
12685 if (TARGET_MINIMAL_TOC)
12686 fputs ("\t.long ", file);
12688 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12689 k[0] & 0xffffffff, k[1] & 0xffffffff,
12690 k[2] & 0xffffffff, k[3] & 0xffffffff);
12691 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12692 k[0] & 0xffffffff, k[1] & 0xffffffff,
12693 k[2] & 0xffffffff, k[3] & 0xffffffff);
12697 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12699 REAL_VALUE_TYPE rv;
12702 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12703 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12707 if (TARGET_MINIMAL_TOC)
12708 fputs (DOUBLE_INT_ASM_OP, file);
12710 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12711 k[0] & 0xffffffff, k[1] & 0xffffffff);
12712 fprintf (file, "0x%lx%08lx\n",
12713 k[0] & 0xffffffff, k[1] & 0xffffffff);
12718 if (TARGET_MINIMAL_TOC)
12719 fputs ("\t.long ", file);
12721 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12722 k[0] & 0xffffffff, k[1] & 0xffffffff);
12723 fprintf (file, "0x%lx,0x%lx\n",
12724 k[0] & 0xffffffff, k[1] & 0xffffffff);
12728 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12730 REAL_VALUE_TYPE rv;
12733 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12734 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12738 if (TARGET_MINIMAL_TOC)
12739 fputs (DOUBLE_INT_ASM_OP, file);
12741 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12742 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12747 if (TARGET_MINIMAL_TOC)
12748 fputs ("\t.long ", file);
12750 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12751 fprintf (file, "0x%lx\n", l & 0xffffffff);
12755 else if (GET_MODE (x) == VOIDmode
12756 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12758 unsigned HOST_WIDE_INT low;
12759 HOST_WIDE_INT high;
12761 if (GET_CODE (x) == CONST_DOUBLE)
12763 low = CONST_DOUBLE_LOW (x);
12764 high = CONST_DOUBLE_HIGH (x);
12767 #if HOST_BITS_PER_WIDE_INT == 32
12770 high = (low & 0x80000000) ? ~0 : 0;
12774 low = INTVAL (x) & 0xffffffff;
12775 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12779 /* TOC entries are always Pmode-sized, but since this
12780 is a bigendian machine then if we're putting smaller
12781 integer constants in the TOC we have to pad them.
12782 (This is still a win over putting the constants in
12783 a separate constant pool, because then we'd have
12784 to have both a TOC entry _and_ the actual constant.)
12786 For a 32-bit target, CONST_INT values are loaded and shifted
12787 entirely within `low' and can be stored in one TOC entry. */
12789 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12790 abort ();/* It would be easy to make this work, but it doesn't now. */
12792 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12794 #if HOST_BITS_PER_WIDE_INT == 32
12795 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12796 POINTER_SIZE, &low, &high, 0);
12799 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12800 high = (HOST_WIDE_INT) low >> 32;
12807 if (TARGET_MINIMAL_TOC)
12808 fputs (DOUBLE_INT_ASM_OP, file);
12810 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12811 (long) high & 0xffffffff, (long) low & 0xffffffff);
12812 fprintf (file, "0x%lx%08lx\n",
12813 (long) high & 0xffffffff, (long) low & 0xffffffff);
12818 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12820 if (TARGET_MINIMAL_TOC)
12821 fputs ("\t.long ", file);
12823 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12824 (long) high & 0xffffffff, (long) low & 0xffffffff);
12825 fprintf (file, "0x%lx,0x%lx\n",
12826 (long) high & 0xffffffff, (long) low & 0xffffffff);
12830 if (TARGET_MINIMAL_TOC)
12831 fputs ("\t.long ", file);
12833 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12834 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12840 if (GET_CODE (x) == CONST)
12842 if (GET_CODE (XEXP (x, 0)) != PLUS)
12845 base = XEXP (XEXP (x, 0), 0);
12846 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12849 if (GET_CODE (base) == SYMBOL_REF)
12850 name = XSTR (base, 0);
12851 else if (GET_CODE (base) == LABEL_REF)
12852 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12853 else if (GET_CODE (base) == CODE_LABEL)
12854 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12858 real_name = (*targetm.strip_name_encoding) (name);
12859 if (TARGET_MINIMAL_TOC)
12860 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12863 fprintf (file, "\t.tc %s", real_name);
12866 fprintf (file, ".N%d", - offset);
12868 fprintf (file, ".P%d", offset);
12870 fputs ("[TC],", file);
12873 /* Currently C++ toc references to vtables can be emitted before it
12874 is decided whether the vtable is public or private. If this is
12875 the case, then the linker will eventually complain that there is
12876 a TOC reference to an unknown section. Thus, for vtables only,
12877 we emit the TOC reference to reference the symbol and not the
12879 if (VTABLE_NAME_P (name))
12881 RS6000_OUTPUT_BASENAME (file, name);
12883 fprintf (file, "%d", offset);
12884 else if (offset > 0)
12885 fprintf (file, "+%d", offset);
12888 output_addr_const (file, x);
12892 /* Output an assembler pseudo-op to write an ASCII string of N characters
12893 starting at P to FILE.
12895 On the RS/6000, we have to do this using the .byte operation and
12896 write out special characters outside the quoted string.
12897 Also, the assembler is broken; very long strings are truncated,
12898 so we must artificially break them up early. */
12901 output_ascii (file, p, n)
12907 int i, count_string;
12908 const char *for_string = "\t.byte \"";
12909 const char *for_decimal = "\t.byte ";
12910 const char *to_close = NULL;
12913 for (i = 0; i < n; i++)
12916 if (c >= ' ' && c < 0177)
12919 fputs (for_string, file);
12922 /* Write two quotes to get one. */
12930 for_decimal = "\"\n\t.byte ";
12934 if (count_string >= 512)
12936 fputs (to_close, file);
12938 for_string = "\t.byte \"";
12939 for_decimal = "\t.byte ";
12947 fputs (for_decimal, file);
12948 fprintf (file, "%d", c);
12950 for_string = "\n\t.byte \"";
12951 for_decimal = ", ";
12957 /* Now close the string if we have written one. Then end the line. */
12959 fputs (to_close, file);
12962 /* Generate a unique section name for FILENAME for a section type
12963 represented by SECTION_DESC. Output goes into BUF.
12965 SECTION_DESC can be any string, as long as it is different for each
12966 possible section type.
12968 We name the section in the same manner as xlc. The name begins with an
12969 underscore followed by the filename (after stripping any leading directory
12970 names) with the last period replaced by the string SECTION_DESC. If
12971 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12975 rs6000_gen_section_name (buf, filename, section_desc)
12977 const char *filename;
12978 const char *section_desc;
12980 const char *q, *after_last_slash, *last_period = 0;
12984 after_last_slash = filename;
12985 for (q = filename; *q; q++)
12988 after_last_slash = q + 1;
12989 else if (*q == '.')
12993 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12994 *buf = (char *) xmalloc (len);
12999 for (q = after_last_slash; *q; q++)
13001 if (q == last_period)
13003 strcpy (p, section_desc);
13004 p += strlen (section_desc);
13008 else if (ISALNUM (*q))
13012 if (last_period == 0)
13013 strcpy (p, section_desc);
13018 /* Emit profile function. */
13021 output_profile_hook (labelno)
13022 int labelno ATTRIBUTE_UNUSED;
13024 if (TARGET_PROFILE_KERNEL)
13027 if (DEFAULT_ABI == ABI_AIX)
13029 #ifndef NO_PROFILE_COUNTERS
13030 # define NO_PROFILE_COUNTERS 0
13032 if (NO_PROFILE_COUNTERS)
13033 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13037 const char *label_name;
13040 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13041 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13042 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13044 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13048 else if (DEFAULT_ABI == ABI_DARWIN)
13050 const char *mcount_name = RS6000_MCOUNT;
13051 int caller_addr_regno = LINK_REGISTER_REGNUM;
13053 /* Be conservative and always set this, at least for now. */
13054 current_function_uses_pic_offset_table = 1;
13057 /* For PIC code, set up a stub and collect the caller's address
13058 from r0, which is where the prologue puts it. */
13059 if (MACHOPIC_INDIRECT)
13061 mcount_name = machopic_stub_name (mcount_name);
13062 if (current_function_uses_pic_offset_table)
13063 caller_addr_regno = 0;
13066 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13068 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13072 /* Write function profiler code. */
13075 output_function_profiler (file, labelno)
13082 switch (DEFAULT_ABI)
13091 warning ("no profiling of 64-bit code for this ABI");
13094 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13095 fprintf (file, "\tmflr %s\n", reg_names[0]);
13098 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13099 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13100 reg_names[0], save_lr, reg_names[1]);
13101 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13102 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13103 assemble_name (file, buf);
13104 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13106 else if (flag_pic > 1)
13108 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13109 reg_names[0], save_lr, reg_names[1]);
13110 /* Now, we need to get the address of the label. */
13111 fputs ("\tbl 1f\n\t.long ", file);
13112 assemble_name (file, buf);
13113 fputs ("-.\n1:", file);
13114 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13115 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13116 reg_names[0], reg_names[11]);
13117 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13118 reg_names[0], reg_names[0], reg_names[11]);
13122 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13123 assemble_name (file, buf);
13124 fputs ("@ha\n", file);
13125 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13126 reg_names[0], save_lr, reg_names[1]);
13127 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13128 assemble_name (file, buf);
13129 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13132 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13133 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13138 if (!TARGET_PROFILE_KERNEL)
13140 /* Don't do anything, done in output_profile_hook (). */
13147 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13148 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13150 if (current_function_needs_context)
13152 asm_fprintf (file, "\tstd %s,24(%s)\n",
13153 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13154 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13155 asm_fprintf (file, "\tld %s,24(%s)\n",
13156 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13159 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13167 rs6000_use_dfa_pipeline_interface ()
13172 /* Power4 load update and store update instructions are cracked into a
13173 load or store and an integer insn which are executed in the same cycle.
13174 Branches have their own dispatch slot which does not count against the
13175 GCC issue rate, but it changes the program flow so there are no other
13176 instructions to issue in this cycle. */
13179 rs6000_variable_issue (stream, verbose, insn, more)
13180 FILE *stream ATTRIBUTE_UNUSED;
13181 int verbose ATTRIBUTE_UNUSED;
13185 if (GET_CODE (PATTERN (insn)) == USE
13186 || GET_CODE (PATTERN (insn)) == CLOBBER)
13189 if (rs6000_cpu == PROCESSOR_POWER4)
13191 enum attr_type type = get_attr_type (insn);
13192 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13193 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
13195 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13196 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13197 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13198 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13199 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13200 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13201 || type == TYPE_IDIV || type == TYPE_LDIV
13202 || type == TYPE_INSERT_WORD)
13203 return more > 2 ? more - 2 : 0;
13209 /* Adjust the cost of a scheduling dependency. Return the new cost of
13210 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13213 rs6000_adjust_cost (insn, link, dep_insn, cost)
13216 rtx dep_insn ATTRIBUTE_UNUSED;
13219 if (! recog_memoized (insn))
13222 if (REG_NOTE_KIND (link) != 0)
13225 if (REG_NOTE_KIND (link) == 0)
13227 /* Data dependency; DEP_INSN writes a register that INSN reads
13228 some cycles later. */
13229 switch (get_attr_type (insn))
13232 /* Tell the first scheduling pass about the latency between
13233 a mtctr and bctr (and mtlr and br/blr). The first
13234 scheduling pass will not know about this latency since
13235 the mtctr instruction, which has the latency associated
13236 to it, will be generated by reload. */
13237 return TARGET_POWER ? 5 : 4;
13239 /* Leave some extra cycles between a compare and its
13240 dependent branch, to inhibit expensive mispredicts. */
13241 if ((rs6000_cpu_attr == CPU_PPC603
13242 || rs6000_cpu_attr == CPU_PPC604
13243 || rs6000_cpu_attr == CPU_PPC604E
13244 || rs6000_cpu_attr == CPU_PPC620
13245 || rs6000_cpu_attr == CPU_PPC630
13246 || rs6000_cpu_attr == CPU_PPC750
13247 || rs6000_cpu_attr == CPU_PPC7400
13248 || rs6000_cpu_attr == CPU_PPC7450
13249 || rs6000_cpu_attr == CPU_POWER4)
13250 && recog_memoized (dep_insn)
13251 && (INSN_CODE (dep_insn) >= 0)
13252 && (get_attr_type (dep_insn) == TYPE_CMP
13253 || get_attr_type (dep_insn) == TYPE_COMPARE
13254 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13255 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13256 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13257 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13258 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13259 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13264 /* Fall out to return default cost. */
13270 /* A C statement (sans semicolon) to update the integer scheduling
13271 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
13272 INSN earlier, increase the priority to execute INSN later. Do not
13273 define this macro if you do not need to adjust the scheduling
13274 priorities of insns. */
13277 rs6000_adjust_priority (insn, priority)
13278 rtx insn ATTRIBUTE_UNUSED;
13281 /* On machines (like the 750) which have asymmetric integer units,
13282 where one integer unit can do multiply and divides and the other
13283 can't, reduce the priority of multiply/divide so it is scheduled
13284 before other integer operations. */
13287 if (! INSN_P (insn))
13290 if (GET_CODE (PATTERN (insn)) == USE)
13293 switch (rs6000_cpu_attr) {
13295 switch (get_attr_type (insn))
13302 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13303 priority, priority);
13304 if (priority >= 0 && priority < 0x01000000)
13314 /* Return how many instructions the machine can issue per cycle. */
13317 rs6000_issue_rate ()
13319 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13320 if (!reload_completed)
13323 switch (rs6000_cpu_attr) {
13324 case CPU_RIOS1: /* ? */
13326 case CPU_PPC601: /* ? */
13347 /* Return how many instructions to look ahead for better insn
13351 rs6000_use_sched_lookahead ()
13353 if (rs6000_cpu_attr == CPU_PPC8540)
13359 /* Length in units of the trampoline for entering a nested function. */
13362 rs6000_trampoline_size ()
13366 switch (DEFAULT_ABI)
13372 ret = (TARGET_32BIT) ? 12 : 24;
13377 ret = (TARGET_32BIT) ? 40 : 48;
13384 /* Emit RTL insns to initialize the variable parts of a trampoline.
13385 FNADDR is an RTX for the address of the function's pure code.
13386 CXT is an RTX for the static chain value for the function. */
13389 rs6000_initialize_trampoline (addr, fnaddr, cxt)
13394 enum machine_mode pmode = Pmode;
13395 int regsize = (TARGET_32BIT) ? 4 : 8;
13396 rtx ctx_reg = force_reg (pmode, cxt);
13398 switch (DEFAULT_ABI)
13403 /* Macros to shorten the code expansions below. */
13404 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13405 #define MEM_PLUS(addr,offset) \
13406 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13408 /* Under AIX, just build the 3 word function descriptor */
13411 rtx fn_reg = gen_reg_rtx (pmode);
13412 rtx toc_reg = gen_reg_rtx (pmode);
13413 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13414 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13415 emit_move_insn (MEM_DEREF (addr), fn_reg);
13416 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13417 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13421 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13424 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13425 FALSE, VOIDmode, 4,
13427 GEN_INT (rs6000_trampoline_size ()), SImode,
13437 /* Table of valid machine attributes. */
13439 const struct attribute_spec rs6000_attribute_table[] =
13441 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13442 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13443 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13444 { NULL, 0, 0, false, false, false, NULL }
13447 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13448 struct attribute_spec.handler. */
13451 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13454 tree args ATTRIBUTE_UNUSED;
13455 int flags ATTRIBUTE_UNUSED;
13456 bool *no_add_attrs;
13458 if (TREE_CODE (*node) != FUNCTION_TYPE
13459 && TREE_CODE (*node) != FIELD_DECL
13460 && TREE_CODE (*node) != TYPE_DECL)
13462 warning ("`%s' attribute only applies to functions",
13463 IDENTIFIER_POINTER (name));
13464 *no_add_attrs = true;
13470 /* Set longcall attributes on all functions declared when
13471 rs6000_default_long_calls is true. */
13473 rs6000_set_default_type_attributes (type)
13476 if (rs6000_default_long_calls
13477 && (TREE_CODE (type) == FUNCTION_TYPE
13478 || TREE_CODE (type) == METHOD_TYPE))
13479 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13481 TYPE_ATTRIBUTES (type));
13484 /* Return a reference suitable for calling a function with the
13485 longcall attribute. */
13488 rs6000_longcall_ref (call_ref)
13491 const char *call_name;
13494 if (GET_CODE (call_ref) != SYMBOL_REF)
13497 /* System V adds '.' to the internal name, so skip them. */
13498 call_name = XSTR (call_ref, 0);
13499 if (*call_name == '.')
13501 while (*call_name == '.')
13504 node = get_identifier (call_name);
13505 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13508 return force_reg (Pmode, call_ref);
13511 #ifdef USING_ELFOS_H
13513 /* A C statement or statements to switch to the appropriate section
13514 for output of RTX in mode MODE. You can assume that RTX is some
13515 kind of constant in RTL. The argument MODE is redundant except in
13516 the case of a `const_int' rtx. Select the section by calling
13517 `text_section' or one of the alternatives for other sections.
13519 Do not define this macro if you put all constants in the read-only
13523 rs6000_elf_select_rtx_section (mode, x, align)
13524 enum machine_mode mode;
13526 unsigned HOST_WIDE_INT align;
13528 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13531 default_elf_select_rtx_section (mode, x, align);
13534 /* A C statement or statements to switch to the appropriate
13535 section for output of DECL. DECL is either a `VAR_DECL' node
13536 or a constant of some sort. RELOC indicates whether forming
13537 the initial value of DECL requires link-time relocations. */
13540 rs6000_elf_select_section (decl, reloc, align)
13543 unsigned HOST_WIDE_INT align;
13545 /* Pretend that we're always building for a shared library when
13546 ABI_AIX, because otherwise we end up with dynamic relocations
13547 in read-only sections. This happens for function pointers,
13548 references to vtables in typeinfo, and probably other cases. */
13549 default_elf_select_section_1 (decl, reloc, align,
13550 flag_pic || DEFAULT_ABI == ABI_AIX);
13553 /* A C statement to build up a unique section name, expressed as a
13554 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13555 RELOC indicates whether the initial value of EXP requires
13556 link-time relocations. If you do not define this macro, GCC will use
13557 the symbol name prefixed by `.' as the section name. Note - this
13558 macro can now be called for uninitialized data items as well as
13559 initialized data and functions. */
13562 rs6000_elf_unique_section (decl, reloc)
13566 /* As above, pretend that we're always building for a shared library
13567 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13568 default_unique_section_1 (decl, reloc,
13569 flag_pic || DEFAULT_ABI == ABI_AIX);
13572 /* For a SYMBOL_REF, set generic flags and then perform some
13573 target-specific processing.
13575 When the AIX ABI is requested on a non-AIX system, replace the
13576 function name with the real name (with a leading .) rather than the
13577 function descriptor name. This saves a lot of overriding code to
13578 read the prefixes. */
13581 rs6000_elf_encode_section_info (decl, rtl, first)
13586 default_encode_section_info (decl, rtl, first);
13589 && TREE_CODE (decl) == FUNCTION_DECL
13591 && DEFAULT_ABI == ABI_AIX)
13593 rtx sym_ref = XEXP (rtl, 0);
13594 size_t len = strlen (XSTR (sym_ref, 0));
13595 char *str = alloca (len + 2);
13597 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13598 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13603 rs6000_elf_in_small_data_p (decl)
13606 if (rs6000_sdata == SDATA_NONE)
13609 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13611 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13612 if (strcmp (section, ".sdata") == 0
13613 || strcmp (section, ".sdata2") == 0
13614 || strcmp (section, ".sbss") == 0
13615 || strcmp (section, ".sbss2") == 0
13616 || strcmp (section, ".PPC.EMB.sdata0") == 0
13617 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13622 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13625 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13626 /* If it's not public, and we're not going to reference it there,
13627 there's no need to put it in the small data section. */
13628 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13635 #endif /* USING_ELFOS_H */
13638 /* Return a REG that occurs in ADDR with coefficient 1.
13639 ADDR can be effectively incremented by incrementing REG.
13641 r0 is special and we must not select it as an address
13642 register by this routine since our caller will try to
13643 increment the returned register via an "la" instruction. */
13646 find_addr_reg (addr)
13649 while (GET_CODE (addr) == PLUS)
13651 if (GET_CODE (XEXP (addr, 0)) == REG
13652 && REGNO (XEXP (addr, 0)) != 0)
13653 addr = XEXP (addr, 0);
13654 else if (GET_CODE (XEXP (addr, 1)) == REG
13655 && REGNO (XEXP (addr, 1)) != 0)
13656 addr = XEXP (addr, 1);
13657 else if (CONSTANT_P (XEXP (addr, 0)))
13658 addr = XEXP (addr, 1);
13659 else if (CONSTANT_P (XEXP (addr, 1)))
13660 addr = XEXP (addr, 0);
13664 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13670 rs6000_fatal_bad_address (op)
13673 fatal_insn ("bad address", op);
13679 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13680 reference and a constant. */
13683 symbolic_operand (op)
13686 switch (GET_CODE (op))
13693 return (GET_CODE (op) == SYMBOL_REF ||
13694 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13695 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13696 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13703 #ifdef RS6000_LONG_BRANCH
13705 static tree stub_list = 0;
13707 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13708 procedure calls to the linked list. */
13711 add_compiler_stub (label_name, function_name, line_number)
13713 tree function_name;
13716 tree stub = build_tree_list (function_name, label_name);
13717 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13718 TREE_CHAIN (stub) = stub_list;
13722 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13723 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13724 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13726 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13727 handling procedure calls from the linked list and initializes the
13731 output_compiler_stub ()
13734 char label_buf[256];
13738 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13740 fprintf (asm_out_file,
13741 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13743 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13744 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13745 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13746 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13748 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13750 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13753 label_buf[0] = '_';
13754 strcpy (label_buf+1,
13755 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13758 strcpy (tmp_buf, "lis r12,hi16(");
13759 strcat (tmp_buf, label_buf);
13760 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13761 strcat (tmp_buf, label_buf);
13762 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13763 output_asm_insn (tmp_buf, 0);
13765 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13766 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13767 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13768 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13774 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13775 already there or not. */
13778 no_previous_def (function_name)
13779 tree function_name;
13782 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13783 if (function_name == STUB_FUNCTION_NAME (stub))
13788 /* GET_PREV_LABEL gets the label name from the previous definition of
13792 get_prev_label (function_name)
13793 tree function_name;
13796 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13797 if (function_name == STUB_FUNCTION_NAME (stub))
13798 return STUB_LABEL_NAME (stub);
13802 /* INSN is either a function call or a millicode call. It may have an
13803 unconditional jump in its delay slot.
13805 CALL_DEST is the routine we are calling. */
13808 output_call (insn, call_dest, operand_number)
13811 int operand_number;
13813 static char buf[256];
13814 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13817 tree funname = get_identifier (XSTR (call_dest, 0));
13819 if (no_previous_def (funname))
13821 int line_number = 0;
13822 rtx label_rtx = gen_label_rtx ();
13823 char *label_buf, temp_buf[256];
13824 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13825 CODE_LABEL_NUMBER (label_rtx));
13826 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13827 labelname = get_identifier (label_buf);
13828 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13830 line_number = NOTE_LINE_NUMBER (insn);
13831 add_compiler_stub (labelname, funname, line_number);
13834 labelname = get_prev_label (funname);
13836 sprintf (buf, "jbsr %%z%d,%.246s",
13837 operand_number, IDENTIFIER_POINTER (labelname));
13842 sprintf (buf, "bl %%z%d", operand_number);
13847 #endif /* RS6000_LONG_BRANCH */
13849 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13851 const char *const symbol_ = (SYMBOL); \
13852 char *buffer_ = (BUF); \
13853 if (symbol_[0] == '"') \
13855 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13857 else if (name_needs_quotes(symbol_)) \
13859 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13863 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13868 /* Generate PIC and indirect symbol stubs. */
13871 machopic_output_stub (file, symb, stub)
13873 const char *symb, *stub;
13875 unsigned int length;
13876 char *symbol_name, *lazy_ptr_name;
13877 char *local_label_0;
13878 static int label = 0;
13880 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13881 symb = (*targetm.strip_name_encoding) (symb);
13885 length = strlen (symb);
13886 symbol_name = alloca (length + 32);
13887 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13889 lazy_ptr_name = alloca (length + 32);
13890 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13892 local_label_0 = alloca (length + 32);
13893 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13896 machopic_picsymbol_stub1_section ();
13898 machopic_symbol_stub1_section ();
13899 fprintf (file, "\t.align 2\n");
13901 fprintf (file, "%s:\n", stub);
13902 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13906 fprintf (file, "\tmflr r0\n");
13907 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13908 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13909 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13910 lazy_ptr_name, local_label_0);
13911 fprintf (file, "\tmtlr r0\n");
13912 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13913 lazy_ptr_name, local_label_0);
13914 fprintf (file, "\tmtctr r12\n");
13915 fprintf (file, "\tbctr\n");
13919 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13920 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13921 fprintf (file, "\tmtctr r12\n");
13922 fprintf (file, "\tbctr\n");
13925 machopic_lazy_symbol_ptr_section ();
13926 fprintf (file, "%s:\n", lazy_ptr_name);
13927 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13928 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13931 /* Legitimize PIC addresses. If the address is already
13932 position-independent, we return ORIG. Newly generated
13933 position-independent addresses go into a reg. This is REG if non
13934 zero, otherwise we allocate register(s) as necessary. */
13936 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13939 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13941 enum machine_mode mode;
13946 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13947 reg = gen_reg_rtx (Pmode);
13949 if (GET_CODE (orig) == CONST)
13951 if (GET_CODE (XEXP (orig, 0)) == PLUS
13952 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13955 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13958 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13961 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13967 if (GET_CODE (offset) == CONST_INT)
13969 if (SMALL_INT (offset))
13970 return plus_constant (base, INTVAL (offset));
13971 else if (! reload_in_progress && ! reload_completed)
13972 offset = force_reg (Pmode, offset);
13975 rtx mem = force_const_mem (Pmode, orig);
13976 return machopic_legitimize_pic_address (mem, Pmode, reg);
13979 return gen_rtx (PLUS, Pmode, base, offset);
13982 /* Fall back on generic machopic code. */
13983 return machopic_legitimize_pic_address (orig, mode, reg);
13986 /* This is just a placeholder to make linking work without having to
13987 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13988 ever needed for Darwin (not too likely!) this would have to get a
13989 real definition. */
13996 #endif /* TARGET_MACHO */
13999 static unsigned int
14000 rs6000_elf_section_type_flags (decl, name, reloc)
14006 = default_section_type_flags_1 (decl, name, reloc,
14007 flag_pic || DEFAULT_ABI == ABI_AIX);
14009 if (TARGET_RELOCATABLE)
14010 flags |= SECTION_WRITE;
14015 /* Record an element in the table of global constructors. SYMBOL is
14016 a SYMBOL_REF of the function to be called; PRIORITY is a number
14017 between 0 and MAX_INIT_PRIORITY.
14019 This differs from default_named_section_asm_out_constructor in
14020 that we have special handling for -mrelocatable. */
14023 rs6000_elf_asm_out_constructor (symbol, priority)
14027 const char *section = ".ctors";
14030 if (priority != DEFAULT_INIT_PRIORITY)
14032 sprintf (buf, ".ctors.%.5u",
14033 /* Invert the numbering so the linker puts us in the proper
14034 order; constructors are run from right to left, and the
14035 linker sorts in increasing order. */
14036 MAX_INIT_PRIORITY - priority);
14040 named_section_flags (section, SECTION_WRITE);
14041 assemble_align (POINTER_SIZE);
14043 if (TARGET_RELOCATABLE)
14045 fputs ("\t.long (", asm_out_file);
14046 output_addr_const (asm_out_file, symbol);
14047 fputs (")@fixup\n", asm_out_file);
14050 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14054 rs6000_elf_asm_out_destructor (symbol, priority)
14058 const char *section = ".dtors";
14061 if (priority != DEFAULT_INIT_PRIORITY)
14063 sprintf (buf, ".dtors.%.5u",
14064 /* Invert the numbering so the linker puts us in the proper
14065 order; constructors are run from right to left, and the
14066 linker sorts in increasing order. */
14067 MAX_INIT_PRIORITY - priority);
14071 named_section_flags (section, SECTION_WRITE);
14072 assemble_align (POINTER_SIZE);
14074 if (TARGET_RELOCATABLE)
14076 fputs ("\t.long (", asm_out_file);
14077 output_addr_const (asm_out_file, symbol);
14078 fputs (")@fixup\n", asm_out_file);
14081 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14085 rs6000_elf_declare_function_name (file, name, decl)
14092 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
14093 ASM_OUTPUT_LABEL (file, name);
14094 fputs (DOUBLE_INT_ASM_OP, file);
14096 assemble_name (file, name);
14097 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
14098 assemble_name (file, name);
14099 fputs (",24\n\t.type\t.", file);
14100 assemble_name (file, name);
14101 fputs (",@function\n", file);
14102 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
14104 fputs ("\t.globl\t.", file);
14105 assemble_name (file, name);
14108 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14110 ASM_OUTPUT_LABEL (file, name);
14114 if (TARGET_RELOCATABLE
14115 && (get_pool_size () != 0 || current_function_profile)
14120 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
14122 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14123 fprintf (file, "\t.long ");
14124 assemble_name (file, buf);
14126 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14127 assemble_name (file, buf);
14131 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
14132 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14134 if (DEFAULT_ABI == ABI_AIX)
14136 const char *desc_name, *orig_name;
14138 orig_name = (*targetm.strip_name_encoding) (name);
14139 desc_name = orig_name;
14140 while (*desc_name == '.')
14143 if (TREE_PUBLIC (decl))
14144 fprintf (file, "\t.globl %s\n", desc_name);
14146 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
14147 fprintf (file, "%s:\n", desc_name);
14148 fprintf (file, "\t.long %s\n", orig_name);
14149 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
14150 if (DEFAULT_ABI == ABI_AIX)
14151 fputs ("\t.long 0\n", file);
14152 fprintf (file, "\t.previous\n");
14154 ASM_OUTPUT_LABEL (file, name);
14160 rs6000_xcoff_asm_globalize_label (stream, name)
14164 fputs (GLOBAL_ASM_OP, stream);
14165 RS6000_OUTPUT_BASENAME (stream, name);
14166 putc ('\n', stream);
14170 rs6000_xcoff_asm_named_section (name, flags)
14172 unsigned int flags;
14175 static const char * const suffix[3] = { "PR", "RO", "RW" };
14177 if (flags & SECTION_CODE)
14179 else if (flags & SECTION_WRITE)
14184 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14185 (flags & SECTION_CODE) ? "." : "",
14186 name, suffix[smclass], flags & SECTION_ENTSIZE);
14190 rs6000_xcoff_select_section (decl, reloc, align)
14193 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14195 if (decl_readonly_section_1 (decl, reloc, 1))
14197 if (TREE_PUBLIC (decl))
14198 read_only_data_section ();
14200 read_only_private_data_section ();
14204 if (TREE_PUBLIC (decl))
14207 private_data_section ();
14212 rs6000_xcoff_unique_section (decl, reloc)
14214 int reloc ATTRIBUTE_UNUSED;
14218 /* Use select_section for private and uninitialized data. */
14219 if (!TREE_PUBLIC (decl)
14220 || DECL_COMMON (decl)
14221 || DECL_INITIAL (decl) == NULL_TREE
14222 || DECL_INITIAL (decl) == error_mark_node
14223 || (flag_zero_initialized_in_bss
14224 && initializer_zerop (DECL_INITIAL (decl))))
14227 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14228 name = (*targetm.strip_name_encoding) (name);
14229 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14232 /* Select section for constant in constant pool.
14234 On RS/6000, all constants are in the private read-only data area.
14235 However, if this is being placed in the TOC it must be output as a
14239 rs6000_xcoff_select_rtx_section (mode, x, align)
14240 enum machine_mode mode;
14242 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14244 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14247 read_only_private_data_section ();
14250 /* Remove any trailing [DS] or the like from the symbol name. */
14252 static const char *
14253 rs6000_xcoff_strip_name_encoding (name)
14259 len = strlen (name);
14260 if (name[len - 1] == ']')
14261 return ggc_alloc_string (name, len - 4);
14266 /* Section attributes. AIX is always PIC. */
14268 static unsigned int
14269 rs6000_xcoff_section_type_flags (decl, name, reloc)
14274 unsigned int align;
14275 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14277 /* Align to at least UNIT size. */
14278 if (flags & SECTION_CODE)
14279 align = MIN_UNITS_PER_WORD;
14281 /* Increase alignment of large objects if not already stricter. */
14282 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14283 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14284 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14286 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14289 /* Output at beginning of assembler file.
14291 Initialize the section names for the RS/6000 at this point.
14293 Specify filename, including full path, to assembler.
14295 We want to go into the TOC section so at least one .toc will be emitted.
14296 Also, in order to output proper .bs/.es pairs, we need at least one static
14297 [RW] section emitted.
14299 Finally, declare mcount when profiling to make the assembler happy. */
14302 rs6000_xcoff_file_start ()
14304 rs6000_gen_section_name (&xcoff_bss_section_name,
14305 main_input_filename, ".bss_");
14306 rs6000_gen_section_name (&xcoff_private_data_section_name,
14307 main_input_filename, ".rw_");
14308 rs6000_gen_section_name (&xcoff_read_only_section_name,
14309 main_input_filename, ".ro_");
14311 fputs ("\t.file\t", asm_out_file);
14312 output_quoted_string (asm_out_file, main_input_filename);
14313 fputc ('\n', asm_out_file);
14315 if (write_symbols != NO_DEBUG)
14316 private_data_section ();
14319 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
14320 rs6000_file_start ();
14323 /* Output at end of assembler file.
14324 On the RS/6000, referencing data should automatically pull in text. */
14327 rs6000_xcoff_file_end ()
14330 fputs ("_section_.text:\n", asm_out_file);
14332 fputs (TARGET_32BIT
14333 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14336 #endif /* TARGET_XCOFF */
14339 /* Cross-module name binding. Darwin does not support overriding
14340 functions at dynamic-link time. */
14343 rs6000_binds_local_p (decl)
14346 return default_binds_local_p_1 (decl, 0);
14350 /* Compute a (partial) cost for rtx X. Return true if the complete
14351 cost has been computed, and false if subexpressions should be
14352 scanned. In either case, *TOTAL contains the cost result. */
14355 rs6000_rtx_costs (x, code, outer_code, total)
14357 int code, outer_code ATTRIBUTE_UNUSED;
14362 /* On the RS/6000, if it is valid in the insn, it is free.
14363 So this always returns 0. */
14374 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14375 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14376 + 0x8000) >= 0x10000)
14377 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14378 ? COSTS_N_INSNS (2)
14379 : COSTS_N_INSNS (1));
14385 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14386 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14387 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14388 ? COSTS_N_INSNS (2)
14389 : COSTS_N_INSNS (1));
14395 *total = COSTS_N_INSNS (2);
14398 switch (rs6000_cpu)
14400 case PROCESSOR_RIOS1:
14401 case PROCESSOR_PPC405:
14402 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14403 ? COSTS_N_INSNS (5)
14404 : (INTVAL (XEXP (x, 1)) >= -256
14405 && INTVAL (XEXP (x, 1)) <= 255)
14406 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14409 case PROCESSOR_PPC440:
14410 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14411 ? COSTS_N_INSNS (3)
14412 : COSTS_N_INSNS (2));
14415 case PROCESSOR_RS64A:
14416 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14417 ? GET_MODE (XEXP (x, 1)) != DImode
14418 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14419 : (INTVAL (XEXP (x, 1)) >= -256
14420 && INTVAL (XEXP (x, 1)) <= 255)
14421 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14424 case PROCESSOR_RIOS2:
14425 case PROCESSOR_MPCCORE:
14426 case PROCESSOR_PPC604e:
14427 *total = COSTS_N_INSNS (2);
14430 case PROCESSOR_PPC601:
14431 *total = COSTS_N_INSNS (5);
14434 case PROCESSOR_PPC603:
14435 case PROCESSOR_PPC7400:
14436 case PROCESSOR_PPC750:
14437 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14438 ? COSTS_N_INSNS (5)
14439 : (INTVAL (XEXP (x, 1)) >= -256
14440 && INTVAL (XEXP (x, 1)) <= 255)
14441 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14444 case PROCESSOR_PPC7450:
14445 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14446 ? COSTS_N_INSNS (4)
14447 : COSTS_N_INSNS (3));
14450 case PROCESSOR_PPC403:
14451 case PROCESSOR_PPC604:
14452 case PROCESSOR_PPC8540:
14453 *total = COSTS_N_INSNS (4);
14456 case PROCESSOR_PPC620:
14457 case PROCESSOR_PPC630:
14458 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14459 ? GET_MODE (XEXP (x, 1)) != DImode
14460 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14461 : (INTVAL (XEXP (x, 1)) >= -256
14462 && INTVAL (XEXP (x, 1)) <= 255)
14463 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14466 case PROCESSOR_POWER4:
14467 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14468 ? GET_MODE (XEXP (x, 1)) != DImode
14469 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14470 : COSTS_N_INSNS (2));
14479 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14480 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14482 *total = COSTS_N_INSNS (2);
14489 switch (rs6000_cpu)
14491 case PROCESSOR_RIOS1:
14492 *total = COSTS_N_INSNS (19);
14495 case PROCESSOR_RIOS2:
14496 *total = COSTS_N_INSNS (13);
14499 case PROCESSOR_RS64A:
14500 *total = (GET_MODE (XEXP (x, 1)) != DImode
14501 ? COSTS_N_INSNS (65)
14502 : COSTS_N_INSNS (67));
14505 case PROCESSOR_MPCCORE:
14506 *total = COSTS_N_INSNS (6);
14509 case PROCESSOR_PPC403:
14510 *total = COSTS_N_INSNS (33);
14513 case PROCESSOR_PPC405:
14514 *total = COSTS_N_INSNS (35);
14517 case PROCESSOR_PPC440:
14518 *total = COSTS_N_INSNS (34);
14521 case PROCESSOR_PPC601:
14522 *total = COSTS_N_INSNS (36);
14525 case PROCESSOR_PPC603:
14526 *total = COSTS_N_INSNS (37);
14529 case PROCESSOR_PPC604:
14530 case PROCESSOR_PPC604e:
14531 *total = COSTS_N_INSNS (20);
14534 case PROCESSOR_PPC620:
14535 case PROCESSOR_PPC630:
14536 *total = (GET_MODE (XEXP (x, 1)) != DImode
14537 ? COSTS_N_INSNS (21)
14538 : COSTS_N_INSNS (37));
14541 case PROCESSOR_PPC750:
14542 case PROCESSOR_PPC8540:
14543 case PROCESSOR_PPC7400:
14544 *total = COSTS_N_INSNS (19);
14547 case PROCESSOR_PPC7450:
14548 *total = COSTS_N_INSNS (23);
14551 case PROCESSOR_POWER4:
14552 *total = (GET_MODE (XEXP (x, 1)) != DImode
14553 ? COSTS_N_INSNS (18)
14554 : COSTS_N_INSNS (34));
14562 *total = COSTS_N_INSNS (4);
14566 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14575 /* A C expression returning the cost of moving data from a register of class
14576 CLASS1 to one of CLASS2. */
14579 rs6000_register_move_cost (mode, from, to)
14580 enum machine_mode mode;
14581 enum reg_class from, to;
14583 /* Moves from/to GENERAL_REGS. */
14584 if (reg_classes_intersect_p (to, GENERAL_REGS)
14585 || reg_classes_intersect_p (from, GENERAL_REGS))
14587 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14590 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14591 return (rs6000_memory_move_cost (mode, from, 0)
14592 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14594 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14595 else if (from == CR_REGS)
14599 /* A move will cost one instruction per GPR moved. */
14600 return 2 * HARD_REGNO_NREGS (0, mode);
14603 /* Moving between two similar registers is just one instruction. */
14604 else if (reg_classes_intersect_p (to, from))
14605 return mode == TFmode ? 4 : 2;
14607 /* Everything else has to go through GENERAL_REGS. */
14609 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14610 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14613 /* A C expressions returning the cost of moving data of MODE from a register to
14617 rs6000_memory_move_cost (mode, class, in)
14618 enum machine_mode mode;
14619 enum reg_class class;
14620 int in ATTRIBUTE_UNUSED;
14622 if (reg_classes_intersect_p (class, GENERAL_REGS))
14623 return 4 * HARD_REGNO_NREGS (0, mode);
14624 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14625 return 4 * HARD_REGNO_NREGS (32, mode);
14626 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14627 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14629 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14632 /* Return an RTX representing where to find the function value of a
14633 function returning MODE. */
14635 rs6000_complex_function_value (enum machine_mode mode)
14637 unsigned int regno;
14639 enum machine_mode inner = GET_MODE_INNER (mode);
14641 if (FLOAT_MODE_P (mode))
14642 regno = FP_ARG_RETURN;
14645 regno = GP_ARG_RETURN;
14647 /* 32-bit is OK since it'll go in r3/r4. */
14649 && GET_MODE_BITSIZE (inner) >= 32)
14650 return gen_rtx_REG (mode, regno);
14653 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14655 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14656 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14657 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14660 /* Define how to find the value returned by a function.
14661 VALTYPE is the data type of the value (as a tree).
14662 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14663 otherwise, FUNC is 0.
14665 On the SPE, both FPs and vectors are returned in r3.
14667 On RS/6000 an integer value is in r3 and a floating-point value is in
14668 fp1, unless -msoft-float. */
14671 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14673 enum machine_mode mode;
14674 unsigned int regno;
14676 if ((INTEGRAL_TYPE_P (valtype)
14677 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14678 || POINTER_TYPE_P (valtype))
14681 mode = TYPE_MODE (valtype);
14683 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14684 regno = FP_ARG_RETURN;
14685 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14686 && TARGET_HARD_FLOAT
14687 && SPLIT_COMPLEX_ARGS)
14688 return rs6000_complex_function_value (mode);
14689 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14690 regno = ALTIVEC_ARG_RETURN;
14692 regno = GP_ARG_RETURN;
14694 return gen_rtx_REG (mode, regno);
14697 /* Define how to find the value returned by a library function
14698 assuming the value has mode MODE. */
14700 rs6000_libcall_value (enum machine_mode mode)
14702 unsigned int regno;
14704 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14705 && TARGET_HARD_FLOAT && TARGET_FPRS)
14706 regno = FP_ARG_RETURN;
14707 else if (ALTIVEC_VECTOR_MODE (mode))
14708 regno = ALTIVEC_ARG_RETURN;
14709 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14710 return rs6000_complex_function_value (mode);
14712 regno = GP_ARG_RETURN;
14714 return gen_rtx_REG (mode, regno);
14717 /* Return true if TYPE is of type __ev64_opaque__. */
14720 is_ev64_opaque_type (type)
14724 && (type == opaque_V2SI_type_node
14725 || type == opaque_V2SF_type_node
14726 || type == opaque_p_V2SI_type_node
14727 || (TREE_CODE (type) == VECTOR_TYPE
14728 && TYPE_NAME (type)
14729 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14730 && DECL_NAME (TYPE_NAME (type))
14731 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14732 "__ev64_opaque__") == 0)));
14736 rs6000_dwarf_register_span (reg)
14741 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14744 regno = REGNO (reg);
14746 /* The duality of the SPE register size wreaks all kinds of havoc.
14747 This is a way of distinguishing r0 in 32-bits from r0 in
14750 gen_rtx_PARALLEL (VOIDmode,
14753 gen_rtx_REG (SImode, regno + 1200),
14754 gen_rtx_REG (SImode, regno))
14756 gen_rtx_REG (SImode, regno),
14757 gen_rtx_REG (SImode, regno + 1200)));
14760 #include "gt-rs6000.h"