1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Whether SPE simd instructions should be generated. */
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
116 /* Save information from a "cmpxx" operation until the branch or scc is
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno;
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
139 /* ABI enumeration available for subtarget to use. */
140 enum rs6000_abi rs6000_current_abi;
142 /* ABI string from -mabi= option. */
143 const char *rs6000_abi_string;
146 const char *rs6000_debug_name;
147 int rs6000_debug_stack; /* debug stack applications */
148 int rs6000_debug_arg; /* debug argument handling */
151 static GTY(()) tree opaque_V2SI_type_node;
152 static GTY(()) tree opaque_V2SF_type_node;
153 static GTY(()) tree opaque_p_V2SI_type_node;
155 const char *rs6000_traceback_name;
157 traceback_default = 0,
163 /* Flag to say the TOC is initialized */
165 char toc_label_name[10];
167 /* Alias set for saves and restores from the rs6000 stack. */
168 static int rs6000_sr_alias_set;
170 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
171 The only place that looks at this is rs6000_set_default_type_attributes;
172 everywhere else should rely on the presence or absence of a longcall
173 attribute on the function declaration. */
174 int rs6000_default_long_calls;
175 const char *rs6000_longcall_switch;
177 struct builtin_description
179 /* mask is not const because we're going to alter it below. This
180 nonsense will go away when we rewrite the -march infrastructure
181 to give us more target flag bits. */
183 const enum insn_code icode;
184 const char *const name;
185 const enum rs6000_builtins code;
188 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
189 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
190 static void validate_condition_mode
191 PARAMS ((enum rtx_code, enum machine_mode));
192 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
193 static void rs6000_maybe_dead PARAMS ((rtx));
194 static void rs6000_emit_stack_tie PARAMS ((void));
195 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
196 static rtx spe_synthesize_frame_save PARAMS ((rtx));
197 static bool spe_func_has_64bit_regs_p PARAMS ((void));
198 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
199 unsigned int, int, int));
200 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
201 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
202 static unsigned rs6000_hash_constant PARAMS ((rtx));
203 static unsigned toc_hash_function PARAMS ((const void *));
204 static int toc_hash_eq PARAMS ((const void *, const void *));
205 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
206 static bool constant_pool_expr_p PARAMS ((rtx));
207 static bool toc_relative_expr_p PARAMS ((rtx));
208 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
209 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
210 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
211 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
212 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
213 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
214 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
215 #ifdef HAVE_GAS_HIDDEN
216 static void rs6000_assemble_visibility PARAMS ((tree, int));
218 static int rs6000_ra_ever_killed PARAMS ((void));
219 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
220 extern const struct attribute_spec rs6000_attribute_table[];
221 static void rs6000_set_default_type_attributes PARAMS ((tree));
222 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
223 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
224 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
225 HOST_WIDE_INT, tree));
226 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
227 HOST_WIDE_INT, HOST_WIDE_INT));
229 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
231 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
232 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
233 static void rs6000_elf_select_section PARAMS ((tree, int,
234 unsigned HOST_WIDE_INT));
235 static void rs6000_elf_unique_section PARAMS ((tree, int));
236 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
237 unsigned HOST_WIDE_INT));
238 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
240 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
243 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
244 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
245 static void rs6000_xcoff_select_section PARAMS ((tree, int,
246 unsigned HOST_WIDE_INT));
247 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
248 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
249 unsigned HOST_WIDE_INT));
250 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
251 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
254 static bool rs6000_binds_local_p PARAMS ((tree));
256 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
257 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
258 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
259 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
260 static int rs6000_adjust_priority PARAMS ((rtx, int));
261 static int rs6000_issue_rate PARAMS ((void));
262 static int rs6000_use_sched_lookahead PARAMS ((void));
264 static void rs6000_init_builtins PARAMS ((void));
265 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
266 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
267 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
268 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
269 static void altivec_init_builtins PARAMS ((void));
270 static void rs6000_common_init_builtins PARAMS ((void));
272 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
273 int, enum rs6000_builtins,
274 enum rs6000_builtins));
275 static void spe_init_builtins PARAMS ((void));
276 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
277 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
278 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
279 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
281 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
282 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
283 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
284 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
285 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
286 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
287 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
288 static void rs6000_parse_abi_options PARAMS ((void));
289 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
290 static int first_altivec_reg_to_save PARAMS ((void));
291 static unsigned int compute_vrsave_mask PARAMS ((void));
292 static void is_altivec_return_reg PARAMS ((rtx, void *));
293 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
294 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
295 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
296 static bool is_ev64_opaque_type PARAMS ((tree));
297 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
299 /* Hash table stuff for keeping track of TOC entries. */
301 struct toc_hash_struct GTY(())
303 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
304 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
306 enum machine_mode key_mode;
310 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
312 /* Default register names. */
313 char rs6000_reg_names[][8] =
315 "0", "1", "2", "3", "4", "5", "6", "7",
316 "8", "9", "10", "11", "12", "13", "14", "15",
317 "16", "17", "18", "19", "20", "21", "22", "23",
318 "24", "25", "26", "27", "28", "29", "30", "31",
319 "0", "1", "2", "3", "4", "5", "6", "7",
320 "8", "9", "10", "11", "12", "13", "14", "15",
321 "16", "17", "18", "19", "20", "21", "22", "23",
322 "24", "25", "26", "27", "28", "29", "30", "31",
323 "mq", "lr", "ctr","ap",
324 "0", "1", "2", "3", "4", "5", "6", "7",
326 /* AltiVec registers. */
327 "0", "1", "2", "3", "4", "5", "6", "7",
328 "8", "9", "10", "11", "12", "13", "14", "15",
329 "16", "17", "18", "19", "20", "21", "22", "23",
330 "24", "25", "26", "27", "28", "29", "30", "31",
336 #ifdef TARGET_REGNAMES
337 static const char alt_reg_names[][8] =
339 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
340 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
341 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
342 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
343 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
344 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
345 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
346 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
347 "mq", "lr", "ctr", "ap",
348 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
350 /* AltiVec registers. */
351 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
352 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
353 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
354 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
361 #ifndef MASK_STRICT_ALIGN
362 #define MASK_STRICT_ALIGN 0
364 #ifndef TARGET_PROFILE_KERNEL
365 #define TARGET_PROFILE_KERNEL 0
368 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
369 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
371 /* Initialize the GCC target structure. */
372 #undef TARGET_ATTRIBUTE_TABLE
373 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
374 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
375 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
377 #undef TARGET_ASM_ALIGNED_DI_OP
378 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
380 /* Default unaligned ops are only provided for ELF. Find the ops needed
381 for non-ELF systems. */
382 #ifndef OBJECT_FORMAT_ELF
384 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
386 #undef TARGET_ASM_UNALIGNED_HI_OP
387 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
388 #undef TARGET_ASM_UNALIGNED_SI_OP
389 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
390 #undef TARGET_ASM_UNALIGNED_DI_OP
391 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
394 #undef TARGET_ASM_UNALIGNED_HI_OP
395 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
396 #undef TARGET_ASM_UNALIGNED_SI_OP
397 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
401 /* This hook deals with fixups for relocatable code and DI-mode objects
403 #undef TARGET_ASM_INTEGER
404 #define TARGET_ASM_INTEGER rs6000_assemble_integer
406 #ifdef HAVE_GAS_HIDDEN
407 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
408 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
411 #undef TARGET_ASM_FUNCTION_PROLOGUE
412 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
413 #undef TARGET_ASM_FUNCTION_EPILOGUE
414 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
416 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
417 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
418 #undef TARGET_SCHED_VARIABLE_ISSUE
419 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
421 #undef TARGET_SCHED_ISSUE_RATE
422 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
423 #undef TARGET_SCHED_ADJUST_COST
424 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
425 #undef TARGET_SCHED_ADJUST_PRIORITY
426 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
428 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
429 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
431 #undef TARGET_INIT_BUILTINS
432 #define TARGET_INIT_BUILTINS rs6000_init_builtins
434 #undef TARGET_EXPAND_BUILTIN
435 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
438 #undef TARGET_BINDS_LOCAL_P
439 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
442 #undef TARGET_ASM_OUTPUT_MI_THUNK
443 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
445 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
446 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
448 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
449 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
451 #undef TARGET_RTX_COSTS
452 #define TARGET_RTX_COSTS rs6000_rtx_costs
453 #undef TARGET_ADDRESS_COST
454 #define TARGET_ADDRESS_COST hook_int_rtx_0
456 #undef TARGET_VECTOR_OPAQUE_P
457 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
459 #undef TARGET_DWARF_REGISTER_SPAN
460 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
462 struct gcc_target targetm = TARGET_INITIALIZER;
464 /* Override command line options. Mostly we process the processor
465 type and sometimes adjust other TARGET_ options. */
468 rs6000_override_options (default_cpu)
469 const char *default_cpu;
472 struct rs6000_cpu_select *ptr;
474 /* Simplify the entries below by making a mask for any POWER
475 variant and any PowerPC variant. */
477 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
478 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
479 | MASK_PPC_GFXOPT | MASK_POWERPC64)
480 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
484 const char *const name; /* Canonical processor name. */
485 const enum processor_type processor; /* Processor type enum value. */
486 const int target_enable; /* Target flags to enable. */
487 const int target_disable; /* Target flags to disable. */
488 } const processor_target_table[]
489 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
490 POWER_MASKS | POWERPC_MASKS},
491 {"power", PROCESSOR_POWER,
492 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
493 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
494 {"power2", PROCESSOR_POWER,
495 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
496 POWERPC_MASKS | MASK_NEW_MNEMONICS},
497 {"power3", PROCESSOR_PPC630,
498 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
500 {"power4", PROCESSOR_POWER4,
501 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
503 {"powerpc", PROCESSOR_POWERPC,
504 MASK_POWERPC | MASK_NEW_MNEMONICS,
505 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
506 {"powerpc64", PROCESSOR_POWERPC64,
507 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
508 POWER_MASKS | POWERPC_OPT_MASKS},
509 {"rios", PROCESSOR_RIOS1,
510 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
511 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
512 {"rios1", PROCESSOR_RIOS1,
513 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
514 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
515 {"rsc", PROCESSOR_PPC601,
516 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
517 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
518 {"rsc1", PROCESSOR_PPC601,
519 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
520 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
521 {"rios2", PROCESSOR_RIOS2,
522 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
523 POWERPC_MASKS | MASK_NEW_MNEMONICS},
524 {"rs64a", PROCESSOR_RS64A,
525 MASK_POWERPC | MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_OPT_MASKS},
527 {"401", PROCESSOR_PPC403,
528 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
529 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
530 {"403", PROCESSOR_PPC403,
531 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
532 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
533 {"405", PROCESSOR_PPC405,
534 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
535 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
536 {"405fp", PROCESSOR_PPC405,
537 MASK_POWERPC | MASK_NEW_MNEMONICS,
538 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
539 {"440", PROCESSOR_PPC440,
540 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
541 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
542 {"440fp", PROCESSOR_PPC440,
543 MASK_POWERPC | MASK_NEW_MNEMONICS,
544 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
545 {"505", PROCESSOR_MPCCORE,
546 MASK_POWERPC | MASK_NEW_MNEMONICS,
547 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
548 {"601", PROCESSOR_PPC601,
549 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
550 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
551 {"602", PROCESSOR_PPC603,
552 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
553 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
554 {"603", PROCESSOR_PPC603,
555 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
556 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
557 {"603e", PROCESSOR_PPC603,
558 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
559 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
560 {"ec603e", PROCESSOR_PPC603,
561 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
563 {"604", PROCESSOR_PPC604,
564 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
565 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
566 {"604e", PROCESSOR_PPC604e,
567 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
568 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
569 {"620", PROCESSOR_PPC620,
570 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
572 {"630", PROCESSOR_PPC630,
573 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
575 {"740", PROCESSOR_PPC750,
576 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
578 {"750", PROCESSOR_PPC750,
579 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
580 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
581 {"7400", PROCESSOR_PPC7400,
582 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
583 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
584 {"7450", PROCESSOR_PPC7450,
585 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
586 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
587 {"8540", PROCESSOR_PPC8540,
588 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
590 {"801", PROCESSOR_MPCCORE,
591 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
593 {"821", PROCESSOR_MPCCORE,
594 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
595 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
596 {"823", PROCESSOR_MPCCORE,
597 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
598 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
599 {"860", PROCESSOR_MPCCORE,
600 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
601 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
603 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
605 /* Save current -mmultiple/-mno-multiple status. */
606 int multiple = TARGET_MULTIPLE;
607 /* Save current -mstring/-mno-string status. */
608 int string = TARGET_STRING;
610 /* Identify the processor type. */
611 rs6000_select[0].string = default_cpu;
612 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
614 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
616 ptr = &rs6000_select[i];
617 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
619 for (j = 0; j < ptt_size; j++)
620 if (! strcmp (ptr->string, processor_target_table[j].name))
623 rs6000_cpu = processor_target_table[j].processor;
627 target_flags |= processor_target_table[j].target_enable;
628 target_flags &= ~processor_target_table[j].target_disable;
634 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
641 /* If we are optimizing big endian systems for space, use the load/store
642 multiple and string instructions. */
643 if (BYTES_BIG_ENDIAN && optimize_size)
644 target_flags |= MASK_MULTIPLE | MASK_STRING;
646 /* If -mmultiple or -mno-multiple was explicitly used, don't
647 override with the processor default */
648 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
649 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
651 /* If -mstring or -mno-string was explicitly used, don't override
652 with the processor default. */
653 if ((target_flags_explicit & MASK_STRING) != 0)
654 target_flags = (target_flags & ~MASK_STRING) | string;
656 /* Don't allow -mmultiple or -mstring on little endian systems
657 unless the cpu is a 750, because the hardware doesn't support the
658 instructions used in little endian mode, and causes an alignment
659 trap. The 750 does not cause an alignment trap (except when the
660 target is unaligned). */
662 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
666 target_flags &= ~MASK_MULTIPLE;
667 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
668 warning ("-mmultiple is not supported on little endian systems");
673 target_flags &= ~MASK_STRING;
674 if ((target_flags_explicit & MASK_STRING) != 0)
675 warning ("-mstring is not supported on little endian systems");
679 /* Set debug flags */
680 if (rs6000_debug_name)
682 if (! strcmp (rs6000_debug_name, "all"))
683 rs6000_debug_stack = rs6000_debug_arg = 1;
684 else if (! strcmp (rs6000_debug_name, "stack"))
685 rs6000_debug_stack = 1;
686 else if (! strcmp (rs6000_debug_name, "arg"))
687 rs6000_debug_arg = 1;
689 error ("unknown -mdebug-%s switch", rs6000_debug_name);
692 if (rs6000_traceback_name)
694 if (! strncmp (rs6000_traceback_name, "full", 4))
695 rs6000_traceback = traceback_full;
696 else if (! strncmp (rs6000_traceback_name, "part", 4))
697 rs6000_traceback = traceback_part;
698 else if (! strncmp (rs6000_traceback_name, "no", 2))
699 rs6000_traceback = traceback_none;
701 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
702 rs6000_traceback_name);
705 /* Set size of long double */
706 rs6000_long_double_type_size = 64;
707 if (rs6000_long_double_size_string)
710 int size = strtol (rs6000_long_double_size_string, &tail, 10);
711 if (*tail != '\0' || (size != 64 && size != 128))
712 error ("Unknown switch -mlong-double-%s",
713 rs6000_long_double_size_string);
715 rs6000_long_double_type_size = size;
718 /* Handle -mabi= options. */
719 rs6000_parse_abi_options ();
721 /* Handle generic -mFOO=YES/NO options. */
722 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
723 &rs6000_altivec_vrsave);
724 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
726 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
727 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
730 #ifdef SUBTARGET_OVERRIDE_OPTIONS
731 SUBTARGET_OVERRIDE_OPTIONS;
733 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
734 SUBSUBTARGET_OVERRIDE_OPTIONS;
739 /* The e500 does not have string instructions, and we set
740 MASK_STRING above when optimizing for size. */
741 if ((target_flags & MASK_STRING) != 0)
742 target_flags = target_flags & ~MASK_STRING;
744 /* No SPE means 64-bit long doubles, even if an E500. */
745 if (rs6000_spe_string != 0
746 && !strcmp (rs6000_spe_string, "no"))
747 rs6000_long_double_type_size = 64;
749 else if (rs6000_select[1].string != NULL)
751 /* For the powerpc-eabispe configuration, we set all these by
752 default, so let's unset them if we manually set another
753 CPU that is not the E500. */
754 if (rs6000_abi_string == 0)
756 if (rs6000_spe_string == 0)
758 if (rs6000_float_gprs_string == 0)
759 rs6000_float_gprs = 0;
760 if (rs6000_isel_string == 0)
762 if (rs6000_long_double_size_string == 0)
763 rs6000_long_double_type_size = 64;
766 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
767 using TARGET_OPTIONS to handle a toggle switch, but we're out of
768 bits in target_flags so TARGET_SWITCHES cannot be used.
769 Assumption here is that rs6000_longcall_switch points into the
770 text of the complete option, rather than being a copy, so we can
771 scan back for the presence or absence of the no- modifier. */
772 if (rs6000_longcall_switch)
774 const char *base = rs6000_longcall_switch;
775 while (base[-1] != 'm') base--;
777 if (*rs6000_longcall_switch != '\0')
778 error ("invalid option `%s'", base);
779 rs6000_default_long_calls = (base[0] != 'n');
782 #ifdef TARGET_REGNAMES
783 /* If the user desires alternate register names, copy in the
784 alternate names now. */
786 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
789 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
790 If -maix-struct-return or -msvr4-struct-return was explicitly
791 used, don't override with the ABI default. */
792 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
794 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
795 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
797 target_flags |= MASK_AIX_STRUCT_RET;
800 if (TARGET_LONG_DOUBLE_128
801 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
802 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
804 /* Allocate an alias set for register saves & restores from stack. */
805 rs6000_sr_alias_set = new_alias_set ();
808 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
810 /* We can only guarantee the availability of DI pseudo-ops when
811 assembling for 64-bit targets. */
814 targetm.asm_out.aligned_op.di = NULL;
815 targetm.asm_out.unaligned_op.di = NULL;
818 /* Set maximum branch target alignment at two instructions, eight bytes. */
819 align_jumps_max_skip = 8;
820 align_loops_max_skip = 8;
822 /* Arrange to save and restore machine status around nested functions. */
823 init_machine_status = rs6000_init_machine_status;
826 /* Handle generic options of the form -mfoo=yes/no.
827 NAME is the option name.
828 VALUE is the option value.
829 FLAG is the pointer to the flag where to store a 1 or 0, depending on
830 whether the option value is 'yes' or 'no' respectively. */
832 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
836 else if (!strcmp (value, "yes"))
838 else if (!strcmp (value, "no"))
841 error ("unknown -m%s= option specified: '%s'", name, value);
844 /* Handle -mabi= options. */
846 rs6000_parse_abi_options ()
848 if (rs6000_abi_string == 0)
850 else if (! strcmp (rs6000_abi_string, "altivec"))
851 rs6000_altivec_abi = 1;
852 else if (! strcmp (rs6000_abi_string, "no-altivec"))
853 rs6000_altivec_abi = 0;
854 else if (! strcmp (rs6000_abi_string, "spe"))
858 error ("not configured for ABI: '%s'", rs6000_abi_string);
861 else if (! strcmp (rs6000_abi_string, "no-spe"))
864 error ("unknown ABI specified: '%s'", rs6000_abi_string);
868 optimization_options (level, size)
869 int level ATTRIBUTE_UNUSED;
870 int size ATTRIBUTE_UNUSED;
874 /* Do anything needed at the start of the asm file. */
877 rs6000_file_start (file, default_cpu)
879 const char *default_cpu;
883 const char *start = buffer;
884 struct rs6000_cpu_select *ptr;
886 if (flag_verbose_asm)
888 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
889 rs6000_select[0].string = default_cpu;
891 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
893 ptr = &rs6000_select[i];
894 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
896 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
902 switch (rs6000_sdata)
904 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
905 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
906 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
907 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
910 if (rs6000_sdata && g_switch_value)
912 fprintf (file, "%s -G %d", start, g_switch_value);
922 /* Return nonzero if this function is known to have a null epilogue. */
927 if (reload_completed)
929 rs6000_stack_t *info = rs6000_stack_info ();
931 if (info->first_gp_reg_save == 32
932 && info->first_fp_reg_save == 64
933 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
936 && info->vrsave_mask == 0
944 /* Returns 1 always. */
947 any_operand (op, mode)
948 rtx op ATTRIBUTE_UNUSED;
949 enum machine_mode mode ATTRIBUTE_UNUSED;
954 /* Returns 1 if op is the count register. */
956 count_register_operand (op, mode)
958 enum machine_mode mode ATTRIBUTE_UNUSED;
960 if (GET_CODE (op) != REG)
963 if (REGNO (op) == COUNT_REGISTER_REGNUM)
966 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
972 /* Returns 1 if op is an altivec register. */
974 altivec_register_operand (op, mode)
976 enum machine_mode mode ATTRIBUTE_UNUSED;
979 return (register_operand (op, mode)
980 && (GET_CODE (op) != REG
981 || REGNO (op) > FIRST_PSEUDO_REGISTER
982 || ALTIVEC_REGNO_P (REGNO (op))));
986 xer_operand (op, mode)
988 enum machine_mode mode ATTRIBUTE_UNUSED;
990 if (GET_CODE (op) != REG)
993 if (XER_REGNO_P (REGNO (op)))
999 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1000 by such constants completes more quickly. */
1003 s8bit_cint_operand (op, mode)
1005 enum machine_mode mode ATTRIBUTE_UNUSED;
1007 return ( GET_CODE (op) == CONST_INT
1008 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1011 /* Return 1 if OP is a constant that can fit in a D field. */
1014 short_cint_operand (op, mode)
1016 enum machine_mode mode ATTRIBUTE_UNUSED;
1018 return (GET_CODE (op) == CONST_INT
1019 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1022 /* Similar for an unsigned D field. */
1025 u_short_cint_operand (op, mode)
1027 enum machine_mode mode ATTRIBUTE_UNUSED;
1029 return (GET_CODE (op) == CONST_INT
1030 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1033 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1036 non_short_cint_operand (op, mode)
1038 enum machine_mode mode ATTRIBUTE_UNUSED;
1040 return (GET_CODE (op) == CONST_INT
1041 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1044 /* Returns 1 if OP is a CONST_INT that is a positive value
1045 and an exact power of 2. */
1048 exact_log2_cint_operand (op, mode)
1050 enum machine_mode mode ATTRIBUTE_UNUSED;
1052 return (GET_CODE (op) == CONST_INT
1054 && exact_log2 (INTVAL (op)) >= 0);
1057 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1061 gpc_reg_operand (op, mode)
1063 enum machine_mode mode;
1065 return (register_operand (op, mode)
1066 && (GET_CODE (op) != REG
1067 || (REGNO (op) >= ARG_POINTER_REGNUM
1068 && !XER_REGNO_P (REGNO (op)))
1069 || REGNO (op) < MQ_REGNO));
1072 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1076 cc_reg_operand (op, mode)
1078 enum machine_mode mode;
1080 return (register_operand (op, mode)
1081 && (GET_CODE (op) != REG
1082 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1083 || CR_REGNO_P (REGNO (op))));
1086 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1087 CR field that isn't CR0. */
1090 cc_reg_not_cr0_operand (op, mode)
1092 enum machine_mode mode;
1094 return (register_operand (op, mode)
1095 && (GET_CODE (op) != REG
1096 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1097 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1100 /* Returns 1 if OP is either a constant integer valid for a D-field or
1101 a non-special register. If a register, it must be in the proper
1102 mode unless MODE is VOIDmode. */
1105 reg_or_short_operand (op, mode)
1107 enum machine_mode mode;
1109 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1112 /* Similar, except check if the negation of the constant would be
1113 valid for a D-field. */
1116 reg_or_neg_short_operand (op, mode)
1118 enum machine_mode mode;
1120 if (GET_CODE (op) == CONST_INT)
1121 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1123 return gpc_reg_operand (op, mode);
1126 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1127 a non-special register. If a register, it must be in the proper
1128 mode unless MODE is VOIDmode. */
1131 reg_or_aligned_short_operand (op, mode)
1133 enum machine_mode mode;
1135 if (gpc_reg_operand (op, mode))
1137 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1144 /* Return 1 if the operand is either a register or an integer whose
1145 high-order 16 bits are zero. */
1148 reg_or_u_short_operand (op, mode)
1150 enum machine_mode mode;
1152 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1155 /* Return 1 is the operand is either a non-special register or ANY
1156 constant integer. */
1159 reg_or_cint_operand (op, mode)
1161 enum machine_mode mode;
1163 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1166 /* Return 1 is the operand is either a non-special register or ANY
1167 32-bit signed constant integer. */
1170 reg_or_arith_cint_operand (op, mode)
1172 enum machine_mode mode;
1174 return (gpc_reg_operand (op, mode)
1175 || (GET_CODE (op) == CONST_INT
1176 #if HOST_BITS_PER_WIDE_INT != 32
1177 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1178 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1183 /* Return 1 is the operand is either a non-special register or a 32-bit
1184 signed constant integer valid for 64-bit addition. */
1187 reg_or_add_cint64_operand (op, mode)
1189 enum machine_mode mode;
1191 return (gpc_reg_operand (op, mode)
1192 || (GET_CODE (op) == CONST_INT
1193 #if HOST_BITS_PER_WIDE_INT == 32
1194 && INTVAL (op) < 0x7fff8000
1196 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1202 /* Return 1 is the operand is either a non-special register or a 32-bit
1203 signed constant integer valid for 64-bit subtraction. */
1206 reg_or_sub_cint64_operand (op, mode)
1208 enum machine_mode mode;
1210 return (gpc_reg_operand (op, mode)
1211 || (GET_CODE (op) == CONST_INT
1212 #if HOST_BITS_PER_WIDE_INT == 32
1213 && (- INTVAL (op)) < 0x7fff8000
1215 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1221 /* Return 1 is the operand is either a non-special register or ANY
1222 32-bit unsigned constant integer. */
1225 reg_or_logical_cint_operand (op, mode)
1227 enum machine_mode mode;
1229 if (GET_CODE (op) == CONST_INT)
1231 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1233 if (GET_MODE_BITSIZE (mode) <= 32)
1236 if (INTVAL (op) < 0)
1240 return ((INTVAL (op) & GET_MODE_MASK (mode)
1241 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1243 else if (GET_CODE (op) == CONST_DOUBLE)
1245 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1249 return CONST_DOUBLE_HIGH (op) == 0;
1252 return gpc_reg_operand (op, mode);
1255 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1258 got_operand (op, mode)
1260 enum machine_mode mode ATTRIBUTE_UNUSED;
1262 return (GET_CODE (op) == SYMBOL_REF
1263 || GET_CODE (op) == CONST
1264 || GET_CODE (op) == LABEL_REF);
1267 /* Return 1 if the operand is a simple references that can be loaded via
1268 the GOT (labels involving addition aren't allowed). */
1271 got_no_const_operand (op, mode)
1273 enum machine_mode mode ATTRIBUTE_UNUSED;
1275 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1278 /* Return the number of instructions it takes to form a constant in an
1279 integer register. */
1282 num_insns_constant_wide (value)
1283 HOST_WIDE_INT value;
1285 /* signed constant loadable with {cal|addi} */
1286 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1289 /* constant loadable with {cau|addis} */
1290 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1293 #if HOST_BITS_PER_WIDE_INT == 64
1294 else if (TARGET_POWERPC64)
1296 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1297 HOST_WIDE_INT high = value >> 31;
1299 if (high == 0 || high == -1)
1305 return num_insns_constant_wide (high) + 1;
1307 return (num_insns_constant_wide (high)
1308 + num_insns_constant_wide (low) + 1);
1317 num_insns_constant (op, mode)
1319 enum machine_mode mode;
1321 if (GET_CODE (op) == CONST_INT)
1323 #if HOST_BITS_PER_WIDE_INT == 64
1324 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1325 && mask64_operand (op, mode))
1329 return num_insns_constant_wide (INTVAL (op));
1332 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1337 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1338 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1339 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1342 else if (GET_CODE (op) == CONST_DOUBLE)
1348 int endian = (WORDS_BIG_ENDIAN == 0);
1350 if (mode == VOIDmode || mode == DImode)
1352 high = CONST_DOUBLE_HIGH (op);
1353 low = CONST_DOUBLE_LOW (op);
1357 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1358 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1360 low = l[1 - endian];
1364 return (num_insns_constant_wide (low)
1365 + num_insns_constant_wide (high));
1369 if (high == 0 && low >= 0)
1370 return num_insns_constant_wide (low);
1372 else if (high == -1 && low < 0)
1373 return num_insns_constant_wide (low);
1375 else if (mask64_operand (op, mode))
1379 return num_insns_constant_wide (high) + 1;
1382 return (num_insns_constant_wide (high)
1383 + num_insns_constant_wide (low) + 1);
1391 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1392 register with one instruction per word. We only do this if we can
1393 safely read CONST_DOUBLE_{LOW,HIGH}. */
1396 easy_fp_constant (op, mode)
1398 enum machine_mode mode;
1400 if (GET_CODE (op) != CONST_DOUBLE
1401 || GET_MODE (op) != mode
1402 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1405 /* Consider all constants with -msoft-float to be easy. */
1406 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1410 /* If we are using V.4 style PIC, consider all constants to be hard. */
1411 if (flag_pic && DEFAULT_ABI == ABI_V4)
1414 #ifdef TARGET_RELOCATABLE
1415 /* Similarly if we are using -mrelocatable, consider all constants
1417 if (TARGET_RELOCATABLE)
1426 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1427 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1429 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1430 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1431 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1432 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1435 else if (mode == DFmode)
1440 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1441 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1443 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1444 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1447 else if (mode == SFmode)
1452 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1453 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1455 return num_insns_constant_wide (l) == 1;
1458 else if (mode == DImode)
1459 return ((TARGET_POWERPC64
1460 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1461 || (num_insns_constant (op, DImode) <= 2));
1463 else if (mode == SImode)
1469 /* Return non zero if all elements of a vector have the same value. */
1472 easy_vector_same (op, mode)
1474 enum machine_mode mode ATTRIBUTE_UNUSED;
1478 units = CONST_VECTOR_NUNITS (op);
1480 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1481 for (i = 1; i < units; ++i)
1482 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1489 /* Return 1 if the operand is a CONST_INT and can be put into a
1490 register without using memory. */
1493 easy_vector_constant (op, mode)
1495 enum machine_mode mode;
1499 if (GET_CODE (op) != CONST_VECTOR
1504 if (zero_constant (op, mode)
1505 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1506 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1509 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1512 if (TARGET_SPE && mode == V1DImode)
1515 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1516 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1518 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1520 evmergelo r0, r0, r0
1523 I don't know how efficient it would be to allow bigger constants,
1524 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1525 instructions is better than a 64-bit memory load, but I don't
1526 have the e500 timing specs. */
1527 if (TARGET_SPE && mode == V2SImode
1528 && cst >= -0x7fff && cst <= 0x7fff
1529 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1532 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1535 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1541 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1544 easy_vector_constant_add_self (op, mode)
1546 enum machine_mode mode;
1550 if (!easy_vector_constant (op, mode))
1553 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1555 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1559 output_vec_const_move (operands)
1563 enum machine_mode mode;
1569 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1570 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1571 mode = GET_MODE (dest);
1575 if (zero_constant (vec, mode))
1576 return "vxor %0,%0,%0";
1577 else if (EASY_VECTOR_15 (cst, vec, mode))
1579 operands[1] = GEN_INT (cst);
1583 return "vspltisw %0,%1";
1585 return "vspltish %0,%1";
1587 return "vspltisb %0,%1";
1592 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1600 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1601 pattern of V1DI, V4HI, and V2SF.
1603 FIXME: We should probabl return # and add post reload
1604 splitters for these, but this way is so easy ;-).
1606 operands[1] = GEN_INT (cst);
1607 operands[2] = GEN_INT (cst2);
1609 return "li %0,%1\n\tevmergelo %0,%0,%0";
1611 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1617 /* Return 1 if the operand is the constant 0. This works for scalars
1618 as well as vectors. */
1620 zero_constant (op, mode)
1622 enum machine_mode mode;
1624 return op == CONST0_RTX (mode);
1627 /* Return 1 if the operand is 0.0. */
1629 zero_fp_constant (op, mode)
1631 enum machine_mode mode;
1633 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1636 /* Return 1 if the operand is in volatile memory. Note that during
1637 the RTL generation phase, memory_operand does not return TRUE for
1638 volatile memory references. So this function allows us to
1639 recognize volatile references where its safe. */
1642 volatile_mem_operand (op, mode)
1644 enum machine_mode mode;
1646 if (GET_CODE (op) != MEM)
1649 if (!MEM_VOLATILE_P (op))
1652 if (mode != GET_MODE (op))
1655 if (reload_completed)
1656 return memory_operand (op, mode);
1658 if (reload_in_progress)
1659 return strict_memory_address_p (mode, XEXP (op, 0));
1661 return memory_address_p (mode, XEXP (op, 0));
1664 /* Return 1 if the operand is an offsettable memory operand. */
1667 offsettable_mem_operand (op, mode)
1669 enum machine_mode mode;
1671 return ((GET_CODE (op) == MEM)
1672 && offsettable_address_p (reload_completed || reload_in_progress,
1673 mode, XEXP (op, 0)));
1676 /* Return 1 if the operand is either an easy FP constant (see above) or
1680 mem_or_easy_const_operand (op, mode)
1682 enum machine_mode mode;
1684 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1687 /* Return 1 if the operand is either a non-special register or an item
1688 that can be used as the operand of a `mode' add insn. */
1691 add_operand (op, mode)
1693 enum machine_mode mode;
1695 if (GET_CODE (op) == CONST_INT)
1696 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1697 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1699 return gpc_reg_operand (op, mode);
1702 /* Return 1 if OP is a constant but not a valid add_operand. */
1705 non_add_cint_operand (op, mode)
1707 enum machine_mode mode ATTRIBUTE_UNUSED;
1709 return (GET_CODE (op) == CONST_INT
1710 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1711 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1714 /* Return 1 if the operand is a non-special register or a constant that
1715 can be used as the operand of an OR or XOR insn on the RS/6000. */
1718 logical_operand (op, mode)
1720 enum machine_mode mode;
1722 HOST_WIDE_INT opl, oph;
1724 if (gpc_reg_operand (op, mode))
1727 if (GET_CODE (op) == CONST_INT)
1729 opl = INTVAL (op) & GET_MODE_MASK (mode);
1731 #if HOST_BITS_PER_WIDE_INT <= 32
1732 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1736 else if (GET_CODE (op) == CONST_DOUBLE)
1738 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1741 opl = CONST_DOUBLE_LOW (op);
1742 oph = CONST_DOUBLE_HIGH (op);
1749 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1750 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1753 /* Return 1 if C is a constant that is not a logical operand (as
1754 above), but could be split into one. */
1757 non_logical_cint_operand (op, mode)
1759 enum machine_mode mode;
1761 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1762 && ! logical_operand (op, mode)
1763 && reg_or_logical_cint_operand (op, mode));
1766 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1767 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1768 Reject all ones and all zeros, since these should have been optimized
1769 away and confuse the making of MB and ME. */
1772 mask_operand (op, mode)
1774 enum machine_mode mode ATTRIBUTE_UNUSED;
1776 HOST_WIDE_INT c, lsb;
1778 if (GET_CODE (op) != CONST_INT)
1783 /* Fail in 64-bit mode if the mask wraps around because the upper
1784 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1785 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1788 /* We don't change the number of transitions by inverting,
1789 so make sure we start with the LS bit zero. */
1793 /* Reject all zeros or all ones. */
1797 /* Find the first transition. */
1800 /* Invert to look for a second transition. */
1803 /* Erase first transition. */
1806 /* Find the second transition (if any). */
1809 /* Match if all the bits above are 1's (or c is zero). */
1813 /* Return 1 for the PowerPC64 rlwinm corner case. */
1816 mask_operand_wrap (op, mode)
1818 enum machine_mode mode ATTRIBUTE_UNUSED;
1820 HOST_WIDE_INT c, lsb;
1822 if (GET_CODE (op) != CONST_INT)
1827 if ((c & 0x80000001) != 0x80000001)
1841 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1842 It is if there are no more than one 1->0 or 0->1 transitions.
1843 Reject all zeros, since zero should have been optimized away and
1844 confuses the making of MB and ME. */
1847 mask64_operand (op, mode)
1849 enum machine_mode mode ATTRIBUTE_UNUSED;
1851 if (GET_CODE (op) == CONST_INT)
1853 HOST_WIDE_INT c, lsb;
1857 /* Reject all zeros. */
1861 /* We don't change the number of transitions by inverting,
1862 so make sure we start with the LS bit zero. */
1866 /* Find the transition, and check that all bits above are 1's. */
1869 /* Match if all the bits above are 1's (or c is zero). */
1875 /* Like mask64_operand, but allow up to three transitions. This
1876 predicate is used by insn patterns that generate two rldicl or
1877 rldicr machine insns. */
1880 mask64_2_operand (op, mode)
1882 enum machine_mode mode ATTRIBUTE_UNUSED;
1884 if (GET_CODE (op) == CONST_INT)
1886 HOST_WIDE_INT c, lsb;
1890 /* Disallow all zeros. */
1894 /* We don't change the number of transitions by inverting,
1895 so make sure we start with the LS bit zero. */
1899 /* Find the first transition. */
1902 /* Invert to look for a second transition. */
1905 /* Erase first transition. */
1908 /* Find the second transition. */
1911 /* Invert to look for a third transition. */
1914 /* Erase second transition. */
1917 /* Find the third transition (if any). */
1920 /* Match if all the bits above are 1's (or c is zero). */
1926 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1927 implement ANDing by the mask IN. */
1929 build_mask64_2_operands (in, out)
1933 #if HOST_BITS_PER_WIDE_INT >= 64
1934 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1937 if (GET_CODE (in) != CONST_INT)
1943 /* Assume c initially something like 0x00fff000000fffff. The idea
1944 is to rotate the word so that the middle ^^^^^^ group of zeros
1945 is at the MS end and can be cleared with an rldicl mask. We then
1946 rotate back and clear off the MS ^^ group of zeros with a
1948 c = ~c; /* c == 0xff000ffffff00000 */
1949 lsb = c & -c; /* lsb == 0x0000000000100000 */
1950 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1951 c = ~c; /* c == 0x00fff000000fffff */
1952 c &= -lsb; /* c == 0x00fff00000000000 */
1953 lsb = c & -c; /* lsb == 0x0000100000000000 */
1954 c = ~c; /* c == 0xff000fffffffffff */
1955 c &= -lsb; /* c == 0xff00000000000000 */
1957 while ((lsb >>= 1) != 0)
1958 shift++; /* shift == 44 on exit from loop */
1959 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1960 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1961 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1965 /* Assume c initially something like 0xff000f0000000000. The idea
1966 is to rotate the word so that the ^^^ middle group of zeros
1967 is at the LS end and can be cleared with an rldicr mask. We then
1968 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1970 lsb = c & -c; /* lsb == 0x0000010000000000 */
1971 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1972 c = ~c; /* c == 0x00fff0ffffffffff */
1973 c &= -lsb; /* c == 0x00fff00000000000 */
1974 lsb = c & -c; /* lsb == 0x0000100000000000 */
1975 c = ~c; /* c == 0xff000fffffffffff */
1976 c &= -lsb; /* c == 0xff00000000000000 */
1978 while ((lsb >>= 1) != 0)
1979 shift++; /* shift == 44 on exit from loop */
1980 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1981 m1 >>= shift; /* m1 == 0x0000000000000fff */
1982 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1985 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1986 masks will be all 1's. We are guaranteed more than one transition. */
1987 out[0] = GEN_INT (64 - shift);
1988 out[1] = GEN_INT (m1);
1989 out[2] = GEN_INT (shift);
1990 out[3] = GEN_INT (m2);
1998 /* Return 1 if the operand is either a non-special register or a constant
1999 that can be used as the operand of a PowerPC64 logical AND insn. */
2002 and64_operand (op, mode)
2004 enum machine_mode mode;
2006 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2007 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2009 return (logical_operand (op, mode) || mask64_operand (op, mode));
2012 /* Like the above, but also match constants that can be implemented
2013 with two rldicl or rldicr insns. */
2016 and64_2_operand (op, mode)
2018 enum machine_mode mode;
2020 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2021 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2023 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2026 /* Return 1 if the operand is either a non-special register or a
2027 constant that can be used as the operand of an RS/6000 logical AND insn. */
2030 and_operand (op, mode)
2032 enum machine_mode mode;
2034 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2035 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2037 return (logical_operand (op, mode) || mask_operand (op, mode));
2040 /* Return 1 if the operand is a general register or memory operand. */
2043 reg_or_mem_operand (op, mode)
2045 enum machine_mode mode;
2047 return (gpc_reg_operand (op, mode)
2048 || memory_operand (op, mode)
2049 || volatile_mem_operand (op, mode));
2052 /* Return 1 if the operand is a general register or memory operand without
2053 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2057 lwa_operand (op, mode)
2059 enum machine_mode mode;
2063 if (reload_completed && GET_CODE (inner) == SUBREG)
2064 inner = SUBREG_REG (inner);
2066 return gpc_reg_operand (inner, mode)
2067 || (memory_operand (inner, mode)
2068 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2069 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2070 && (GET_CODE (XEXP (inner, 0)) != PLUS
2071 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2072 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2075 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2078 symbol_ref_operand (op, mode)
2080 enum machine_mode mode;
2082 if (mode != VOIDmode && GET_MODE (op) != mode)
2085 return (GET_CODE (op) == SYMBOL_REF
2086 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2089 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2090 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2093 call_operand (op, mode)
2095 enum machine_mode mode;
2097 if (mode != VOIDmode && GET_MODE (op) != mode)
2100 return (GET_CODE (op) == SYMBOL_REF
2101 || (GET_CODE (op) == REG
2102 && (REGNO (op) == LINK_REGISTER_REGNUM
2103 || REGNO (op) == COUNT_REGISTER_REGNUM
2104 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2107 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2111 current_file_function_operand (op, mode)
2113 enum machine_mode mode ATTRIBUTE_UNUSED;
2115 return (GET_CODE (op) == SYMBOL_REF
2116 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2117 && (SYMBOL_REF_LOCAL_P (op)
2118 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2121 /* Return 1 if this operand is a valid input for a move insn. */
2124 input_operand (op, mode)
2126 enum machine_mode mode;
2128 /* Memory is always valid. */
2129 if (memory_operand (op, mode))
2132 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2133 if (GET_CODE (op) == CONSTANT_P_RTX)
2136 /* For floating-point, easy constants are valid. */
2137 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2139 && easy_fp_constant (op, mode))
2142 /* Allow any integer constant. */
2143 if (GET_MODE_CLASS (mode) == MODE_INT
2144 && (GET_CODE (op) == CONST_INT
2145 || GET_CODE (op) == CONST_DOUBLE))
2148 /* Allow easy vector constants. */
2149 if (GET_CODE (op) == CONST_VECTOR
2150 && easy_vector_constant (op, mode))
2153 /* For floating-point or multi-word mode, the only remaining valid type
2155 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2156 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2157 return register_operand (op, mode);
2159 /* The only cases left are integral modes one word or smaller (we
2160 do not get called for MODE_CC values). These can be in any
2162 if (register_operand (op, mode))
2165 /* A SYMBOL_REF referring to the TOC is valid. */
2166 if (legitimate_constant_pool_address_p (op))
2169 /* A constant pool expression (relative to the TOC) is valid */
2170 if (toc_relative_expr_p (op))
2173 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2175 if (DEFAULT_ABI == ABI_V4
2176 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2177 && small_data_operand (op, Pmode))
2183 /* Return 1 for an operand in small memory on V.4/eabi. */
2186 small_data_operand (op, mode)
2187 rtx op ATTRIBUTE_UNUSED;
2188 enum machine_mode mode ATTRIBUTE_UNUSED;
2193 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2196 if (DEFAULT_ABI != ABI_V4)
2199 if (GET_CODE (op) == SYMBOL_REF)
2202 else if (GET_CODE (op) != CONST
2203 || GET_CODE (XEXP (op, 0)) != PLUS
2204 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2205 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2210 rtx sum = XEXP (op, 0);
2211 HOST_WIDE_INT summand;
2213 /* We have to be careful here, because it is the referenced address
2214 that must be 32k from _SDA_BASE_, not just the symbol. */
2215 summand = INTVAL (XEXP (sum, 1));
2216 if (summand < 0 || summand > g_switch_value)
2219 sym_ref = XEXP (sum, 0);
2222 return SYMBOL_REF_SMALL_P (sym_ref);
2228 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2231 constant_pool_expr_1 (op, have_sym, have_toc)
2236 switch (GET_CODE(op))
2239 if (CONSTANT_POOL_ADDRESS_P (op))
2241 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2249 else if (! strcmp (XSTR (op, 0), toc_label_name))
2258 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2259 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2261 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2270 constant_pool_expr_p (op)
2275 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2279 toc_relative_expr_p (op)
2284 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2287 /* SPE offset addressing is limited to 5-bits worth of double words. */
2288 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2291 legitimate_constant_pool_address_p (x)
2295 && GET_CODE (x) == PLUS
2296 && GET_CODE (XEXP (x, 0)) == REG
2297 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2298 && constant_pool_expr_p (XEXP (x, 1)));
2302 legitimate_small_data_p (mode, x)
2303 enum machine_mode mode;
2306 return (DEFAULT_ABI == ABI_V4
2307 && !flag_pic && !TARGET_TOC
2308 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2309 && small_data_operand (x, mode));
2313 legitimate_offset_address_p (mode, x, strict)
2314 enum machine_mode mode;
2318 unsigned HOST_WIDE_INT offset, extra;
2320 if (GET_CODE (x) != PLUS)
2322 if (GET_CODE (XEXP (x, 0)) != REG)
2324 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2326 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2329 offset = INTVAL (XEXP (x, 1));
2337 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2338 which leaves the only valid constant offset of zero, which by
2339 canonicalization rules is also invalid. */
2346 /* SPE vector modes. */
2347 return SPE_CONST_OFFSET_OK (offset);
2353 else if (offset & 3)
2361 else if (offset & 3)
2371 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2375 legitimate_indexed_address_p (x, strict)
2381 if (GET_CODE (x) != PLUS)
2386 if (!REG_P (op0) || !REG_P (op1))
2389 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2390 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2391 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2392 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2396 legitimate_indirect_address_p (x, strict)
2400 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2404 legitimate_lo_sum_address_p (mode, x, strict)
2405 enum machine_mode mode;
2409 if (GET_CODE (x) != LO_SUM)
2411 if (GET_CODE (XEXP (x, 0)) != REG)
2413 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2419 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2423 if (GET_MODE_NUNITS (mode) != 1)
2425 if (GET_MODE_BITSIZE (mode) > 32
2426 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2429 return CONSTANT_P (x);
2436 /* Try machine-dependent ways of modifying an illegitimate address
2437 to be legitimate. If we find one, return the new, valid address.
2438 This is used from only one place: `memory_address' in explow.c.
2440 OLDX is the address as it was before break_out_memory_refs was
2441 called. In some cases it is useful to look at this to decide what
2444 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2446 It is always safe for this function to do nothing. It exists to
2447 recognize opportunities to optimize the output.
2449 On RS/6000, first check for the sum of a register with a constant
2450 integer that is out of range. If so, generate code to add the
2451 constant with the low-order 16 bits masked to the register and force
2452 this result into another register (this can be done with `cau').
2453 Then generate an address of REG+(CONST&0xffff), allowing for the
2454 possibility of bit 16 being a one.
2456 Then check for the sum of a register and something not constant, try to
2457 load the other things into a register and return the sum. */
2460 rs6000_legitimize_address (x, oldx, mode)
2462 rtx oldx ATTRIBUTE_UNUSED;
2463 enum machine_mode mode;
2465 if (GET_CODE (x) == PLUS
2466 && GET_CODE (XEXP (x, 0)) == REG
2467 && GET_CODE (XEXP (x, 1)) == CONST_INT
2468 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2470 HOST_WIDE_INT high_int, low_int;
2472 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2473 high_int = INTVAL (XEXP (x, 1)) - low_int;
2474 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2475 GEN_INT (high_int)), 0);
2476 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2478 else if (GET_CODE (x) == PLUS
2479 && GET_CODE (XEXP (x, 0)) == REG
2480 && GET_CODE (XEXP (x, 1)) != CONST_INT
2481 && GET_MODE_NUNITS (mode) == 1
2482 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2484 || (mode != DFmode && mode != TFmode))
2485 && (TARGET_POWERPC64 || mode != DImode)
2488 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2489 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2491 else if (ALTIVEC_VECTOR_MODE (mode))
2495 /* Make sure both operands are registers. */
2496 if (GET_CODE (x) == PLUS)
2497 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2498 force_reg (Pmode, XEXP (x, 1)));
2500 reg = force_reg (Pmode, x);
2503 else if (SPE_VECTOR_MODE (mode))
2505 /* We accept [reg + reg] and [reg + OFFSET]. */
2507 if (GET_CODE (x) == PLUS)
2509 rtx op1 = XEXP (x, 0);
2510 rtx op2 = XEXP (x, 1);
2512 op1 = force_reg (Pmode, op1);
2514 if (GET_CODE (op2) != REG
2515 && (GET_CODE (op2) != CONST_INT
2516 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2517 op2 = force_reg (Pmode, op2);
2519 return gen_rtx_PLUS (Pmode, op1, op2);
2522 return force_reg (Pmode, x);
2528 && GET_CODE (x) != CONST_INT
2529 && GET_CODE (x) != CONST_DOUBLE
2531 && GET_MODE_NUNITS (mode) == 1
2532 && (GET_MODE_BITSIZE (mode) <= 32
2533 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2535 rtx reg = gen_reg_rtx (Pmode);
2536 emit_insn (gen_elf_high (reg, (x)));
2537 return gen_rtx_LO_SUM (Pmode, reg, (x));
2539 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2542 && ! MACHO_DYNAMIC_NO_PIC_P
2544 && GET_CODE (x) != CONST_INT
2545 && GET_CODE (x) != CONST_DOUBLE
2547 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2551 rtx reg = gen_reg_rtx (Pmode);
2552 emit_insn (gen_macho_high (reg, (x)));
2553 return gen_rtx_LO_SUM (Pmode, reg, (x));
2556 && constant_pool_expr_p (x)
2557 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2559 return create_TOC_reference (x);
2565 /* The convention appears to be to define this wherever it is used.
2566 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2567 is now used here. */
2568 #ifndef REG_MODE_OK_FOR_BASE_P
2569 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2572 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2573 replace the input X, or the original X if no replacement is called for.
2574 The output parameter *WIN is 1 if the calling macro should goto WIN,
2577 For RS/6000, we wish to handle large displacements off a base
2578 register by splitting the addend across an addiu/addis and the mem insn.
2579 This cuts number of extra insns needed from 3 to 1.
2581 On Darwin, we use this to generate code for floating point constants.
2582 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2583 The Darwin code is inside #if TARGET_MACHO because only then is
2584 machopic_function_base_name() defined. */
2586 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2588 enum machine_mode mode;
2591 int ind_levels ATTRIBUTE_UNUSED;
2594 /* We must recognize output that we have already generated ourselves. */
2595 if (GET_CODE (x) == PLUS
2596 && GET_CODE (XEXP (x, 0)) == PLUS
2597 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2598 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2599 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2601 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2602 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2603 opnum, (enum reload_type)type);
2609 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2610 && GET_CODE (x) == LO_SUM
2611 && GET_CODE (XEXP (x, 0)) == PLUS
2612 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2613 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2614 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2615 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2616 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2617 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2618 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2620 /* Result of previous invocation of this function on Darwin
2621 floating point constant. */
2622 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2623 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2624 opnum, (enum reload_type)type);
2629 if (GET_CODE (x) == PLUS
2630 && GET_CODE (XEXP (x, 0)) == REG
2631 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2632 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2633 && GET_CODE (XEXP (x, 1)) == CONST_INT
2634 && !SPE_VECTOR_MODE (mode)
2635 && !ALTIVEC_VECTOR_MODE (mode))
2637 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2638 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2640 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2642 /* Check for 32-bit overflow. */
2643 if (high + low != val)
2649 /* Reload the high part into a base reg; leave the low part
2650 in the mem directly. */
2652 x = gen_rtx_PLUS (GET_MODE (x),
2653 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2657 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2658 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2659 opnum, (enum reload_type)type);
2664 if (GET_CODE (x) == SYMBOL_REF
2665 && DEFAULT_ABI == ABI_DARWIN
2666 && !ALTIVEC_VECTOR_MODE (mode)
2669 /* Darwin load of floating point constant. */
2670 rtx offset = gen_rtx (CONST, Pmode,
2671 gen_rtx (MINUS, Pmode, x,
2672 gen_rtx (SYMBOL_REF, Pmode,
2673 machopic_function_base_name ())));
2674 x = gen_rtx (LO_SUM, GET_MODE (x),
2675 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2676 gen_rtx (HIGH, Pmode, offset)), offset);
2677 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2678 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2679 opnum, (enum reload_type)type);
2683 if (GET_CODE (x) == SYMBOL_REF
2684 && DEFAULT_ABI == ABI_DARWIN
2685 && !ALTIVEC_VECTOR_MODE (mode)
2686 && MACHO_DYNAMIC_NO_PIC_P)
2688 /* Darwin load of floating point constant. */
2689 x = gen_rtx (LO_SUM, GET_MODE (x),
2690 gen_rtx (HIGH, Pmode, x), x);
2691 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2692 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2693 opnum, (enum reload_type)type);
2699 && constant_pool_expr_p (x)
2700 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2702 (x) = create_TOC_reference (x);
2710 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2711 that is a valid memory address for an instruction.
2712 The MODE argument is the machine mode for the MEM expression
2713 that wants to use this address.
2715 On the RS/6000, there are four valid address: a SYMBOL_REF that
2716 refers to a constant pool entry of an address (or the sum of it
2717 plus a constant), a short (16-bit signed) constant plus a register,
2718 the sum of two registers, or a register indirect, possibly with an
2719 auto-increment. For DFmode and DImode with a constant plus register,
2720 we must ensure that both words are addressable or PowerPC64 with offset
2723 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2724 32-bit DImode, TImode), indexed addressing cannot be used because
2725 adjacent memory cells are accessed by adding word-sized offsets
2726 during assembly output. */
2728 rs6000_legitimate_address (mode, x, reg_ok_strict)
2729 enum machine_mode mode;
2733 if (legitimate_indirect_address_p (x, reg_ok_strict))
2735 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2736 && !ALTIVEC_VECTOR_MODE (mode)
2737 && !SPE_VECTOR_MODE (mode)
2739 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
2741 if (legitimate_small_data_p (mode, x))
2743 if (legitimate_constant_pool_address_p (x))
2745 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2747 && GET_CODE (x) == PLUS
2748 && GET_CODE (XEXP (x, 0)) == REG
2749 && XEXP (x, 0) == virtual_stack_vars_rtx
2750 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2752 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
2755 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2757 || (mode != DFmode && mode != TFmode))
2758 && (TARGET_POWERPC64 || mode != DImode)
2759 && legitimate_indexed_address_p (x, reg_ok_strict))
2761 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
2766 /* Go to LABEL if ADDR (a legitimate address expression)
2767 has an effect that depends on the machine mode it is used for.
2769 On the RS/6000 this is true of all integral offsets (since AltiVec
2770 modes don't allow them) or is a pre-increment or decrement.
2772 ??? Except that due to conceptual problems in offsettable_address_p
2773 we can't really report the problems of integral offsets. So leave
2774 this assuming that the adjustable offset must be valid for the
2775 sub-words of a TFmode operand, which is what we had before. */
2778 rs6000_mode_dependent_address (addr)
2781 switch (GET_CODE (addr))
2784 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2786 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
2787 return val + 12 + 0x8000 >= 0x10000;
2796 return TARGET_UPDATE;
2805 /* Try to output insns to set TARGET equal to the constant C if it can
2806 be done in less than N insns. Do all computations in MODE.
2807 Returns the place where the output has been placed if it can be
2808 done and the insns have been emitted. If it would take more than N
2809 insns, zero is returned and no insns and emitted. */
2812 rs6000_emit_set_const (dest, mode, source, n)
2814 enum machine_mode mode;
2815 int n ATTRIBUTE_UNUSED;
2817 rtx result, insn, set;
2818 HOST_WIDE_INT c0, c1;
2820 if (mode == QImode || mode == HImode)
2823 dest = gen_reg_rtx (mode);
2824 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2827 else if (mode == SImode)
2829 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2831 emit_insn (gen_rtx_SET (VOIDmode, result,
2832 GEN_INT (INTVAL (source)
2833 & (~ (HOST_WIDE_INT) 0xffff))));
2834 emit_insn (gen_rtx_SET (VOIDmode, dest,
2835 gen_rtx_IOR (SImode, result,
2836 GEN_INT (INTVAL (source) & 0xffff))));
2839 else if (mode == DImode)
2841 if (GET_CODE (source) == CONST_INT)
2843 c0 = INTVAL (source);
2846 else if (GET_CODE (source) == CONST_DOUBLE)
2848 #if HOST_BITS_PER_WIDE_INT >= 64
2849 c0 = CONST_DOUBLE_LOW (source);
2852 c0 = CONST_DOUBLE_LOW (source);
2853 c1 = CONST_DOUBLE_HIGH (source);
2859 result = rs6000_emit_set_long_const (dest, c0, c1);
2864 insn = get_last_insn ();
2865 set = single_set (insn);
2866 if (! CONSTANT_P (SET_SRC (set)))
2867 set_unique_reg_note (insn, REG_EQUAL, source);
2872 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2873 fall back to a straight forward decomposition. We do this to avoid
2874 exponential run times encountered when looking for longer sequences
2875 with rs6000_emit_set_const. */
2877 rs6000_emit_set_long_const (dest, c1, c2)
2879 HOST_WIDE_INT c1, c2;
2881 if (!TARGET_POWERPC64)
2883 rtx operand1, operand2;
2885 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2887 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2889 emit_move_insn (operand1, GEN_INT (c1));
2890 emit_move_insn (operand2, GEN_INT (c2));
2894 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2897 ud2 = (c1 & 0xffff0000) >> 16;
2898 #if HOST_BITS_PER_WIDE_INT >= 64
2902 ud4 = (c2 & 0xffff0000) >> 16;
2904 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2905 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2908 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2910 emit_move_insn (dest, GEN_INT (ud1));
2913 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2914 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2917 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2920 emit_move_insn (dest, GEN_INT (ud2 << 16));
2922 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2924 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2925 || (ud4 == 0 && ! (ud3 & 0x8000)))
2928 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2931 emit_move_insn (dest, GEN_INT (ud3 << 16));
2934 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2935 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2937 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2942 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2945 emit_move_insn (dest, GEN_INT (ud4 << 16));
2948 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2950 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2952 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2953 GEN_INT (ud2 << 16)));
2955 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2961 /* Emit a move from SOURCE to DEST in mode MODE. */
2963 rs6000_emit_move (dest, source, mode)
2966 enum machine_mode mode;
2970 operands[1] = source;
2972 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2973 if (GET_CODE (operands[1]) == CONST_DOUBLE
2974 && ! FLOAT_MODE_P (mode)
2975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2977 /* FIXME. This should never happen. */
2978 /* Since it seems that it does, do the safe thing and convert
2980 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2982 if (GET_CODE (operands[1]) == CONST_DOUBLE
2983 && ! FLOAT_MODE_P (mode)
2984 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2985 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2986 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2987 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2990 /* Check if GCC is setting up a block move that will end up using FP
2991 registers as temporaries. We must make sure this is acceptable. */
2992 if (GET_CODE (operands[0]) == MEM
2993 && GET_CODE (operands[1]) == MEM
2995 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2996 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2997 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2998 ? 32 : MEM_ALIGN (operands[0])))
2999 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3001 : MEM_ALIGN (operands[1]))))
3002 && ! MEM_VOLATILE_P (operands [0])
3003 && ! MEM_VOLATILE_P (operands [1]))
3005 emit_move_insn (adjust_address (operands[0], SImode, 0),
3006 adjust_address (operands[1], SImode, 0));
3007 emit_move_insn (adjust_address (operands[0], SImode, 4),
3008 adjust_address (operands[1], SImode, 4));
3012 if (!no_new_pseudos)
3014 if (GET_CODE (operands[1]) == MEM && optimize > 0
3015 && (mode == QImode || mode == HImode || mode == SImode)
3016 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3018 rtx reg = gen_reg_rtx (word_mode);
3020 emit_insn (gen_rtx_SET (word_mode, reg,
3021 gen_rtx_ZERO_EXTEND (word_mode,
3023 operands[1] = gen_lowpart (mode, reg);
3025 if (GET_CODE (operands[0]) != REG)
3026 operands[1] = force_reg (mode, operands[1]);
3029 if (mode == SFmode && ! TARGET_POWERPC
3030 && TARGET_HARD_FLOAT && TARGET_FPRS
3031 && GET_CODE (operands[0]) == MEM)
3035 if (reload_in_progress || reload_completed)
3036 regnum = true_regnum (operands[1]);
3037 else if (GET_CODE (operands[1]) == REG)
3038 regnum = REGNO (operands[1]);
3042 /* If operands[1] is a register, on POWER it may have
3043 double-precision data in it, so truncate it to single
3045 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3048 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3049 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3050 operands[1] = newreg;
3054 /* Handle the case where reload calls us with an invalid address. */
3055 if (reload_in_progress && mode == Pmode
3056 && (! general_operand (operands[1], mode)
3057 || ! nonimmediate_operand (operands[0], mode)))
3060 /* Handle the case of CONSTANT_P_RTX. */
3061 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3064 /* FIXME: In the long term, this switch statement should go away
3065 and be replaced by a sequence of tests based on things like
3071 if (CONSTANT_P (operands[1])
3072 && GET_CODE (operands[1]) != CONST_INT)
3073 operands[1] = force_const_mem (mode, operands[1]);
3079 if (CONSTANT_P (operands[1])
3080 && ! easy_fp_constant (operands[1], mode))
3081 operands[1] = force_const_mem (mode, operands[1]);
3092 if (CONSTANT_P (operands[1])
3093 && !easy_vector_constant (operands[1], mode))
3094 operands[1] = force_const_mem (mode, operands[1]);
3099 /* Use default pattern for address of ELF small data */
3102 && DEFAULT_ABI == ABI_V4
3103 && (GET_CODE (operands[1]) == SYMBOL_REF
3104 || GET_CODE (operands[1]) == CONST)
3105 && small_data_operand (operands[1], mode))
3107 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3111 if (DEFAULT_ABI == ABI_V4
3112 && mode == Pmode && mode == SImode
3113 && flag_pic == 1 && got_operand (operands[1], mode))
3115 emit_insn (gen_movsi_got (operands[0], operands[1]));
3119 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3123 && CONSTANT_P (operands[1])
3124 && GET_CODE (operands[1]) != HIGH
3125 && GET_CODE (operands[1]) != CONST_INT)
3127 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3129 /* If this is a function address on -mcall-aixdesc,
3130 convert it to the address of the descriptor. */
3131 if (DEFAULT_ABI == ABI_AIX
3132 && GET_CODE (operands[1]) == SYMBOL_REF
3133 && XSTR (operands[1], 0)[0] == '.')
3135 const char *name = XSTR (operands[1], 0);
3137 while (*name == '.')
3139 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3140 CONSTANT_POOL_ADDRESS_P (new_ref)
3141 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3142 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3143 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3144 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3145 operands[1] = new_ref;
3148 if (DEFAULT_ABI == ABI_DARWIN)
3151 if (MACHO_DYNAMIC_NO_PIC_P)
3153 /* Take care of any required data indirection. */
3154 operands[1] = rs6000_machopic_legitimize_pic_address (
3155 operands[1], mode, operands[0]);
3156 if (operands[0] != operands[1])
3157 emit_insn (gen_rtx_SET (VOIDmode,
3158 operands[0], operands[1]));
3162 emit_insn (gen_macho_high (target, operands[1]));
3163 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3167 emit_insn (gen_elf_high (target, operands[1]));
3168 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3172 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3173 and we have put it in the TOC, we just need to make a TOC-relative
3176 && GET_CODE (operands[1]) == SYMBOL_REF
3177 && constant_pool_expr_p (operands[1])
3178 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3179 get_pool_mode (operands[1])))
3181 operands[1] = create_TOC_reference (operands[1]);
3183 else if (mode == Pmode
3184 && CONSTANT_P (operands[1])
3185 && ((GET_CODE (operands[1]) != CONST_INT
3186 && ! easy_fp_constant (operands[1], mode))
3187 || (GET_CODE (operands[1]) == CONST_INT
3188 && num_insns_constant (operands[1], mode) > 2)
3189 || (GET_CODE (operands[0]) == REG
3190 && FP_REGNO_P (REGNO (operands[0]))))
3191 && GET_CODE (operands[1]) != HIGH
3192 && ! legitimate_constant_pool_address_p (operands[1])
3193 && ! toc_relative_expr_p (operands[1]))
3195 /* Emit a USE operation so that the constant isn't deleted if
3196 expensive optimizations are turned on because nobody
3197 references it. This should only be done for operands that
3198 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3199 This should not be done for operands that contain LABEL_REFs.
3200 For now, we just handle the obvious case. */
3201 if (GET_CODE (operands[1]) != LABEL_REF)
3202 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3205 /* Darwin uses a special PIC legitimizer. */
3206 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3209 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3211 if (operands[0] != operands[1])
3212 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3217 /* If we are to limit the number of things we put in the TOC and
3218 this is a symbol plus a constant we can add in one insn,
3219 just put the symbol in the TOC and add the constant. Don't do
3220 this if reload is in progress. */
3221 if (GET_CODE (operands[1]) == CONST
3222 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3223 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3224 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3225 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3226 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3227 && ! side_effects_p (operands[0]))
3230 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3231 rtx other = XEXP (XEXP (operands[1], 0), 1);
3233 sym = force_reg (mode, sym);
3235 emit_insn (gen_addsi3 (operands[0], sym, other));
3237 emit_insn (gen_adddi3 (operands[0], sym, other));
3241 operands[1] = force_const_mem (mode, operands[1]);
3244 && constant_pool_expr_p (XEXP (operands[1], 0))
3245 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3246 get_pool_constant (XEXP (operands[1], 0)),
3247 get_pool_mode (XEXP (operands[1], 0))))
3250 = gen_rtx_MEM (mode,
3251 create_TOC_reference (XEXP (operands[1], 0)));
3252 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3253 RTX_UNCHANGING_P (operands[1]) = 1;
3259 if (GET_CODE (operands[0]) == MEM
3260 && GET_CODE (XEXP (operands[0], 0)) != REG
3261 && ! reload_in_progress)
3263 = replace_equiv_address (operands[0],
3264 copy_addr_to_reg (XEXP (operands[0], 0)));
3266 if (GET_CODE (operands[1]) == MEM
3267 && GET_CODE (XEXP (operands[1], 0)) != REG
3268 && ! reload_in_progress)
3270 = replace_equiv_address (operands[1],
3271 copy_addr_to_reg (XEXP (operands[1], 0)));
3274 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3276 gen_rtx_SET (VOIDmode,
3277 operands[0], operands[1]),
3278 gen_rtx_CLOBBER (VOIDmode,
3279 gen_rtx_SCRATCH (SImode)))));
3288 /* Above, we may have called force_const_mem which may have returned
3289 an invalid address. If we can, fix this up; otherwise, reload will
3290 have to deal with it. */
3291 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3292 operands[1] = validize_mem (operands[1]);
3295 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3298 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3299 for a call to a function whose data type is FNTYPE.
3300 For a library call, FNTYPE is 0.
3302 For incoming args we set the number of arguments in the prototype large
3303 so we never return a PARALLEL. */
3306 init_cumulative_args (cum, fntype, libname, incoming)
3307 CUMULATIVE_ARGS *cum;
3309 rtx libname ATTRIBUTE_UNUSED;
3312 static CUMULATIVE_ARGS zero_cumulative;
3314 *cum = zero_cumulative;
3316 cum->fregno = FP_ARG_MIN_REG;
3317 cum->vregno = ALTIVEC_ARG_MIN_REG;
3318 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3319 cum->call_cookie = CALL_NORMAL;
3320 cum->sysv_gregno = GP_ARG_MIN_REG;
3323 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3325 else if (cum->prototype)
3326 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3327 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3328 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3331 cum->nargs_prototype = 0;
3333 cum->orig_nargs = cum->nargs_prototype;
3335 /* Check for a longcall attribute. */
3337 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3338 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3339 cum->call_cookie = CALL_LONG;
3341 if (TARGET_DEBUG_ARG)
3343 fprintf (stderr, "\ninit_cumulative_args:");
3346 tree ret_type = TREE_TYPE (fntype);
3347 fprintf (stderr, " ret code = %s,",
3348 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3351 if (cum->call_cookie & CALL_LONG)
3352 fprintf (stderr, " longcall,");
3354 fprintf (stderr, " proto = %d, nargs = %d\n",
3355 cum->prototype, cum->nargs_prototype);
3359 /* If defined, a C expression which determines whether, and in which
3360 direction, to pad out an argument with extra space. The value
3361 should be of type `enum direction': either `upward' to pad above
3362 the argument, `downward' to pad below, or `none' to inhibit
3365 For the AIX ABI structs are always stored left shifted in their
3369 function_arg_padding (mode, type)
3370 enum machine_mode mode;
3373 if (type != 0 && AGGREGATE_TYPE_P (type))
3376 /* This is the default definition. */
3377 return (! BYTES_BIG_ENDIAN
3380 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3381 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3382 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3383 ? downward : upward));
3386 /* If defined, a C expression that gives the alignment boundary, in bits,
3387 of an argument with the specified mode and type. If it is not defined,
3388 PARM_BOUNDARY is used for all arguments.
3390 V.4 wants long longs to be double word aligned. */
3393 function_arg_boundary (mode, type)
3394 enum machine_mode mode;
3395 tree type ATTRIBUTE_UNUSED;
3397 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3399 else if (SPE_VECTOR_MODE (mode))
3401 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3404 return PARM_BOUNDARY;
3407 /* Update the data in CUM to advance over an argument
3408 of mode MODE and data type TYPE.
3409 (TYPE is null for libcalls where that information may not be available.) */
3412 function_arg_advance (cum, mode, type, named)
3413 CUMULATIVE_ARGS *cum;
3414 enum machine_mode mode;
3418 cum->nargs_prototype--;
3420 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3422 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3425 cum->words += RS6000_ARG_SIZE (mode, type);
3427 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3428 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3430 else if (DEFAULT_ABI == ABI_V4)
3432 if (TARGET_HARD_FLOAT && TARGET_FPRS
3433 && (mode == SFmode || mode == DFmode))
3435 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3440 cum->words += cum->words & 1;
3441 cum->words += RS6000_ARG_SIZE (mode, type);
3447 int gregno = cum->sysv_gregno;
3449 /* Aggregates and IEEE quad get passed by reference. */
3450 if ((type && AGGREGATE_TYPE_P (type))
3454 n_words = RS6000_ARG_SIZE (mode, type);
3456 /* Long long and SPE vectors are put in odd registers. */
3457 if (n_words == 2 && (gregno & 1) == 0)
3460 /* Long long and SPE vectors are not split between registers
3462 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3464 /* Long long is aligned on the stack. */
3466 cum->words += cum->words & 1;
3467 cum->words += n_words;
3470 /* Note: continuing to accumulate gregno past when we've started
3471 spilling to the stack indicates the fact that we've started
3472 spilling to the stack to expand_builtin_saveregs. */
3473 cum->sysv_gregno = gregno + n_words;
3476 if (TARGET_DEBUG_ARG)
3478 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3479 cum->words, cum->fregno);
3480 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3481 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3482 fprintf (stderr, "mode = %4s, named = %d\n",
3483 GET_MODE_NAME (mode), named);
3488 int align = (TARGET_32BIT && (cum->words & 1) != 0
3489 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3491 cum->words += align + RS6000_ARG_SIZE (mode, type);
3493 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3494 && TARGET_HARD_FLOAT && TARGET_FPRS)
3495 cum->fregno += (mode == TFmode ? 2 : 1);
3497 if (TARGET_DEBUG_ARG)
3499 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3500 cum->words, cum->fregno);
3501 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3502 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3503 fprintf (stderr, "named = %d, align = %d\n", named, align);
3508 /* Determine where to put an argument to a function.
3509 Value is zero to push the argument on the stack,
3510 or a hard register in which to store the argument.
3512 MODE is the argument's machine mode.
3513 TYPE is the data type of the argument (as a tree).
3514 This is null for libcalls where that information may
3516 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3517 the preceding args and about the function being called.
3518 NAMED is nonzero if this argument is a named parameter
3519 (otherwise it is an extra parameter matching an ellipsis).
3521 On RS/6000 the first eight words of non-FP are normally in registers
3522 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3523 Under V.4, the first 8 FP args are in registers.
3525 If this is floating-point and no prototype is specified, we use
3526 both an FP and integer register (or possibly FP reg and stack). Library
3527 functions (when TYPE is zero) always have the proper types for args,
3528 so we can pass the FP value just in one register. emit_library_function
3529 doesn't support PARALLEL anyway. */
3532 function_arg (cum, mode, type, named)
3533 CUMULATIVE_ARGS *cum;
3534 enum machine_mode mode;
3538 enum rs6000_abi abi = DEFAULT_ABI;
3540 /* Return a marker to indicate whether CR1 needs to set or clear the
3541 bit that V.4 uses to say fp args were passed in registers.
3542 Assume that we don't need the marker for software floating point,
3543 or compiler generated library calls. */
3544 if (mode == VOIDmode)
3547 && cum->nargs_prototype < 0
3548 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3550 /* For the SPE, we need to crxor CR6 always. */
3552 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3553 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3554 return GEN_INT (cum->call_cookie
3555 | ((cum->fregno == FP_ARG_MIN_REG)
3556 ? CALL_V4_SET_FP_ARGS
3557 : CALL_V4_CLEAR_FP_ARGS));
3560 return GEN_INT (cum->call_cookie);
3563 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3565 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3566 return gen_rtx_REG (mode, cum->vregno);
3570 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3572 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3573 return gen_rtx_REG (mode, cum->sysv_gregno);
3577 else if (abi == ABI_V4)
3579 if (TARGET_HARD_FLOAT && TARGET_FPRS
3580 && (mode == SFmode || mode == DFmode))
3582 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3583 return gen_rtx_REG (mode, cum->fregno);
3590 int gregno = cum->sysv_gregno;
3592 /* Aggregates and IEEE quad get passed by reference. */
3593 if ((type && AGGREGATE_TYPE_P (type))
3597 n_words = RS6000_ARG_SIZE (mode, type);
3599 /* Long long and SPE vectors are put in odd registers. */
3600 if (n_words == 2 && (gregno & 1) == 0)
3603 /* Long long and SPE vectors are not split between registers
3605 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3607 /* SPE vectors in ... get split into 2 registers. */
3608 if (TARGET_SPE && TARGET_SPE_ABI
3609 && SPE_VECTOR_MODE (mode) && !named)
3612 enum machine_mode m = SImode;
3614 r1 = gen_rtx_REG (m, gregno);
3615 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3616 r2 = gen_rtx_REG (m, gregno + 1);
3617 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3618 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3620 return gen_rtx_REG (mode, gregno);
3628 int align = (TARGET_32BIT && (cum->words & 1) != 0
3629 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3630 int align_words = cum->words + align;
3632 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3635 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3638 || ((cum->nargs_prototype > 0)
3639 /* IBM AIX extended its linkage convention definition always
3640 to require FP args after register save area hole on the
3642 && (DEFAULT_ABI != ABI_AIX
3644 || (align_words < GP_ARG_NUM_REG))))
3645 return gen_rtx_REG (mode, cum->fregno);
3647 return gen_rtx_PARALLEL (mode,
3649 gen_rtx_EXPR_LIST (VOIDmode,
3650 ((align_words >= GP_ARG_NUM_REG)
3653 + RS6000_ARG_SIZE (mode, type)
3655 /* If this is partially on the stack, then
3656 we only include the portion actually
3657 in registers here. */
3658 ? gen_rtx_REG (SImode,
3659 GP_ARG_MIN_REG + align_words)
3660 : gen_rtx_REG (mode,
3661 GP_ARG_MIN_REG + align_words))),
3663 gen_rtx_EXPR_LIST (VOIDmode,
3664 gen_rtx_REG (mode, cum->fregno),
3667 else if (align_words < GP_ARG_NUM_REG)
3668 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3674 /* For an arg passed partly in registers and partly in memory,
3675 this is the number of registers used.
3676 For args passed entirely in registers or entirely in memory, zero. */
3679 function_arg_partial_nregs (cum, mode, type, named)
3680 CUMULATIVE_ARGS *cum;
3681 enum machine_mode mode;
3683 int named ATTRIBUTE_UNUSED;
3685 if (DEFAULT_ABI == ABI_V4)
3688 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3689 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3691 if (cum->nargs_prototype >= 0)
3695 if (cum->words < GP_ARG_NUM_REG
3696 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3698 int ret = GP_ARG_NUM_REG - cum->words;
3699 if (ret && TARGET_DEBUG_ARG)
3700 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3708 /* A C expression that indicates when an argument must be passed by
3709 reference. If nonzero for an argument, a copy of that argument is
3710 made in memory and a pointer to the argument is passed instead of
3711 the argument itself. The pointer is passed in whatever way is
3712 appropriate for passing a pointer to that type.
3714 Under V.4, structures and unions are passed by reference.
3716 As an extension to all ABIs, variable sized types are passed by
3720 function_arg_pass_by_reference (cum, mode, type, named)
3721 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3722 enum machine_mode mode ATTRIBUTE_UNUSED;
3724 int named ATTRIBUTE_UNUSED;
3726 if (DEFAULT_ABI == ABI_V4
3727 && ((type && AGGREGATE_TYPE_P (type))
3730 if (TARGET_DEBUG_ARG)
3731 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3735 return type && int_size_in_bytes (type) <= 0;
3738 /* Perform any needed actions needed for a function that is receiving a
3739 variable number of arguments.
3743 MODE and TYPE are the mode and type of the current parameter.
3745 PRETEND_SIZE is a variable that should be set to the amount of stack
3746 that must be pushed by the prolog to pretend that our caller pushed
3749 Normally, this macro will push all remaining incoming registers on the
3750 stack and set PRETEND_SIZE to the length of the registers pushed. */
3753 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3754 CUMULATIVE_ARGS *cum;
3755 enum machine_mode mode;
3757 int *pretend_size ATTRIBUTE_UNUSED;
3761 CUMULATIVE_ARGS next_cum;
3762 int reg_size = TARGET_32BIT ? 4 : 8;
3763 rtx save_area = NULL_RTX, mem;
3764 int first_reg_offset, set;
3768 fntype = TREE_TYPE (current_function_decl);
3769 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3770 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3771 != void_type_node));
3773 /* For varargs, we do not want to skip the dummy va_dcl argument.
3774 For stdargs, we do want to skip the last named argument. */
3777 function_arg_advance (&next_cum, mode, type, 1);
3779 if (DEFAULT_ABI == ABI_V4)
3781 /* Indicate to allocate space on the stack for varargs save area. */
3782 cfun->machine->sysv_varargs_p = 1;
3784 save_area = plus_constant (virtual_stack_vars_rtx,
3785 - RS6000_VARARGS_SIZE);
3787 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3791 first_reg_offset = next_cum.words;
3792 save_area = virtual_incoming_args_rtx;
3793 cfun->machine->sysv_varargs_p = 0;
3795 if (MUST_PASS_IN_STACK (mode, type))
3796 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3799 set = get_varargs_alias_set ();
3800 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3802 mem = gen_rtx_MEM (BLKmode,
3803 plus_constant (save_area,
3804 first_reg_offset * reg_size)),
3805 set_mem_alias_set (mem, set);
3806 set_mem_align (mem, BITS_PER_WORD);
3808 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
3809 GP_ARG_NUM_REG - first_reg_offset);
3812 /* Save FP registers if needed. */
3813 if (DEFAULT_ABI == ABI_V4
3814 && TARGET_HARD_FLOAT && TARGET_FPRS
3816 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3818 int fregno = next_cum.fregno;
3819 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3820 rtx lab = gen_label_rtx ();
3821 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3823 emit_jump_insn (gen_rtx_SET (VOIDmode,
3825 gen_rtx_IF_THEN_ELSE (VOIDmode,
3826 gen_rtx_NE (VOIDmode, cr1,
3828 gen_rtx_LABEL_REF (VOIDmode, lab),
3831 while (fregno <= FP_ARG_V4_MAX_REG)
3833 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3834 set_mem_alias_set (mem, set);
3835 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3844 /* Create the va_list data type. */
3847 rs6000_build_va_list ()
3849 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3851 /* For AIX, prefer 'char *' because that's what the system
3852 header files like. */
3853 if (DEFAULT_ABI != ABI_V4)
3854 return build_pointer_type (char_type_node);
3856 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3857 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3859 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3860 unsigned_char_type_node);
3861 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3862 unsigned_char_type_node);
3863 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3865 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3868 DECL_FIELD_CONTEXT (f_gpr) = record;
3869 DECL_FIELD_CONTEXT (f_fpr) = record;
3870 DECL_FIELD_CONTEXT (f_ovf) = record;
3871 DECL_FIELD_CONTEXT (f_sav) = record;
3873 TREE_CHAIN (record) = type_decl;
3874 TYPE_NAME (record) = type_decl;
3875 TYPE_FIELDS (record) = f_gpr;
3876 TREE_CHAIN (f_gpr) = f_fpr;
3877 TREE_CHAIN (f_fpr) = f_ovf;
3878 TREE_CHAIN (f_ovf) = f_sav;
3880 layout_type (record);
3882 /* The correct type is an array type of one element. */
3883 return build_array_type (record, build_index_type (size_zero_node));
3886 /* Implement va_start. */
3889 rs6000_va_start (valist, nextarg)
3893 HOST_WIDE_INT words, n_gpr, n_fpr;
3894 tree f_gpr, f_fpr, f_ovf, f_sav;
3895 tree gpr, fpr, ovf, sav, t;
3897 /* Only SVR4 needs something special. */
3898 if (DEFAULT_ABI != ABI_V4)
3900 std_expand_builtin_va_start (valist, nextarg);
3904 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3905 f_fpr = TREE_CHAIN (f_gpr);
3906 f_ovf = TREE_CHAIN (f_fpr);
3907 f_sav = TREE_CHAIN (f_ovf);
3909 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3910 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3911 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3912 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3913 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3915 /* Count number of gp and fp argument registers used. */
3916 words = current_function_args_info.words;
3917 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3918 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3920 if (TARGET_DEBUG_ARG)
3922 fputs ("va_start: words = ", stderr);
3923 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3924 fputs (", n_gpr = ", stderr);
3925 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3926 fputs (", n_fpr = ", stderr);
3927 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3928 putc ('\n', stderr);
3931 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3932 TREE_SIDE_EFFECTS (t) = 1;
3933 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3935 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3936 TREE_SIDE_EFFECTS (t) = 1;
3937 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3939 /* Find the overflow area. */
3940 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3942 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3943 build_int_2 (words * UNITS_PER_WORD, 0));
3944 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3945 TREE_SIDE_EFFECTS (t) = 1;
3946 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3948 /* Find the register save area. */
3949 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3950 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3951 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3952 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3953 TREE_SIDE_EFFECTS (t) = 1;
3954 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3957 /* Implement va_arg. */
3960 rs6000_va_arg (valist, type)
3963 tree f_gpr, f_fpr, f_ovf, f_sav;
3964 tree gpr, fpr, ovf, sav, reg, t, u;
3965 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3966 rtx lab_false, lab_over, addr_rtx, r;
3968 if (DEFAULT_ABI != ABI_V4)
3970 /* Variable sized types are passed by reference. */
3971 if (int_size_in_bytes (type) <= 0)
3973 u = build_pointer_type (type);
3975 /* Args grow upward. */
3976 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3977 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3978 TREE_SIDE_EFFECTS (t) = 1;
3980 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3981 TREE_SIDE_EFFECTS (t) = 1;
3983 t = build1 (INDIRECT_REF, u, t);
3984 TREE_SIDE_EFFECTS (t) = 1;
3986 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3989 return std_expand_builtin_va_arg (valist, type);
3992 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3993 f_fpr = TREE_CHAIN (f_gpr);
3994 f_ovf = TREE_CHAIN (f_fpr);
3995 f_sav = TREE_CHAIN (f_ovf);
3997 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3998 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3999 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4000 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4001 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4003 size = int_size_in_bytes (type);
4004 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4006 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4008 /* Aggregates and long doubles are passed by reference. */
4014 size = UNITS_PER_WORD;
4017 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4019 /* FP args go in FP registers, if present. */
4028 /* Otherwise into GP registers. */
4036 /* Pull the value out of the saved registers ... */
4038 lab_false = gen_label_rtx ();
4039 lab_over = gen_label_rtx ();
4040 addr_rtx = gen_reg_rtx (Pmode);
4042 /* AltiVec vectors never go in registers. */
4043 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4045 TREE_THIS_VOLATILE (reg) = 1;
4046 emit_cmp_and_jump_insns
4047 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4048 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4051 /* Long long is aligned in the registers. */
4054 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4055 build_int_2 (n_reg - 1, 0));
4056 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4057 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4058 TREE_SIDE_EFFECTS (u) = 1;
4059 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4063 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4067 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4068 build_int_2 (n_reg, 0));
4069 TREE_SIDE_EFFECTS (u) = 1;
4071 u = build1 (CONVERT_EXPR, integer_type_node, u);
4072 TREE_SIDE_EFFECTS (u) = 1;
4074 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4075 TREE_SIDE_EFFECTS (u) = 1;
4077 t = build (PLUS_EXPR, ptr_type_node, t, u);
4078 TREE_SIDE_EFFECTS (t) = 1;
4080 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4082 emit_move_insn (addr_rtx, r);
4084 emit_jump_insn (gen_jump (lab_over));
4088 emit_label (lab_false);
4090 /* ... otherwise out of the overflow area. */
4092 /* Make sure we don't find reg 7 for the next int arg.
4094 All AltiVec vectors go in the overflow area. So in the AltiVec
4095 case we need to get the vectors from the overflow area, but
4096 remember where the GPRs and FPRs are. */
4097 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4098 || !TARGET_ALTIVEC))
4100 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4101 TREE_SIDE_EFFECTS (t) = 1;
4102 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4105 /* Care for on-stack alignment if needed. */
4112 /* AltiVec vectors are 16 byte aligned. */
4113 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4118 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4119 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4123 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4125 emit_move_insn (addr_rtx, r);
4127 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4128 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4129 TREE_SIDE_EFFECTS (t) = 1;
4130 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4132 emit_label (lab_over);
4136 r = gen_rtx_MEM (Pmode, addr_rtx);
4137 set_mem_alias_set (r, get_varargs_alias_set ());
4138 emit_move_insn (addr_rtx, r);
4146 #define def_builtin(MASK, NAME, TYPE, CODE) \
4148 if ((MASK) & target_flags) \
4149 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4153 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4155 static const struct builtin_description bdesc_3arg[] =
4157 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4158 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4159 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4160 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4161 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4162 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4163 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4164 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4165 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4166 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4167 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4168 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4169 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4170 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4171 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4172 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4173 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4174 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4175 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4176 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4177 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4178 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4179 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4182 /* DST operations: void foo (void *, const int, const char). */
4184 static const struct builtin_description bdesc_dst[] =
4186 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4187 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4188 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4189 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4192 /* Simple binary operations: VECc = foo (VECa, VECb). */
4194 static struct builtin_description bdesc_2arg[] =
4196 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4197 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4198 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4199 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4200 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4201 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4202 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4203 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4204 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4205 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4206 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4207 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4208 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4209 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4210 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4211 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4212 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4213 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4214 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4215 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4216 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4217 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4218 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4219 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4220 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4221 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4222 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4223 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4224 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4225 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4226 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4227 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4228 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4229 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4230 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4231 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4232 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4233 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4234 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4235 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4236 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4237 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4238 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4239 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4240 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4241 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4242 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4243 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4244 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4245 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4246 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4247 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4248 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4249 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4250 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4251 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4252 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4253 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4254 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4255 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4256 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4257 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4258 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4259 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4260 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4261 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4262 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4263 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4264 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4265 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4266 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4267 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4268 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4269 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4270 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4271 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4272 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4273 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4274 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4275 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4276 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4277 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4278 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4279 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4280 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4281 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4282 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4283 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4284 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4285 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4286 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4287 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4288 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4289 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4290 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4291 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4292 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4293 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4294 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4295 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4296 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4297 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4298 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4299 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4300 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4301 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4302 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4303 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4304 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4305 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4306 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4307 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4308 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4310 /* Place holder, leave as first spe builtin. */
4311 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4312 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4313 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4314 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4315 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4316 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4317 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4318 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4319 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4320 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4321 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4322 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4323 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4324 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4325 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4326 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4327 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4328 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4329 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4330 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4331 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4332 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4333 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4334 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4335 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4336 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4337 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4338 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4339 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4340 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4341 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4342 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4343 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4344 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4345 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4346 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4347 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4348 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4349 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4350 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4351 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4352 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4353 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4354 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4355 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4356 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4357 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4358 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4359 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4360 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4361 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4362 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4363 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4364 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4365 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4366 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4367 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4368 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4369 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4370 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4371 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4372 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4373 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4374 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4375 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4376 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4377 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4378 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4379 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4380 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4381 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4382 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4383 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4384 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4385 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4386 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4387 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4388 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4389 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4390 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4391 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4392 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4393 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4394 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4395 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4396 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4397 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4398 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4399 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4400 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4401 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4402 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4403 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4404 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4405 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4406 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4407 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4408 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4409 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4410 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4411 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4412 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4413 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4414 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4415 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4416 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4417 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4418 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4419 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4421 /* SPE binary operations expecting a 5-bit unsigned literal. */
4422 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4424 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4425 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4426 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4427 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4428 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4429 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4430 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4431 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4432 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4433 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4434 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4435 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4436 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4437 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4438 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4439 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4440 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4441 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4442 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4443 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4444 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4445 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4446 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4447 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4448 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4449 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4451 /* Place-holder. Leave as last binary SPE builtin. */
4452 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4455 /* AltiVec predicates. */
4457 struct builtin_description_predicates
4459 const unsigned int mask;
4460 const enum insn_code icode;
4462 const char *const name;
4463 const enum rs6000_builtins code;
4466 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4468 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4469 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4470 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4471 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4472 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4473 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4474 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4475 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4476 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4477 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4478 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4479 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4480 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4483 /* SPE predicates. */
4484 static struct builtin_description bdesc_spe_predicates[] =
4486 /* Place-holder. Leave as first. */
4487 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4488 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4489 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4490 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4491 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4492 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4493 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4494 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4495 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4496 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4497 /* Place-holder. Leave as last. */
4498 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4501 /* SPE evsel predicates. */
4502 static struct builtin_description bdesc_spe_evsel[] =
4504 /* Place-holder. Leave as first. */
4505 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4506 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4507 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4508 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4509 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4510 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4511 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4512 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4513 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4514 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4515 /* Place-holder. Leave as last. */
4516 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4519 /* ABS* operations. */
4521 static const struct builtin_description bdesc_abs[] =
4523 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4524 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4525 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4526 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4527 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4528 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4529 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4532 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4535 static struct builtin_description bdesc_1arg[] =
4537 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4538 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4539 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4540 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4541 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4542 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4543 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4544 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4545 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4546 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4547 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4548 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4549 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4550 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4551 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4552 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4553 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4555 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4556 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4557 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4558 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4559 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4560 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4561 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4562 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4563 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4564 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4565 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4566 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4567 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4568 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4569 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4570 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4571 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4572 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4573 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4574 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4575 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4576 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4577 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4578 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4579 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4580 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4581 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4582 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4583 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4584 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4585 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4586 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4588 /* Place-holder. Leave as last unary SPE builtin. */
4589 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4593 rs6000_expand_unop_builtin (icode, arglist, target)
4594 enum insn_code icode;
4599 tree arg0 = TREE_VALUE (arglist);
4600 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4601 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4602 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4604 if (icode == CODE_FOR_nothing)
4605 /* Builtin not supported on this processor. */
4608 /* If we got invalid arguments bail out before generating bad rtl. */
4609 if (arg0 == error_mark_node)
4612 if (icode == CODE_FOR_altivec_vspltisb
4613 || icode == CODE_FOR_altivec_vspltish
4614 || icode == CODE_FOR_altivec_vspltisw
4615 || icode == CODE_FOR_spe_evsplatfi
4616 || icode == CODE_FOR_spe_evsplati)
4618 /* Only allow 5-bit *signed* literals. */
4619 if (GET_CODE (op0) != CONST_INT
4620 || INTVAL (op0) > 0x1f
4621 || INTVAL (op0) < -0x1f)
4623 error ("argument 1 must be a 5-bit signed literal");
4629 || GET_MODE (target) != tmode
4630 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4631 target = gen_reg_rtx (tmode);
4633 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4634 op0 = copy_to_mode_reg (mode0, op0);
4636 pat = GEN_FCN (icode) (target, op0);
4645 altivec_expand_abs_builtin (icode, arglist, target)
4646 enum insn_code icode;
4650 rtx pat, scratch1, scratch2;
4651 tree arg0 = TREE_VALUE (arglist);
4652 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4653 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4654 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4656 /* If we have invalid arguments, bail out before generating bad rtl. */
4657 if (arg0 == error_mark_node)
4661 || GET_MODE (target) != tmode
4662 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4663 target = gen_reg_rtx (tmode);
4665 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4666 op0 = copy_to_mode_reg (mode0, op0);
4668 scratch1 = gen_reg_rtx (mode0);
4669 scratch2 = gen_reg_rtx (mode0);
4671 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4680 rs6000_expand_binop_builtin (icode, arglist, target)
4681 enum insn_code icode;
4686 tree arg0 = TREE_VALUE (arglist);
4687 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4688 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4689 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4690 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4691 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4692 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4694 if (icode == CODE_FOR_nothing)
4695 /* Builtin not supported on this processor. */
4698 /* If we got invalid arguments bail out before generating bad rtl. */
4699 if (arg0 == error_mark_node || arg1 == error_mark_node)
4702 if (icode == CODE_FOR_altivec_vcfux
4703 || icode == CODE_FOR_altivec_vcfsx
4704 || icode == CODE_FOR_altivec_vctsxs
4705 || icode == CODE_FOR_altivec_vctuxs
4706 || icode == CODE_FOR_altivec_vspltb
4707 || icode == CODE_FOR_altivec_vsplth
4708 || icode == CODE_FOR_altivec_vspltw
4709 || icode == CODE_FOR_spe_evaddiw
4710 || icode == CODE_FOR_spe_evldd
4711 || icode == CODE_FOR_spe_evldh
4712 || icode == CODE_FOR_spe_evldw
4713 || icode == CODE_FOR_spe_evlhhesplat
4714 || icode == CODE_FOR_spe_evlhhossplat
4715 || icode == CODE_FOR_spe_evlhhousplat
4716 || icode == CODE_FOR_spe_evlwhe
4717 || icode == CODE_FOR_spe_evlwhos
4718 || icode == CODE_FOR_spe_evlwhou
4719 || icode == CODE_FOR_spe_evlwhsplat
4720 || icode == CODE_FOR_spe_evlwwsplat
4721 || icode == CODE_FOR_spe_evrlwi
4722 || icode == CODE_FOR_spe_evslwi
4723 || icode == CODE_FOR_spe_evsrwis
4724 || icode == CODE_FOR_spe_evsubifw
4725 || icode == CODE_FOR_spe_evsrwiu)
4727 /* Only allow 5-bit unsigned literals. */
4728 if (TREE_CODE (arg1) != INTEGER_CST
4729 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4731 error ("argument 2 must be a 5-bit unsigned literal");
4737 || GET_MODE (target) != tmode
4738 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4739 target = gen_reg_rtx (tmode);
4741 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4742 op0 = copy_to_mode_reg (mode0, op0);
4743 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4744 op1 = copy_to_mode_reg (mode1, op1);
4746 pat = GEN_FCN (icode) (target, op0, op1);
4755 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4756 enum insn_code icode;
4762 tree cr6_form = TREE_VALUE (arglist);
4763 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4764 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4765 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4766 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4767 enum machine_mode tmode = SImode;
4768 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4769 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4772 if (TREE_CODE (cr6_form) != INTEGER_CST)
4774 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4778 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4783 /* If we have invalid arguments, bail out before generating bad rtl. */
4784 if (arg0 == error_mark_node || arg1 == error_mark_node)
4788 || GET_MODE (target) != tmode
4789 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4790 target = gen_reg_rtx (tmode);
4792 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4793 op0 = copy_to_mode_reg (mode0, op0);
4794 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4795 op1 = copy_to_mode_reg (mode1, op1);
4797 scratch = gen_reg_rtx (mode0);
4799 pat = GEN_FCN (icode) (scratch, op0, op1,
4800 gen_rtx (SYMBOL_REF, Pmode, opcode));
4805 /* The vec_any* and vec_all* predicates use the same opcodes for two
4806 different operations, but the bits in CR6 will be different
4807 depending on what information we want. So we have to play tricks
4808 with CR6 to get the right bits out.
4810 If you think this is disgusting, look at the specs for the
4811 AltiVec predicates. */
4813 switch (cr6_form_int)
4816 emit_insn (gen_cr6_test_for_zero (target));
4819 emit_insn (gen_cr6_test_for_zero_reverse (target));
4822 emit_insn (gen_cr6_test_for_lt (target));
4825 emit_insn (gen_cr6_test_for_lt_reverse (target));
4828 error ("argument 1 of __builtin_altivec_predicate is out of range");
4836 altivec_expand_stv_builtin (icode, arglist)
4837 enum insn_code icode;
4840 tree arg0 = TREE_VALUE (arglist);
4841 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4842 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4843 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4844 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4845 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4847 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4848 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4849 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4851 /* Invalid arguments. Bail before doing anything stoopid! */
4852 if (arg0 == error_mark_node
4853 || arg1 == error_mark_node
4854 || arg2 == error_mark_node)
4857 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4858 op0 = copy_to_mode_reg (mode2, op0);
4859 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4860 op1 = copy_to_mode_reg (mode0, op1);
4861 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4862 op2 = copy_to_mode_reg (mode1, op2);
4864 pat = GEN_FCN (icode) (op1, op2, op0);
4871 rs6000_expand_ternop_builtin (icode, arglist, target)
4872 enum insn_code icode;
4877 tree arg0 = TREE_VALUE (arglist);
4878 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4879 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4880 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4881 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4882 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4883 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4884 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4885 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4886 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4888 if (icode == CODE_FOR_nothing)
4889 /* Builtin not supported on this processor. */
4892 /* If we got invalid arguments bail out before generating bad rtl. */
4893 if (arg0 == error_mark_node
4894 || arg1 == error_mark_node
4895 || arg2 == error_mark_node)
4898 if (icode == CODE_FOR_altivec_vsldoi_4sf
4899 || icode == CODE_FOR_altivec_vsldoi_4si
4900 || icode == CODE_FOR_altivec_vsldoi_8hi
4901 || icode == CODE_FOR_altivec_vsldoi_16qi)
4903 /* Only allow 4-bit unsigned literals. */
4904 if (TREE_CODE (arg2) != INTEGER_CST
4905 || TREE_INT_CST_LOW (arg2) & ~0xf)
4907 error ("argument 3 must be a 4-bit unsigned literal");
4913 || GET_MODE (target) != tmode
4914 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4915 target = gen_reg_rtx (tmode);
4917 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4918 op0 = copy_to_mode_reg (mode0, op0);
4919 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4920 op1 = copy_to_mode_reg (mode1, op1);
4921 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4922 op2 = copy_to_mode_reg (mode2, op2);
4924 pat = GEN_FCN (icode) (target, op0, op1, op2);
4932 /* Expand the lvx builtins. */
4934 altivec_expand_ld_builtin (exp, target, expandedp)
4939 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4940 tree arglist = TREE_OPERAND (exp, 1);
4941 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4943 enum machine_mode tmode, mode0;
4945 enum insn_code icode;
4949 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4950 icode = CODE_FOR_altivec_lvx_16qi;
4952 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4953 icode = CODE_FOR_altivec_lvx_8hi;
4955 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4956 icode = CODE_FOR_altivec_lvx_4si;
4958 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4959 icode = CODE_FOR_altivec_lvx_4sf;
4968 arg0 = TREE_VALUE (arglist);
4969 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4970 tmode = insn_data[icode].operand[0].mode;
4971 mode0 = insn_data[icode].operand[1].mode;
4974 || GET_MODE (target) != tmode
4975 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4976 target = gen_reg_rtx (tmode);
4978 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4979 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4981 pat = GEN_FCN (icode) (target, op0);
4988 /* Expand the stvx builtins. */
4990 altivec_expand_st_builtin (exp, target, expandedp)
4992 rtx target ATTRIBUTE_UNUSED;
4995 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4996 tree arglist = TREE_OPERAND (exp, 1);
4997 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4999 enum machine_mode mode0, mode1;
5001 enum insn_code icode;
5005 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5006 icode = CODE_FOR_altivec_stvx_16qi;
5008 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5009 icode = CODE_FOR_altivec_stvx_8hi;
5011 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5012 icode = CODE_FOR_altivec_stvx_4si;
5014 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5015 icode = CODE_FOR_altivec_stvx_4sf;
5022 arg0 = TREE_VALUE (arglist);
5023 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5024 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5025 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5026 mode0 = insn_data[icode].operand[0].mode;
5027 mode1 = insn_data[icode].operand[1].mode;
5029 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5030 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5031 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5032 op1 = copy_to_mode_reg (mode1, op1);
5034 pat = GEN_FCN (icode) (op0, op1);
5042 /* Expand the dst builtins. */
5044 altivec_expand_dst_builtin (exp, target, expandedp)
5046 rtx target ATTRIBUTE_UNUSED;
5049 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5050 tree arglist = TREE_OPERAND (exp, 1);
5051 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5052 tree arg0, arg1, arg2;
5053 enum machine_mode mode0, mode1, mode2;
5054 rtx pat, op0, op1, op2;
5055 struct builtin_description *d;
5060 /* Handle DST variants. */
5061 d = (struct builtin_description *) bdesc_dst;
5062 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5063 if (d->code == fcode)
5065 arg0 = TREE_VALUE (arglist);
5066 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5067 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5068 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5069 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5070 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5071 mode0 = insn_data[d->icode].operand[0].mode;
5072 mode1 = insn_data[d->icode].operand[1].mode;
5073 mode2 = insn_data[d->icode].operand[2].mode;
5075 /* Invalid arguments, bail out before generating bad rtl. */
5076 if (arg0 == error_mark_node
5077 || arg1 == error_mark_node
5078 || arg2 == error_mark_node)
5081 if (TREE_CODE (arg2) != INTEGER_CST
5082 || TREE_INT_CST_LOW (arg2) & ~0x3)
5084 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5088 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5089 op0 = copy_to_mode_reg (mode0, op0);
5090 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5091 op1 = copy_to_mode_reg (mode1, op1);
5093 pat = GEN_FCN (d->icode) (op0, op1, op2);
5104 /* Expand the builtin in EXP and store the result in TARGET. Store
5105 true in *EXPANDEDP if we found a builtin to expand. */
5107 altivec_expand_builtin (exp, target, expandedp)
5112 struct builtin_description *d;
5113 struct builtin_description_predicates *dp;
5115 enum insn_code icode;
5116 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5117 tree arglist = TREE_OPERAND (exp, 1);
5120 enum machine_mode tmode, mode0;
5121 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5123 target = altivec_expand_ld_builtin (exp, target, expandedp);
5127 target = altivec_expand_st_builtin (exp, target, expandedp);
5131 target = altivec_expand_dst_builtin (exp, target, expandedp);
5139 case ALTIVEC_BUILTIN_STVX:
5140 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5141 case ALTIVEC_BUILTIN_STVEBX:
5142 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5143 case ALTIVEC_BUILTIN_STVEHX:
5144 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5145 case ALTIVEC_BUILTIN_STVEWX:
5146 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5147 case ALTIVEC_BUILTIN_STVXL:
5148 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5150 case ALTIVEC_BUILTIN_MFVSCR:
5151 icode = CODE_FOR_altivec_mfvscr;
5152 tmode = insn_data[icode].operand[0].mode;
5155 || GET_MODE (target) != tmode
5156 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5157 target = gen_reg_rtx (tmode);
5159 pat = GEN_FCN (icode) (target);
5165 case ALTIVEC_BUILTIN_MTVSCR:
5166 icode = CODE_FOR_altivec_mtvscr;
5167 arg0 = TREE_VALUE (arglist);
5168 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5169 mode0 = insn_data[icode].operand[0].mode;
5171 /* If we got invalid arguments bail out before generating bad rtl. */
5172 if (arg0 == error_mark_node)
5175 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5176 op0 = copy_to_mode_reg (mode0, op0);
5178 pat = GEN_FCN (icode) (op0);
5183 case ALTIVEC_BUILTIN_DSSALL:
5184 emit_insn (gen_altivec_dssall ());
5187 case ALTIVEC_BUILTIN_DSS:
5188 icode = CODE_FOR_altivec_dss;
5189 arg0 = TREE_VALUE (arglist);
5190 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5191 mode0 = insn_data[icode].operand[0].mode;
5193 /* If we got invalid arguments bail out before generating bad rtl. */
5194 if (arg0 == error_mark_node)
5197 if (TREE_CODE (arg0) != INTEGER_CST
5198 || TREE_INT_CST_LOW (arg0) & ~0x3)
5200 error ("argument to dss must be a 2-bit unsigned literal");
5204 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5205 op0 = copy_to_mode_reg (mode0, op0);
5207 emit_insn (gen_altivec_dss (op0));
5211 /* Expand abs* operations. */
5212 d = (struct builtin_description *) bdesc_abs;
5213 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5214 if (d->code == fcode)
5215 return altivec_expand_abs_builtin (d->icode, arglist, target);
5217 /* Expand the AltiVec predicates. */
5218 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5219 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5220 if (dp->code == fcode)
5221 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5223 /* LV* are funky. We initialized them differently. */
5226 case ALTIVEC_BUILTIN_LVSL:
5227 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5229 case ALTIVEC_BUILTIN_LVSR:
5230 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5232 case ALTIVEC_BUILTIN_LVEBX:
5233 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5235 case ALTIVEC_BUILTIN_LVEHX:
5236 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5238 case ALTIVEC_BUILTIN_LVEWX:
5239 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5241 case ALTIVEC_BUILTIN_LVXL:
5242 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5244 case ALTIVEC_BUILTIN_LVX:
5245 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5256 /* Binops that need to be initialized manually, but can be expanded
5257 automagically by rs6000_expand_binop_builtin. */
5258 static struct builtin_description bdesc_2arg_spe[] =
5260 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5261 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5262 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5263 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5264 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5265 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5266 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5267 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5268 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5269 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5270 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5271 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5272 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5273 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5274 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5275 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5276 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5277 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5278 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5279 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5280 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5281 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5284 /* Expand the builtin in EXP and store the result in TARGET. Store
5285 true in *EXPANDEDP if we found a builtin to expand.
5287 This expands the SPE builtins that are not simple unary and binary
5290 spe_expand_builtin (exp, target, expandedp)
5295 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5296 tree arglist = TREE_OPERAND (exp, 1);
5298 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5299 enum insn_code icode;
5300 enum machine_mode tmode, mode0;
5302 struct builtin_description *d;
5307 /* Syntax check for a 5-bit unsigned immediate. */
5310 case SPE_BUILTIN_EVSTDD:
5311 case SPE_BUILTIN_EVSTDH:
5312 case SPE_BUILTIN_EVSTDW:
5313 case SPE_BUILTIN_EVSTWHE:
5314 case SPE_BUILTIN_EVSTWHO:
5315 case SPE_BUILTIN_EVSTWWE:
5316 case SPE_BUILTIN_EVSTWWO:
5317 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5318 if (TREE_CODE (arg1) != INTEGER_CST
5319 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5321 error ("argument 2 must be a 5-bit unsigned literal");
5329 d = (struct builtin_description *) bdesc_2arg_spe;
5330 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5331 if (d->code == fcode)
5332 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5334 d = (struct builtin_description *) bdesc_spe_predicates;
5335 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5336 if (d->code == fcode)
5337 return spe_expand_predicate_builtin (d->icode, arglist, target);
5339 d = (struct builtin_description *) bdesc_spe_evsel;
5340 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5341 if (d->code == fcode)
5342 return spe_expand_evsel_builtin (d->icode, arglist, target);
5346 case SPE_BUILTIN_EVSTDDX:
5347 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5348 case SPE_BUILTIN_EVSTDHX:
5349 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5350 case SPE_BUILTIN_EVSTDWX:
5351 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5352 case SPE_BUILTIN_EVSTWHEX:
5353 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5354 case SPE_BUILTIN_EVSTWHOX:
5355 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5356 case SPE_BUILTIN_EVSTWWEX:
5357 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5358 case SPE_BUILTIN_EVSTWWOX:
5359 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5360 case SPE_BUILTIN_EVSTDD:
5361 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5362 case SPE_BUILTIN_EVSTDH:
5363 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5364 case SPE_BUILTIN_EVSTDW:
5365 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5366 case SPE_BUILTIN_EVSTWHE:
5367 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5368 case SPE_BUILTIN_EVSTWHO:
5369 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5370 case SPE_BUILTIN_EVSTWWE:
5371 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5372 case SPE_BUILTIN_EVSTWWO:
5373 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5374 case SPE_BUILTIN_MFSPEFSCR:
5375 icode = CODE_FOR_spe_mfspefscr;
5376 tmode = insn_data[icode].operand[0].mode;
5379 || GET_MODE (target) != tmode
5380 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5381 target = gen_reg_rtx (tmode);
5383 pat = GEN_FCN (icode) (target);
5388 case SPE_BUILTIN_MTSPEFSCR:
5389 icode = CODE_FOR_spe_mtspefscr;
5390 arg0 = TREE_VALUE (arglist);
5391 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5392 mode0 = insn_data[icode].operand[0].mode;
5394 if (arg0 == error_mark_node)
5397 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5398 op0 = copy_to_mode_reg (mode0, op0);
5400 pat = GEN_FCN (icode) (op0);
5413 spe_expand_predicate_builtin (icode, arglist, target)
5414 enum insn_code icode;
5418 rtx pat, scratch, tmp;
5419 tree form = TREE_VALUE (arglist);
5420 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5421 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5422 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5423 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5424 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5425 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5429 if (TREE_CODE (form) != INTEGER_CST)
5431 error ("argument 1 of __builtin_spe_predicate must be a constant");
5435 form_int = TREE_INT_CST_LOW (form);
5440 if (arg0 == error_mark_node || arg1 == error_mark_node)
5444 || GET_MODE (target) != SImode
5445 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5446 target = gen_reg_rtx (SImode);
5448 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5449 op0 = copy_to_mode_reg (mode0, op0);
5450 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5451 op1 = copy_to_mode_reg (mode1, op1);
5453 scratch = gen_reg_rtx (CCmode);
5455 pat = GEN_FCN (icode) (scratch, op0, op1);
5460 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5461 _lower_. We use one compare, but look in different bits of the
5462 CR for each variant.
5464 There are 2 elements in each SPE simd type (upper/lower). The CR
5465 bits are set as follows:
5467 BIT0 | BIT 1 | BIT 2 | BIT 3
5468 U | L | (U | L) | (U & L)
5470 So, for an "all" relationship, BIT 3 would be set.
5471 For an "any" relationship, BIT 2 would be set. Etc.
5473 Following traditional nomenclature, these bits map to:
5475 BIT0 | BIT 1 | BIT 2 | BIT 3
5478 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5483 /* All variant. OV bit. */
5485 /* We need to get to the OV bit, which is the ORDERED bit. We
5486 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5487 that's ugly and will trigger a validate_condition_mode abort.
5488 So let's just use another pattern. */
5489 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5491 /* Any variant. EQ bit. */
5495 /* Upper variant. LT bit. */
5499 /* Lower variant. GT bit. */
5504 error ("argument 1 of __builtin_spe_predicate is out of range");
5508 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5509 emit_move_insn (target, tmp);
5514 /* The evsel builtins look like this:
5516 e = __builtin_spe_evsel_OP (a, b, c, d);
5520 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5521 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5525 spe_expand_evsel_builtin (icode, arglist, target)
5526 enum insn_code icode;
5531 tree arg0 = TREE_VALUE (arglist);
5532 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5533 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5534 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5535 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5536 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5537 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5538 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5539 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5540 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5545 if (arg0 == error_mark_node || arg1 == error_mark_node
5546 || arg2 == error_mark_node || arg3 == error_mark_node)
5550 || GET_MODE (target) != mode0
5551 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5552 target = gen_reg_rtx (mode0);
5554 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5555 op0 = copy_to_mode_reg (mode0, op0);
5556 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5557 op1 = copy_to_mode_reg (mode0, op1);
5558 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5559 op2 = copy_to_mode_reg (mode0, op2);
5560 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5561 op3 = copy_to_mode_reg (mode0, op3);
5563 /* Generate the compare. */
5564 scratch = gen_reg_rtx (CCmode);
5565 pat = GEN_FCN (icode) (scratch, op0, op1);
5570 if (mode0 == V2SImode)
5571 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5573 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5578 /* Expand an expression EXP that calls a built-in function,
5579 with result going to TARGET if that's convenient
5580 (and in mode MODE if that's convenient).
5581 SUBTARGET may be used as the target for computing one of EXP's operands.
5582 IGNORE is nonzero if the value is to be ignored. */
5585 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5588 rtx subtarget ATTRIBUTE_UNUSED;
5589 enum machine_mode mode ATTRIBUTE_UNUSED;
5590 int ignore ATTRIBUTE_UNUSED;
5592 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5593 tree arglist = TREE_OPERAND (exp, 1);
5594 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5595 struct builtin_description *d;
5602 ret = altivec_expand_builtin (exp, target, &success);
5609 ret = spe_expand_builtin (exp, target, &success);
5615 if (TARGET_ALTIVEC || TARGET_SPE)
5617 /* Handle simple unary operations. */
5618 d = (struct builtin_description *) bdesc_1arg;
5619 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5620 if (d->code == fcode)
5621 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5623 /* Handle simple binary operations. */
5624 d = (struct builtin_description *) bdesc_2arg;
5625 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5626 if (d->code == fcode)
5627 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5629 /* Handle simple ternary operations. */
5630 d = (struct builtin_description *) bdesc_3arg;
5631 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5632 if (d->code == fcode)
5633 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5641 rs6000_init_builtins ()
5643 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5644 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5645 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5648 spe_init_builtins ();
5650 altivec_init_builtins ();
5651 if (TARGET_ALTIVEC || TARGET_SPE)
5652 rs6000_common_init_builtins ();
5655 /* Search through a set of builtins and enable the mask bits.
5656 DESC is an array of builtins.
5657 SIZE is the total number of builtins.
5658 START is the builtin enum at which to start.
5659 END is the builtin enum at which to end. */
5661 enable_mask_for_builtins (desc, size, start, end)
5662 struct builtin_description *desc;
5664 enum rs6000_builtins start, end;
5668 for (i = 0; i < size; ++i)
5669 if (desc[i].code == start)
5675 for (; i < size; ++i)
5677 /* Flip all the bits on. */
5678 desc[i].mask = target_flags;
5679 if (desc[i].code == end)
5685 spe_init_builtins ()
5687 tree endlink = void_list_node;
5688 tree puint_type_node = build_pointer_type (unsigned_type_node);
5689 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5690 struct builtin_description *d;
5693 tree v2si_ftype_4_v2si
5694 = build_function_type
5695 (opaque_V2SI_type_node,
5696 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5697 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5698 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5699 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5702 tree v2sf_ftype_4_v2sf
5703 = build_function_type
5704 (opaque_V2SF_type_node,
5705 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5706 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5707 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5708 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5711 tree int_ftype_int_v2si_v2si
5712 = build_function_type
5714 tree_cons (NULL_TREE, integer_type_node,
5715 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5716 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5719 tree int_ftype_int_v2sf_v2sf
5720 = build_function_type
5722 tree_cons (NULL_TREE, integer_type_node,
5723 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5724 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5727 tree void_ftype_v2si_puint_int
5728 = build_function_type (void_type_node,
5729 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5730 tree_cons (NULL_TREE, puint_type_node,
5731 tree_cons (NULL_TREE,
5735 tree void_ftype_v2si_puint_char
5736 = build_function_type (void_type_node,
5737 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5738 tree_cons (NULL_TREE, puint_type_node,
5739 tree_cons (NULL_TREE,
5743 tree void_ftype_v2si_pv2si_int
5744 = build_function_type (void_type_node,
5745 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5746 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5747 tree_cons (NULL_TREE,
5751 tree void_ftype_v2si_pv2si_char
5752 = build_function_type (void_type_node,
5753 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5754 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5755 tree_cons (NULL_TREE,
5760 = build_function_type (void_type_node,
5761 tree_cons (NULL_TREE, integer_type_node, endlink));
5764 = build_function_type (integer_type_node,
5765 tree_cons (NULL_TREE, void_type_node, endlink));
5767 tree v2si_ftype_pv2si_int
5768 = build_function_type (opaque_V2SI_type_node,
5769 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5770 tree_cons (NULL_TREE, integer_type_node,
5773 tree v2si_ftype_puint_int
5774 = build_function_type (opaque_V2SI_type_node,
5775 tree_cons (NULL_TREE, puint_type_node,
5776 tree_cons (NULL_TREE, integer_type_node,
5779 tree v2si_ftype_pushort_int
5780 = build_function_type (opaque_V2SI_type_node,
5781 tree_cons (NULL_TREE, pushort_type_node,
5782 tree_cons (NULL_TREE, integer_type_node,
5785 /* The initialization of the simple binary and unary builtins is
5786 done in rs6000_common_init_builtins, but we have to enable the
5787 mask bits here manually because we have run out of `target_flags'
5788 bits. We really need to redesign this mask business. */
5790 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5791 ARRAY_SIZE (bdesc_2arg),
5794 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5795 ARRAY_SIZE (bdesc_1arg),
5797 SPE_BUILTIN_EVSUBFUSIAAW);
5798 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5799 ARRAY_SIZE (bdesc_spe_predicates),
5800 SPE_BUILTIN_EVCMPEQ,
5801 SPE_BUILTIN_EVFSTSTLT);
5802 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5803 ARRAY_SIZE (bdesc_spe_evsel),
5804 SPE_BUILTIN_EVSEL_CMPGTS,
5805 SPE_BUILTIN_EVSEL_FSTSTEQ);
5807 /* Initialize irregular SPE builtins. */
5809 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5810 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5811 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5812 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5813 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5814 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5815 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5816 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5817 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5818 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5819 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5820 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5821 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5822 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5823 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5824 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5827 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5828 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5829 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5830 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5831 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5832 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5833 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5834 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5835 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5836 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5837 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5838 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5839 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5840 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5841 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5842 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5843 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5844 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5845 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5846 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5847 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5848 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5851 d = (struct builtin_description *) bdesc_spe_predicates;
5852 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5856 switch (insn_data[d->icode].operand[1].mode)
5859 type = int_ftype_int_v2si_v2si;
5862 type = int_ftype_int_v2sf_v2sf;
5868 def_builtin (d->mask, d->name, type, d->code);
5871 /* Evsel predicates. */
5872 d = (struct builtin_description *) bdesc_spe_evsel;
5873 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5877 switch (insn_data[d->icode].operand[1].mode)
5880 type = v2si_ftype_4_v2si;
5883 type = v2sf_ftype_4_v2sf;
5889 def_builtin (d->mask, d->name, type, d->code);
5894 altivec_init_builtins ()
5896 struct builtin_description *d;
5897 struct builtin_description_predicates *dp;
5899 tree pfloat_type_node = build_pointer_type (float_type_node);
5900 tree pint_type_node = build_pointer_type (integer_type_node);
5901 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5902 tree pchar_type_node = build_pointer_type (char_type_node);
5904 tree pvoid_type_node = build_pointer_type (void_type_node);
5906 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5907 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5908 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5909 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5911 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5913 tree int_ftype_int_v4si_v4si
5914 = build_function_type_list (integer_type_node,
5915 integer_type_node, V4SI_type_node,
5916 V4SI_type_node, NULL_TREE);
5917 tree v4sf_ftype_pcfloat
5918 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5919 tree void_ftype_pfloat_v4sf
5920 = build_function_type_list (void_type_node,
5921 pfloat_type_node, V4SF_type_node, NULL_TREE);
5922 tree v4si_ftype_pcint
5923 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5924 tree void_ftype_pint_v4si
5925 = build_function_type_list (void_type_node,
5926 pint_type_node, V4SI_type_node, NULL_TREE);
5927 tree v8hi_ftype_pcshort
5928 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5929 tree void_ftype_pshort_v8hi
5930 = build_function_type_list (void_type_node,
5931 pshort_type_node, V8HI_type_node, NULL_TREE);
5932 tree v16qi_ftype_pcchar
5933 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5934 tree void_ftype_pchar_v16qi
5935 = build_function_type_list (void_type_node,
5936 pchar_type_node, V16QI_type_node, NULL_TREE);
5937 tree void_ftype_v4si
5938 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5939 tree v8hi_ftype_void
5940 = build_function_type (V8HI_type_node, void_list_node);
5941 tree void_ftype_void
5942 = build_function_type (void_type_node, void_list_node);
5944 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5946 tree v16qi_ftype_int_pcvoid
5947 = build_function_type_list (V16QI_type_node,
5948 integer_type_node, pcvoid_type_node, NULL_TREE);
5949 tree v8hi_ftype_int_pcvoid
5950 = build_function_type_list (V8HI_type_node,
5951 integer_type_node, pcvoid_type_node, NULL_TREE);
5952 tree v4si_ftype_int_pcvoid
5953 = build_function_type_list (V4SI_type_node,
5954 integer_type_node, pcvoid_type_node, NULL_TREE);
5956 tree void_ftype_v4si_int_pvoid
5957 = build_function_type_list (void_type_node,
5958 V4SI_type_node, integer_type_node,
5959 pvoid_type_node, NULL_TREE);
5960 tree void_ftype_v16qi_int_pvoid
5961 = build_function_type_list (void_type_node,
5962 V16QI_type_node, integer_type_node,
5963 pvoid_type_node, NULL_TREE);
5964 tree void_ftype_v8hi_int_pvoid
5965 = build_function_type_list (void_type_node,
5966 V8HI_type_node, integer_type_node,
5967 pvoid_type_node, NULL_TREE);
5968 tree int_ftype_int_v8hi_v8hi
5969 = build_function_type_list (integer_type_node,
5970 integer_type_node, V8HI_type_node,
5971 V8HI_type_node, NULL_TREE);
5972 tree int_ftype_int_v16qi_v16qi
5973 = build_function_type_list (integer_type_node,
5974 integer_type_node, V16QI_type_node,
5975 V16QI_type_node, NULL_TREE);
5976 tree int_ftype_int_v4sf_v4sf
5977 = build_function_type_list (integer_type_node,
5978 integer_type_node, V4SF_type_node,
5979 V4SF_type_node, NULL_TREE);
5980 tree v4si_ftype_v4si
5981 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5982 tree v8hi_ftype_v8hi
5983 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5984 tree v16qi_ftype_v16qi
5985 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5986 tree v4sf_ftype_v4sf
5987 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5988 tree void_ftype_pcvoid_int_char
5989 = build_function_type_list (void_type_node,
5990 pcvoid_type_node, integer_type_node,
5991 char_type_node, NULL_TREE);
5993 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5994 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5995 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5996 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5997 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5998 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5999 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6000 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6001 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6002 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6003 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6004 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6005 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6006 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6007 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6008 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6009 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6010 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6011 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6012 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6013 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6014 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6015 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6016 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6017 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6018 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6019 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6020 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6021 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6022 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6023 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6024 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6026 /* Add the DST variants. */
6027 d = (struct builtin_description *) bdesc_dst;
6028 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6029 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6031 /* Initialize the predicates. */
6032 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6033 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6035 enum machine_mode mode1;
6038 mode1 = insn_data[dp->icode].operand[1].mode;
6043 type = int_ftype_int_v4si_v4si;
6046 type = int_ftype_int_v8hi_v8hi;
6049 type = int_ftype_int_v16qi_v16qi;
6052 type = int_ftype_int_v4sf_v4sf;
6058 def_builtin (dp->mask, dp->name, type, dp->code);
6061 /* Initialize the abs* operators. */
6062 d = (struct builtin_description *) bdesc_abs;
6063 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6065 enum machine_mode mode0;
6068 mode0 = insn_data[d->icode].operand[0].mode;
6073 type = v4si_ftype_v4si;
6076 type = v8hi_ftype_v8hi;
6079 type = v16qi_ftype_v16qi;
6082 type = v4sf_ftype_v4sf;
6088 def_builtin (d->mask, d->name, type, d->code);
6093 rs6000_common_init_builtins ()
6095 struct builtin_description *d;
6098 tree v4sf_ftype_v4sf_v4sf_v16qi
6099 = build_function_type_list (V4SF_type_node,
6100 V4SF_type_node, V4SF_type_node,
6101 V16QI_type_node, NULL_TREE);
6102 tree v4si_ftype_v4si_v4si_v16qi
6103 = build_function_type_list (V4SI_type_node,
6104 V4SI_type_node, V4SI_type_node,
6105 V16QI_type_node, NULL_TREE);
6106 tree v8hi_ftype_v8hi_v8hi_v16qi
6107 = build_function_type_list (V8HI_type_node,
6108 V8HI_type_node, V8HI_type_node,
6109 V16QI_type_node, NULL_TREE);
6110 tree v16qi_ftype_v16qi_v16qi_v16qi
6111 = build_function_type_list (V16QI_type_node,
6112 V16QI_type_node, V16QI_type_node,
6113 V16QI_type_node, NULL_TREE);
6114 tree v4si_ftype_char
6115 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6116 tree v8hi_ftype_char
6117 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6118 tree v16qi_ftype_char
6119 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6120 tree v8hi_ftype_v16qi
6121 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6122 tree v4sf_ftype_v4sf
6123 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6125 tree v2si_ftype_v2si_v2si
6126 = build_function_type_list (opaque_V2SI_type_node,
6127 opaque_V2SI_type_node,
6128 opaque_V2SI_type_node, NULL_TREE);
6130 tree v2sf_ftype_v2sf_v2sf
6131 = build_function_type_list (opaque_V2SF_type_node,
6132 opaque_V2SF_type_node,
6133 opaque_V2SF_type_node, NULL_TREE);
6135 tree v2si_ftype_int_int
6136 = build_function_type_list (opaque_V2SI_type_node,
6137 integer_type_node, integer_type_node,
6140 tree v2si_ftype_v2si
6141 = build_function_type_list (opaque_V2SI_type_node,
6142 opaque_V2SI_type_node, NULL_TREE);
6144 tree v2sf_ftype_v2sf
6145 = build_function_type_list (opaque_V2SF_type_node,
6146 opaque_V2SF_type_node, NULL_TREE);
6148 tree v2sf_ftype_v2si
6149 = build_function_type_list (opaque_V2SF_type_node,
6150 opaque_V2SI_type_node, NULL_TREE);
6152 tree v2si_ftype_v2sf
6153 = build_function_type_list (opaque_V2SI_type_node,
6154 opaque_V2SF_type_node, NULL_TREE);
6156 tree v2si_ftype_v2si_char
6157 = build_function_type_list (opaque_V2SI_type_node,
6158 opaque_V2SI_type_node,
6159 char_type_node, NULL_TREE);
6161 tree v2si_ftype_int_char
6162 = build_function_type_list (opaque_V2SI_type_node,
6163 integer_type_node, char_type_node, NULL_TREE);
6165 tree v2si_ftype_char
6166 = build_function_type_list (opaque_V2SI_type_node,
6167 char_type_node, NULL_TREE);
6169 tree int_ftype_int_int
6170 = build_function_type_list (integer_type_node,
6171 integer_type_node, integer_type_node,
6174 tree v4si_ftype_v4si_v4si
6175 = build_function_type_list (V4SI_type_node,
6176 V4SI_type_node, V4SI_type_node, NULL_TREE);
6177 tree v4sf_ftype_v4si_char
6178 = build_function_type_list (V4SF_type_node,
6179 V4SI_type_node, char_type_node, NULL_TREE);
6180 tree v4si_ftype_v4sf_char
6181 = build_function_type_list (V4SI_type_node,
6182 V4SF_type_node, char_type_node, NULL_TREE);
6183 tree v4si_ftype_v4si_char
6184 = build_function_type_list (V4SI_type_node,
6185 V4SI_type_node, char_type_node, NULL_TREE);
6186 tree v8hi_ftype_v8hi_char
6187 = build_function_type_list (V8HI_type_node,
6188 V8HI_type_node, char_type_node, NULL_TREE);
6189 tree v16qi_ftype_v16qi_char
6190 = build_function_type_list (V16QI_type_node,
6191 V16QI_type_node, char_type_node, NULL_TREE);
6192 tree v16qi_ftype_v16qi_v16qi_char
6193 = build_function_type_list (V16QI_type_node,
6194 V16QI_type_node, V16QI_type_node,
6195 char_type_node, NULL_TREE);
6196 tree v8hi_ftype_v8hi_v8hi_char
6197 = build_function_type_list (V8HI_type_node,
6198 V8HI_type_node, V8HI_type_node,
6199 char_type_node, NULL_TREE);
6200 tree v4si_ftype_v4si_v4si_char
6201 = build_function_type_list (V4SI_type_node,
6202 V4SI_type_node, V4SI_type_node,
6203 char_type_node, NULL_TREE);
6204 tree v4sf_ftype_v4sf_v4sf_char
6205 = build_function_type_list (V4SF_type_node,
6206 V4SF_type_node, V4SF_type_node,
6207 char_type_node, NULL_TREE);
6208 tree v4sf_ftype_v4sf_v4sf
6209 = build_function_type_list (V4SF_type_node,
6210 V4SF_type_node, V4SF_type_node, NULL_TREE);
6211 tree v4sf_ftype_v4sf_v4sf_v4si
6212 = build_function_type_list (V4SF_type_node,
6213 V4SF_type_node, V4SF_type_node,
6214 V4SI_type_node, NULL_TREE);
6215 tree v4sf_ftype_v4sf_v4sf_v4sf
6216 = build_function_type_list (V4SF_type_node,
6217 V4SF_type_node, V4SF_type_node,
6218 V4SF_type_node, NULL_TREE);
6219 tree v4si_ftype_v4si_v4si_v4si
6220 = build_function_type_list (V4SI_type_node,
6221 V4SI_type_node, V4SI_type_node,
6222 V4SI_type_node, NULL_TREE);
6223 tree v8hi_ftype_v8hi_v8hi
6224 = build_function_type_list (V8HI_type_node,
6225 V8HI_type_node, V8HI_type_node, NULL_TREE);
6226 tree v8hi_ftype_v8hi_v8hi_v8hi
6227 = build_function_type_list (V8HI_type_node,
6228 V8HI_type_node, V8HI_type_node,
6229 V8HI_type_node, NULL_TREE);
6230 tree v4si_ftype_v8hi_v8hi_v4si
6231 = build_function_type_list (V4SI_type_node,
6232 V8HI_type_node, V8HI_type_node,
6233 V4SI_type_node, NULL_TREE);
6234 tree v4si_ftype_v16qi_v16qi_v4si
6235 = build_function_type_list (V4SI_type_node,
6236 V16QI_type_node, V16QI_type_node,
6237 V4SI_type_node, NULL_TREE);
6238 tree v16qi_ftype_v16qi_v16qi
6239 = build_function_type_list (V16QI_type_node,
6240 V16QI_type_node, V16QI_type_node, NULL_TREE);
6241 tree v4si_ftype_v4sf_v4sf
6242 = build_function_type_list (V4SI_type_node,
6243 V4SF_type_node, V4SF_type_node, NULL_TREE);
6244 tree v8hi_ftype_v16qi_v16qi
6245 = build_function_type_list (V8HI_type_node,
6246 V16QI_type_node, V16QI_type_node, NULL_TREE);
6247 tree v4si_ftype_v8hi_v8hi
6248 = build_function_type_list (V4SI_type_node,
6249 V8HI_type_node, V8HI_type_node, NULL_TREE);
6250 tree v8hi_ftype_v4si_v4si
6251 = build_function_type_list (V8HI_type_node,
6252 V4SI_type_node, V4SI_type_node, NULL_TREE);
6253 tree v16qi_ftype_v8hi_v8hi
6254 = build_function_type_list (V16QI_type_node,
6255 V8HI_type_node, V8HI_type_node, NULL_TREE);
6256 tree v4si_ftype_v16qi_v4si
6257 = build_function_type_list (V4SI_type_node,
6258 V16QI_type_node, V4SI_type_node, NULL_TREE);
6259 tree v4si_ftype_v16qi_v16qi
6260 = build_function_type_list (V4SI_type_node,
6261 V16QI_type_node, V16QI_type_node, NULL_TREE);
6262 tree v4si_ftype_v8hi_v4si
6263 = build_function_type_list (V4SI_type_node,
6264 V8HI_type_node, V4SI_type_node, NULL_TREE);
6265 tree v4si_ftype_v8hi
6266 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6267 tree int_ftype_v4si_v4si
6268 = build_function_type_list (integer_type_node,
6269 V4SI_type_node, V4SI_type_node, NULL_TREE);
6270 tree int_ftype_v4sf_v4sf
6271 = build_function_type_list (integer_type_node,
6272 V4SF_type_node, V4SF_type_node, NULL_TREE);
6273 tree int_ftype_v16qi_v16qi
6274 = build_function_type_list (integer_type_node,
6275 V16QI_type_node, V16QI_type_node, NULL_TREE);
6276 tree int_ftype_v8hi_v8hi
6277 = build_function_type_list (integer_type_node,
6278 V8HI_type_node, V8HI_type_node, NULL_TREE);
6280 /* Add the simple ternary operators. */
6281 d = (struct builtin_description *) bdesc_3arg;
6282 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6285 enum machine_mode mode0, mode1, mode2, mode3;
6288 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6291 mode0 = insn_data[d->icode].operand[0].mode;
6292 mode1 = insn_data[d->icode].operand[1].mode;
6293 mode2 = insn_data[d->icode].operand[2].mode;
6294 mode3 = insn_data[d->icode].operand[3].mode;
6296 /* When all four are of the same mode. */
6297 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6302 type = v4si_ftype_v4si_v4si_v4si;
6305 type = v4sf_ftype_v4sf_v4sf_v4sf;
6308 type = v8hi_ftype_v8hi_v8hi_v8hi;
6311 type = v16qi_ftype_v16qi_v16qi_v16qi;
6317 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6322 type = v4si_ftype_v4si_v4si_v16qi;
6325 type = v4sf_ftype_v4sf_v4sf_v16qi;
6328 type = v8hi_ftype_v8hi_v8hi_v16qi;
6331 type = v16qi_ftype_v16qi_v16qi_v16qi;
6337 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6338 && mode3 == V4SImode)
6339 type = v4si_ftype_v16qi_v16qi_v4si;
6340 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6341 && mode3 == V4SImode)
6342 type = v4si_ftype_v8hi_v8hi_v4si;
6343 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6344 && mode3 == V4SImode)
6345 type = v4sf_ftype_v4sf_v4sf_v4si;
6347 /* vchar, vchar, vchar, 4 bit literal. */
6348 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6350 type = v16qi_ftype_v16qi_v16qi_char;
6352 /* vshort, vshort, vshort, 4 bit literal. */
6353 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6355 type = v8hi_ftype_v8hi_v8hi_char;
6357 /* vint, vint, vint, 4 bit literal. */
6358 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6360 type = v4si_ftype_v4si_v4si_char;
6362 /* vfloat, vfloat, vfloat, 4 bit literal. */
6363 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6365 type = v4sf_ftype_v4sf_v4sf_char;
6370 def_builtin (d->mask, d->name, type, d->code);
6373 /* Add the simple binary operators. */
6374 d = (struct builtin_description *) bdesc_2arg;
6375 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6377 enum machine_mode mode0, mode1, mode2;
6380 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6383 mode0 = insn_data[d->icode].operand[0].mode;
6384 mode1 = insn_data[d->icode].operand[1].mode;
6385 mode2 = insn_data[d->icode].operand[2].mode;
6387 /* When all three operands are of the same mode. */
6388 if (mode0 == mode1 && mode1 == mode2)
6393 type = v4sf_ftype_v4sf_v4sf;
6396 type = v4si_ftype_v4si_v4si;
6399 type = v16qi_ftype_v16qi_v16qi;
6402 type = v8hi_ftype_v8hi_v8hi;
6405 type = v2si_ftype_v2si_v2si;
6408 type = v2sf_ftype_v2sf_v2sf;
6411 type = int_ftype_int_int;
6418 /* A few other combos we really don't want to do manually. */
6420 /* vint, vfloat, vfloat. */
6421 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6422 type = v4si_ftype_v4sf_v4sf;
6424 /* vshort, vchar, vchar. */
6425 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6426 type = v8hi_ftype_v16qi_v16qi;
6428 /* vint, vshort, vshort. */
6429 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6430 type = v4si_ftype_v8hi_v8hi;
6432 /* vshort, vint, vint. */
6433 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6434 type = v8hi_ftype_v4si_v4si;
6436 /* vchar, vshort, vshort. */
6437 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6438 type = v16qi_ftype_v8hi_v8hi;
6440 /* vint, vchar, vint. */
6441 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6442 type = v4si_ftype_v16qi_v4si;
6444 /* vint, vchar, vchar. */
6445 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6446 type = v4si_ftype_v16qi_v16qi;
6448 /* vint, vshort, vint. */
6449 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6450 type = v4si_ftype_v8hi_v4si;
6452 /* vint, vint, 5 bit literal. */
6453 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6454 type = v4si_ftype_v4si_char;
6456 /* vshort, vshort, 5 bit literal. */
6457 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6458 type = v8hi_ftype_v8hi_char;
6460 /* vchar, vchar, 5 bit literal. */
6461 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6462 type = v16qi_ftype_v16qi_char;
6464 /* vfloat, vint, 5 bit literal. */
6465 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6466 type = v4sf_ftype_v4si_char;
6468 /* vint, vfloat, 5 bit literal. */
6469 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6470 type = v4si_ftype_v4sf_char;
6472 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6473 type = v2si_ftype_int_int;
6475 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6476 type = v2si_ftype_v2si_char;
6478 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6479 type = v2si_ftype_int_char;
6482 else if (mode0 == SImode)
6487 type = int_ftype_v4si_v4si;
6490 type = int_ftype_v4sf_v4sf;
6493 type = int_ftype_v16qi_v16qi;
6496 type = int_ftype_v8hi_v8hi;
6506 def_builtin (d->mask, d->name, type, d->code);
6509 /* Add the simple unary operators. */
6510 d = (struct builtin_description *) bdesc_1arg;
6511 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6513 enum machine_mode mode0, mode1;
6516 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6519 mode0 = insn_data[d->icode].operand[0].mode;
6520 mode1 = insn_data[d->icode].operand[1].mode;
6522 if (mode0 == V4SImode && mode1 == QImode)
6523 type = v4si_ftype_char;
6524 else if (mode0 == V8HImode && mode1 == QImode)
6525 type = v8hi_ftype_char;
6526 else if (mode0 == V16QImode && mode1 == QImode)
6527 type = v16qi_ftype_char;
6528 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6529 type = v4sf_ftype_v4sf;
6530 else if (mode0 == V8HImode && mode1 == V16QImode)
6531 type = v8hi_ftype_v16qi;
6532 else if (mode0 == V4SImode && mode1 == V8HImode)
6533 type = v4si_ftype_v8hi;
6534 else if (mode0 == V2SImode && mode1 == V2SImode)
6535 type = v2si_ftype_v2si;
6536 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6537 type = v2sf_ftype_v2sf;
6538 else if (mode0 == V2SFmode && mode1 == V2SImode)
6539 type = v2sf_ftype_v2si;
6540 else if (mode0 == V2SImode && mode1 == V2SFmode)
6541 type = v2si_ftype_v2sf;
6542 else if (mode0 == V2SImode && mode1 == QImode)
6543 type = v2si_ftype_char;
6547 def_builtin (d->mask, d->name, type, d->code);
6552 /* Expand a block move operation, and return 1 if successful. Return 0
6553 if we should let the compiler generate normal code.
6555 operands[0] is the destination
6556 operands[1] is the source
6557 operands[2] is the length
6558 operands[3] is the alignment */
6560 #define MAX_MOVE_REG 4
6563 expand_block_move (operands)
6566 rtx orig_dest = operands[0];
6567 rtx orig_src = operands[1];
6568 rtx bytes_rtx = operands[2];
6569 rtx align_rtx = operands[3];
6570 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6576 /* If this is not a fixed size move, just call memcpy */
6580 /* If this is not a fixed size alignment, abort */
6581 if (GET_CODE (align_rtx) != CONST_INT)
6583 align = INTVAL (align_rtx);
6585 /* Anything to move? */
6586 bytes = INTVAL (bytes_rtx);
6590 /* store_one_arg depends on expand_block_move to handle at least the size of
6591 reg_parm_stack_space. */
6592 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6595 if (TARGET_STRING) /* string instructions are available */
6597 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6600 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6601 rtx (*mov) PARAMS ((rtx, rtx));
6603 enum machine_mode mode = BLKmode;
6606 if (bytes > 24 /* move up to 32 bytes at a time */
6614 && ! fixed_regs[12])
6616 move_bytes = (bytes > 32) ? 32 : bytes;
6617 gen_func.movstrsi = gen_movstrsi_8reg;
6619 else if (bytes > 16 /* move up to 24 bytes at a time */
6625 && ! fixed_regs[10])
6627 move_bytes = (bytes > 24) ? 24 : bytes;
6628 gen_func.movstrsi = gen_movstrsi_6reg;
6630 else if (bytes > 8 /* move up to 16 bytes at a time */
6636 move_bytes = (bytes > 16) ? 16 : bytes;
6637 gen_func.movstrsi = gen_movstrsi_4reg;
6639 else if (bytes >= 8 && TARGET_POWERPC64
6640 /* 64-bit loads and stores require word-aligned
6642 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6646 gen_func.mov = gen_movdi;
6648 else if (bytes > 4 && !TARGET_POWERPC64)
6649 { /* move up to 8 bytes at a time */
6650 move_bytes = (bytes > 8) ? 8 : bytes;
6651 gen_func.movstrsi = gen_movstrsi_2reg;
6653 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6654 { /* move 4 bytes */
6657 gen_func.mov = gen_movsi;
6659 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6660 { /* move 2 bytes */
6663 gen_func.mov = gen_movhi;
6665 else if (bytes == 1) /* move 1 byte */
6669 gen_func.mov = gen_movqi;
6672 { /* move up to 4 bytes at a time */
6673 move_bytes = (bytes > 4) ? 4 : bytes;
6674 gen_func.movstrsi = gen_movstrsi_1reg;
6677 src = adjust_address (orig_src, mode, offset);
6678 dest = adjust_address (orig_dest, mode, offset);
6680 if (mode == BLKmode)
6682 /* Move the address into scratch registers. The movstrsi
6683 patterns require zero offset. */
6684 if (!REG_P (XEXP (src, 0)))
6686 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6687 src = replace_equiv_address (src, src_reg);
6689 set_mem_size (src, GEN_INT (move_bytes));
6691 if (!REG_P (XEXP (dest, 0)))
6693 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6694 dest = replace_equiv_address (dest, dest_reg);
6696 set_mem_size (dest, GEN_INT (move_bytes));
6698 emit_insn ((*gen_func.movstrsi) (dest, src,
6699 GEN_INT (move_bytes & 31),
6704 rtx tmp_reg = gen_reg_rtx (mode);
6706 emit_insn ((*gen_func.mov) (tmp_reg, src));
6707 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6712 else /* string instructions not available */
6714 rtx stores[MAX_MOVE_REG];
6718 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6720 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6721 enum machine_mode mode;
6722 rtx src, dest, tmp_reg;
6724 /* Generate the appropriate load and store, saving the stores
6726 if (bytes >= 8 && TARGET_POWERPC64
6727 /* 64-bit loads and stores require word-aligned
6729 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6733 gen_mov_func = gen_movdi;
6735 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6739 gen_mov_func = gen_movsi;
6741 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6745 gen_mov_func = gen_movhi;
6751 gen_mov_func = gen_movqi;
6754 src = adjust_address (orig_src, mode, offset);
6755 dest = adjust_address (orig_dest, mode, offset);
6756 tmp_reg = gen_reg_rtx (mode);
6758 emit_insn ((*gen_mov_func) (tmp_reg, src));
6759 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6761 if (num_reg >= MAX_MOVE_REG)
6763 for (i = 0; i < num_reg; i++)
6764 emit_insn (stores[i]);
6769 for (i = 0; i < num_reg; i++)
6770 emit_insn (stores[i]);
6777 /* Return 1 if OP is a load multiple operation. It is known to be a
6778 PARALLEL and the first section will be tested. */
6781 load_multiple_operation (op, mode)
6783 enum machine_mode mode ATTRIBUTE_UNUSED;
6785 int count = XVECLEN (op, 0);
6786 unsigned int dest_regno;
6790 /* Perform a quick check so we don't blow up below. */
6792 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6793 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6794 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6797 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6798 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6800 for (i = 1; i < count; i++)
6802 rtx elt = XVECEXP (op, 0, i);
6804 if (GET_CODE (elt) != SET
6805 || GET_CODE (SET_DEST (elt)) != REG
6806 || GET_MODE (SET_DEST (elt)) != SImode
6807 || REGNO (SET_DEST (elt)) != dest_regno + i
6808 || GET_CODE (SET_SRC (elt)) != MEM
6809 || GET_MODE (SET_SRC (elt)) != SImode
6810 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6811 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6812 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6813 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6820 /* Similar, but tests for store multiple. Here, the second vector element
6821 is a CLOBBER. It will be tested later. */
6824 store_multiple_operation (op, mode)
6826 enum machine_mode mode ATTRIBUTE_UNUSED;
6828 int count = XVECLEN (op, 0) - 1;
6829 unsigned int src_regno;
6833 /* Perform a quick check so we don't blow up below. */
6835 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6836 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6837 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6840 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6841 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6843 for (i = 1; i < count; i++)
6845 rtx elt = XVECEXP (op, 0, i + 1);
6847 if (GET_CODE (elt) != SET
6848 || GET_CODE (SET_SRC (elt)) != REG
6849 || GET_MODE (SET_SRC (elt)) != SImode
6850 || REGNO (SET_SRC (elt)) != src_regno + i
6851 || GET_CODE (SET_DEST (elt)) != MEM
6852 || GET_MODE (SET_DEST (elt)) != SImode
6853 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6854 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6855 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6856 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6863 /* Return a string to perform a load_multiple operation.
6864 operands[0] is the vector.
6865 operands[1] is the source address.
6866 operands[2] is the first destination register. */
6869 rs6000_output_load_multiple (operands)
6872 /* We have to handle the case where the pseudo used to contain the address
6873 is assigned to one of the output registers. */
6875 int words = XVECLEN (operands[0], 0);
6878 if (XVECLEN (operands[0], 0) == 1)
6879 return "{l|lwz} %2,0(%1)";
6881 for (i = 0; i < words; i++)
6882 if (refers_to_regno_p (REGNO (operands[2]) + i,
6883 REGNO (operands[2]) + i + 1, operands[1], 0))
6887 xop[0] = GEN_INT (4 * (words-1));
6888 xop[1] = operands[1];
6889 xop[2] = operands[2];
6890 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6895 xop[0] = GEN_INT (4 * (words-1));
6896 xop[1] = operands[1];
6897 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6898 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6903 for (j = 0; j < words; j++)
6906 xop[0] = GEN_INT (j * 4);
6907 xop[1] = operands[1];
6908 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6909 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6911 xop[0] = GEN_INT (i * 4);
6912 xop[1] = operands[1];
6913 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6918 return "{lsi|lswi} %2,%1,%N0";
6921 /* Return 1 for a parallel vrsave operation. */
6924 vrsave_operation (op, mode)
6926 enum machine_mode mode ATTRIBUTE_UNUSED;
6928 int count = XVECLEN (op, 0);
6929 unsigned int dest_regno, src_regno;
6933 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6934 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6935 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6938 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6939 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6941 if (dest_regno != VRSAVE_REGNO
6942 && src_regno != VRSAVE_REGNO)
6945 for (i = 1; i < count; i++)
6947 rtx elt = XVECEXP (op, 0, i);
6949 if (GET_CODE (elt) != CLOBBER
6950 && GET_CODE (elt) != SET)
6957 /* Return 1 for an PARALLEL suitable for mtcrf. */
6960 mtcrf_operation (op, mode)
6962 enum machine_mode mode ATTRIBUTE_UNUSED;
6964 int count = XVECLEN (op, 0);
6968 /* Perform a quick check so we don't blow up below. */
6970 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6971 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6972 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6974 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6976 if (GET_CODE (src_reg) != REG
6977 || GET_MODE (src_reg) != SImode
6978 || ! INT_REGNO_P (REGNO (src_reg)))
6981 for (i = 0; i < count; i++)
6983 rtx exp = XVECEXP (op, 0, i);
6987 if (GET_CODE (exp) != SET
6988 || GET_CODE (SET_DEST (exp)) != REG
6989 || GET_MODE (SET_DEST (exp)) != CCmode
6990 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6992 unspec = SET_SRC (exp);
6993 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6995 if (GET_CODE (unspec) != UNSPEC
6996 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
6997 || XVECLEN (unspec, 0) != 2
6998 || XVECEXP (unspec, 0, 0) != src_reg
6999 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7000 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7006 /* Return 1 for an PARALLEL suitable for lmw. */
7009 lmw_operation (op, mode)
7011 enum machine_mode mode ATTRIBUTE_UNUSED;
7013 int count = XVECLEN (op, 0);
7014 unsigned int dest_regno;
7016 unsigned int base_regno;
7017 HOST_WIDE_INT offset;
7020 /* Perform a quick check so we don't blow up below. */
7022 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7023 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7024 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7027 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7028 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7031 || count != 32 - (int) dest_regno)
7034 if (legitimate_indirect_address_p (src_addr, 0))
7037 base_regno = REGNO (src_addr);
7038 if (base_regno == 0)
7041 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7043 offset = INTVAL (XEXP (src_addr, 1));
7044 base_regno = REGNO (XEXP (src_addr, 0));
7049 for (i = 0; i < count; i++)
7051 rtx elt = XVECEXP (op, 0, i);
7054 HOST_WIDE_INT newoffset;
7056 if (GET_CODE (elt) != SET
7057 || GET_CODE (SET_DEST (elt)) != REG
7058 || GET_MODE (SET_DEST (elt)) != SImode
7059 || REGNO (SET_DEST (elt)) != dest_regno + i
7060 || GET_CODE (SET_SRC (elt)) != MEM
7061 || GET_MODE (SET_SRC (elt)) != SImode)
7063 newaddr = XEXP (SET_SRC (elt), 0);
7064 if (legitimate_indirect_address_p (newaddr, 0))
7069 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7071 addr_reg = XEXP (newaddr, 0);
7072 newoffset = INTVAL (XEXP (newaddr, 1));
7076 if (REGNO (addr_reg) != base_regno
7077 || newoffset != offset + 4 * i)
7084 /* Return 1 for an PARALLEL suitable for stmw. */
7087 stmw_operation (op, mode)
7089 enum machine_mode mode ATTRIBUTE_UNUSED;
7091 int count = XVECLEN (op, 0);
7092 unsigned int src_regno;
7094 unsigned int base_regno;
7095 HOST_WIDE_INT offset;
7098 /* Perform a quick check so we don't blow up below. */
7100 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7101 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7102 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7105 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7106 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7109 || count != 32 - (int) src_regno)
7112 if (legitimate_indirect_address_p (dest_addr, 0))
7115 base_regno = REGNO (dest_addr);
7116 if (base_regno == 0)
7119 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7121 offset = INTVAL (XEXP (dest_addr, 1));
7122 base_regno = REGNO (XEXP (dest_addr, 0));
7127 for (i = 0; i < count; i++)
7129 rtx elt = XVECEXP (op, 0, i);
7132 HOST_WIDE_INT newoffset;
7134 if (GET_CODE (elt) != SET
7135 || GET_CODE (SET_SRC (elt)) != REG
7136 || GET_MODE (SET_SRC (elt)) != SImode
7137 || REGNO (SET_SRC (elt)) != src_regno + i
7138 || GET_CODE (SET_DEST (elt)) != MEM
7139 || GET_MODE (SET_DEST (elt)) != SImode)
7141 newaddr = XEXP (SET_DEST (elt), 0);
7142 if (legitimate_indirect_address_p (newaddr, 0))
7147 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7149 addr_reg = XEXP (newaddr, 0);
7150 newoffset = INTVAL (XEXP (newaddr, 1));
7154 if (REGNO (addr_reg) != base_regno
7155 || newoffset != offset + 4 * i)
7162 /* A validation routine: say whether CODE, a condition code, and MODE
7163 match. The other alternatives either don't make sense or should
7164 never be generated. */
7167 validate_condition_mode (code, mode)
7169 enum machine_mode mode;
7171 if (GET_RTX_CLASS (code) != '<'
7172 || GET_MODE_CLASS (mode) != MODE_CC)
7175 /* These don't make sense. */
7176 if ((code == GT || code == LT || code == GE || code == LE)
7177 && mode == CCUNSmode)
7180 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7181 && mode != CCUNSmode)
7184 if (mode != CCFPmode
7185 && (code == ORDERED || code == UNORDERED
7186 || code == UNEQ || code == LTGT
7187 || code == UNGT || code == UNLT
7188 || code == UNGE || code == UNLE))
7191 /* These should never be generated except for
7192 flag_finite_math_only. */
7193 if (mode == CCFPmode
7194 && ! flag_finite_math_only
7195 && (code == LE || code == GE
7196 || code == UNEQ || code == LTGT
7197 || code == UNGT || code == UNLT))
7200 /* These are invalid; the information is not there. */
7201 if (mode == CCEQmode
7202 && code != EQ && code != NE)
7206 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7207 We only check the opcode against the mode of the CC value here. */
7210 branch_comparison_operator (op, mode)
7212 enum machine_mode mode ATTRIBUTE_UNUSED;
7214 enum rtx_code code = GET_CODE (op);
7215 enum machine_mode cc_mode;
7217 if (GET_RTX_CLASS (code) != '<')
7220 cc_mode = GET_MODE (XEXP (op, 0));
7221 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7224 validate_condition_mode (code, cc_mode);
7229 /* Return 1 if OP is a comparison operation that is valid for a branch
7230 insn and which is true if the corresponding bit in the CC register
7234 branch_positive_comparison_operator (op, mode)
7236 enum machine_mode mode;
7240 if (! branch_comparison_operator (op, mode))
7243 code = GET_CODE (op);
7244 return (code == EQ || code == LT || code == GT
7245 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7246 || code == LTU || code == GTU
7247 || code == UNORDERED);
7250 /* Return 1 if OP is a comparison operation that is valid for an scc
7251 insn: it must be a positive comparison. */
7254 scc_comparison_operator (op, mode)
7256 enum machine_mode mode;
7258 return branch_positive_comparison_operator (op, mode);
7262 trap_comparison_operator (op, mode)
7264 enum machine_mode mode;
7266 if (mode != VOIDmode && mode != GET_MODE (op))
7268 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7272 boolean_operator (op, mode)
7274 enum machine_mode mode ATTRIBUTE_UNUSED;
7276 enum rtx_code code = GET_CODE (op);
7277 return (code == AND || code == IOR || code == XOR);
7281 boolean_or_operator (op, mode)
7283 enum machine_mode mode ATTRIBUTE_UNUSED;
7285 enum rtx_code code = GET_CODE (op);
7286 return (code == IOR || code == XOR);
7290 min_max_operator (op, mode)
7292 enum machine_mode mode ATTRIBUTE_UNUSED;
7294 enum rtx_code code = GET_CODE (op);
7295 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7298 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7299 mask required to convert the result of a rotate insn into a shift
7300 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7303 includes_lshift_p (shiftop, andop)
7307 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7309 shift_mask <<= INTVAL (shiftop);
7311 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7314 /* Similar, but for right shift. */
7317 includes_rshift_p (shiftop, andop)
7321 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7323 shift_mask >>= INTVAL (shiftop);
7325 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7328 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7329 to perform a left shift. It must have exactly SHIFTOP least
7330 significant 0's, then one or more 1's, then zero or more 0's. */
7333 includes_rldic_lshift_p (shiftop, andop)
7337 if (GET_CODE (andop) == CONST_INT)
7339 HOST_WIDE_INT c, lsb, shift_mask;
7342 if (c == 0 || c == ~0)
7346 shift_mask <<= INTVAL (shiftop);
7348 /* Find the least significant one bit. */
7351 /* It must coincide with the LSB of the shift mask. */
7352 if (-lsb != shift_mask)
7355 /* Invert to look for the next transition (if any). */
7358 /* Remove the low group of ones (originally low group of zeros). */
7361 /* Again find the lsb, and check we have all 1's above. */
7365 else if (GET_CODE (andop) == CONST_DOUBLE
7366 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7368 HOST_WIDE_INT low, high, lsb;
7369 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7371 low = CONST_DOUBLE_LOW (andop);
7372 if (HOST_BITS_PER_WIDE_INT < 64)
7373 high = CONST_DOUBLE_HIGH (andop);
7375 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7376 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7379 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7381 shift_mask_high = ~0;
7382 if (INTVAL (shiftop) > 32)
7383 shift_mask_high <<= INTVAL (shiftop) - 32;
7387 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7394 return high == -lsb;
7397 shift_mask_low = ~0;
7398 shift_mask_low <<= INTVAL (shiftop);
7402 if (-lsb != shift_mask_low)
7405 if (HOST_BITS_PER_WIDE_INT < 64)
7410 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7413 return high == -lsb;
7417 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7423 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7424 to perform a left shift. It must have SHIFTOP or more least
7425 signifigant 0's, with the remainder of the word 1's. */
7428 includes_rldicr_lshift_p (shiftop, andop)
7432 if (GET_CODE (andop) == CONST_INT)
7434 HOST_WIDE_INT c, lsb, shift_mask;
7437 shift_mask <<= INTVAL (shiftop);
7440 /* Find the least signifigant one bit. */
7443 /* It must be covered by the shift mask.
7444 This test also rejects c == 0. */
7445 if ((lsb & shift_mask) == 0)
7448 /* Check we have all 1's above the transition, and reject all 1's. */
7449 return c == -lsb && lsb != 1;
7451 else if (GET_CODE (andop) == CONST_DOUBLE
7452 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7454 HOST_WIDE_INT low, lsb, shift_mask_low;
7456 low = CONST_DOUBLE_LOW (andop);
7458 if (HOST_BITS_PER_WIDE_INT < 64)
7460 HOST_WIDE_INT high, shift_mask_high;
7462 high = CONST_DOUBLE_HIGH (andop);
7466 shift_mask_high = ~0;
7467 if (INTVAL (shiftop) > 32)
7468 shift_mask_high <<= INTVAL (shiftop) - 32;
7472 if ((lsb & shift_mask_high) == 0)
7475 return high == -lsb;
7481 shift_mask_low = ~0;
7482 shift_mask_low <<= INTVAL (shiftop);
7486 if ((lsb & shift_mask_low) == 0)
7489 return low == -lsb && lsb != 1;
7495 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7496 for lfq and stfq insns.
7498 Note reg1 and reg2 *must* be hard registers. To be sure we will
7499 abort if we are passed pseudo registers. */
7502 registers_ok_for_quad_peep (reg1, reg2)
7505 /* We might have been passed a SUBREG. */
7506 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7509 return (REGNO (reg1) == REGNO (reg2) - 1);
7512 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7513 addr1 and addr2 must be in consecutive memory locations
7514 (addr2 == addr1 + 8). */
7517 addrs_ok_for_quad_peep (addr1, addr2)
7524 /* Extract an offset (if used) from the first addr. */
7525 if (GET_CODE (addr1) == PLUS)
7527 /* If not a REG, return zero. */
7528 if (GET_CODE (XEXP (addr1, 0)) != REG)
7532 reg1 = REGNO (XEXP (addr1, 0));
7533 /* The offset must be constant! */
7534 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7536 offset1 = INTVAL (XEXP (addr1, 1));
7539 else if (GET_CODE (addr1) != REG)
7543 reg1 = REGNO (addr1);
7544 /* This was a simple (mem (reg)) expression. Offset is 0. */
7548 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7549 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7550 register as addr1. */
7551 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7553 if (GET_CODE (addr2) != PLUS)
7556 if (GET_CODE (XEXP (addr2, 0)) != REG
7557 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7560 if (reg1 != REGNO (XEXP (addr2, 0)))
7563 /* The offset for the second addr must be 8 more than the first addr. */
7564 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7567 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7572 /* Return the register class of a scratch register needed to copy IN into
7573 or out of a register in CLASS in MODE. If it can be done directly,
7574 NO_REGS is returned. */
7577 secondary_reload_class (class, mode, in)
7578 enum reg_class class;
7579 enum machine_mode mode ATTRIBUTE_UNUSED;
7584 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7586 && MACHOPIC_INDIRECT
7590 /* We cannot copy a symbolic operand directly into anything
7591 other than BASE_REGS for TARGET_ELF. So indicate that a
7592 register from BASE_REGS is needed as an intermediate
7595 On Darwin, pic addresses require a load from memory, which
7596 needs a base register. */
7597 if (class != BASE_REGS
7598 && (GET_CODE (in) == SYMBOL_REF
7599 || GET_CODE (in) == HIGH
7600 || GET_CODE (in) == LABEL_REF
7601 || GET_CODE (in) == CONST))
7605 if (GET_CODE (in) == REG)
7608 if (regno >= FIRST_PSEUDO_REGISTER)
7610 regno = true_regnum (in);
7611 if (regno >= FIRST_PSEUDO_REGISTER)
7615 else if (GET_CODE (in) == SUBREG)
7617 regno = true_regnum (in);
7618 if (regno >= FIRST_PSEUDO_REGISTER)
7624 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7626 if (class == GENERAL_REGS || class == BASE_REGS
7627 || (regno >= 0 && INT_REGNO_P (regno)))
7630 /* Constants, memory, and FP registers can go into FP registers. */
7631 if ((regno == -1 || FP_REGNO_P (regno))
7632 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7635 /* Memory, and AltiVec registers can go into AltiVec registers. */
7636 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7637 && class == ALTIVEC_REGS)
7640 /* We can copy among the CR registers. */
7641 if ((class == CR_REGS || class == CR0_REGS)
7642 && regno >= 0 && CR_REGNO_P (regno))
7645 /* Otherwise, we need GENERAL_REGS. */
7646 return GENERAL_REGS;
7649 /* Given a comparison operation, return the bit number in CCR to test. We
7650 know this is a valid comparison.
7652 SCC_P is 1 if this is for an scc. That means that %D will have been
7653 used instead of %C, so the bits will be in different places.
7655 Return -1 if OP isn't a valid comparison for some reason. */
7662 enum rtx_code code = GET_CODE (op);
7663 enum machine_mode cc_mode;
7668 if (GET_RTX_CLASS (code) != '<')
7673 if (GET_CODE (reg) != REG
7674 || ! CR_REGNO_P (REGNO (reg)))
7677 cc_mode = GET_MODE (reg);
7678 cc_regnum = REGNO (reg);
7679 base_bit = 4 * (cc_regnum - CR0_REGNO);
7681 validate_condition_mode (code, cc_mode);
7683 /* When generating a sCOND operation, only positive conditions are
7685 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7686 && code != GTU && code != LTU)
7692 if (TARGET_E500 && !TARGET_FPRS
7693 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7694 return base_bit + 1;
7695 return scc_p ? base_bit + 3 : base_bit + 2;
7697 if (TARGET_E500 && !TARGET_FPRS
7698 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7699 return base_bit + 1;
7700 return base_bit + 2;
7701 case GT: case GTU: case UNLE:
7702 return base_bit + 1;
7703 case LT: case LTU: case UNGE:
7705 case ORDERED: case UNORDERED:
7706 return base_bit + 3;
7709 /* If scc, we will have done a cror to put the bit in the
7710 unordered position. So test that bit. For integer, this is ! LT
7711 unless this is an scc insn. */
7712 return scc_p ? base_bit + 3 : base_bit;
7715 return scc_p ? base_bit + 3 : base_bit + 1;
7722 /* Return the GOT register. */
7725 rs6000_got_register (value)
7726 rtx value ATTRIBUTE_UNUSED;
7728 /* The second flow pass currently (June 1999) can't update
7729 regs_ever_live without disturbing other parts of the compiler, so
7730 update it here to make the prolog/epilogue code happy. */
7731 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7732 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7734 current_function_uses_pic_offset_table = 1;
7736 return pic_offset_table_rtx;
7739 /* Function to init struct machine_function.
7740 This will be called, via a pointer variable,
7741 from push_function_context. */
7743 static struct machine_function *
7744 rs6000_init_machine_status ()
7746 return ggc_alloc_cleared (sizeof (machine_function));
7749 /* These macros test for integers and extract the low-order bits. */
7751 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7752 && GET_MODE (X) == VOIDmode)
7754 #define INT_LOWPART(X) \
7755 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7762 unsigned long val = INT_LOWPART (op);
7764 /* If the high bit is zero, the value is the first 1 bit we find
7766 if ((val & 0x80000000) == 0)
7768 if ((val & 0xffffffff) == 0)
7772 while (((val <<= 1) & 0x80000000) == 0)
7777 /* If the high bit is set and the low bit is not, or the mask is all
7778 1's, the value is zero. */
7779 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7782 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7785 while (((val >>= 1) & 1) != 0)
7796 unsigned long val = INT_LOWPART (op);
7798 /* If the low bit is zero, the value is the first 1 bit we find from
7802 if ((val & 0xffffffff) == 0)
7806 while (((val >>= 1) & 1) == 0)
7812 /* If the low bit is set and the high bit is not, or the mask is all
7813 1's, the value is 31. */
7814 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7817 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7820 while (((val <<= 1) & 0x80000000) != 0)
7826 /* Print an operand. Recognize special options, documented below. */
7829 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7830 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7832 #define SMALL_DATA_RELOC "sda21"
7833 #define SMALL_DATA_REG 0
7837 print_operand (file, x, code)
7844 unsigned HOST_WIDE_INT uval;
7849 /* Write out an instruction after the call which may be replaced
7850 with glue code by the loader. This depends on the AIX version. */
7851 asm_fprintf (file, RS6000_CALL_GLUE);
7854 /* %a is output_address. */
7857 /* If X is a constant integer whose low-order 5 bits are zero,
7858 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7859 in the AIX assembler where "sri" with a zero shift count
7860 writes a trash instruction. */
7861 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7868 /* If constant, low-order 16 bits of constant, unsigned.
7869 Otherwise, write normally. */
7871 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7873 print_operand (file, x, 0);
7877 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7878 for 64-bit mask direction. */
7879 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7882 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7886 /* X is a CR register. Print the number of the EQ bit of the CR */
7887 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7888 output_operand_lossage ("invalid %%E value");
7890 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7894 /* X is a CR register. Print the shift count needed to move it
7895 to the high-order four bits. */
7896 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7897 output_operand_lossage ("invalid %%f value");
7899 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7903 /* Similar, but print the count for the rotate in the opposite
7905 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7906 output_operand_lossage ("invalid %%F value");
7908 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7912 /* X is a constant integer. If it is negative, print "m",
7913 otherwise print "z". This is to make an aze or ame insn. */
7914 if (GET_CODE (x) != CONST_INT)
7915 output_operand_lossage ("invalid %%G value");
7916 else if (INTVAL (x) >= 0)
7923 /* If constant, output low-order five bits. Otherwise, write
7926 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7928 print_operand (file, x, 0);
7932 /* If constant, output low-order six bits. Otherwise, write
7935 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7937 print_operand (file, x, 0);
7941 /* Print `i' if this is a constant, else nothing. */
7947 /* Write the bit number in CCR for jump. */
7950 output_operand_lossage ("invalid %%j code");
7952 fprintf (file, "%d", i);
7956 /* Similar, but add one for shift count in rlinm for scc and pass
7957 scc flag to `ccr_bit'. */
7960 output_operand_lossage ("invalid %%J code");
7962 /* If we want bit 31, write a shift count of zero, not 32. */
7963 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7967 /* X must be a constant. Write the 1's complement of the
7970 output_operand_lossage ("invalid %%k value");
7972 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7976 /* X must be a symbolic constant on ELF. Write an
7977 expression suitable for an 'addi' that adds in the low 16
7979 if (GET_CODE (x) != CONST)
7981 print_operand_address (file, x);
7986 if (GET_CODE (XEXP (x, 0)) != PLUS
7987 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7988 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7989 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7990 output_operand_lossage ("invalid %%K value");
7991 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7993 /* For GNU as, there must be a non-alphanumeric character
7994 between 'l' and the number. The '-' is added by
7995 print_operand() already. */
7996 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7998 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8002 /* %l is output_asm_label. */
8005 /* Write second word of DImode or DFmode reference. Works on register
8006 or non-indexed memory only. */
8007 if (GET_CODE (x) == REG)
8008 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8009 else if (GET_CODE (x) == MEM)
8011 /* Handle possible auto-increment. Since it is pre-increment and
8012 we have already done it, we can just use an offset of word. */
8013 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8014 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8015 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8018 output_address (XEXP (adjust_address_nv (x, SImode,
8022 if (small_data_operand (x, GET_MODE (x)))
8023 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8024 reg_names[SMALL_DATA_REG]);
8029 /* MB value for a mask operand. */
8030 if (! mask_operand (x, SImode))
8031 output_operand_lossage ("invalid %%m value");
8033 fprintf (file, "%d", extract_MB (x));
8037 /* ME value for a mask operand. */
8038 if (! mask_operand (x, SImode))
8039 output_operand_lossage ("invalid %%M value");
8041 fprintf (file, "%d", extract_ME (x));
8044 /* %n outputs the negative of its operand. */
8047 /* Write the number of elements in the vector times 4. */
8048 if (GET_CODE (x) != PARALLEL)
8049 output_operand_lossage ("invalid %%N value");
8051 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8055 /* Similar, but subtract 1 first. */
8056 if (GET_CODE (x) != PARALLEL)
8057 output_operand_lossage ("invalid %%O value");
8059 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8063 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8065 || INT_LOWPART (x) < 0
8066 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8067 output_operand_lossage ("invalid %%p value");
8069 fprintf (file, "%d", i);
8073 /* The operand must be an indirect memory reference. The result
8074 is the register number. */
8075 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8076 || REGNO (XEXP (x, 0)) >= 32)
8077 output_operand_lossage ("invalid %%P value");
8079 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8083 /* This outputs the logical code corresponding to a boolean
8084 expression. The expression may have one or both operands
8085 negated (if one, only the first one). For condition register
8086 logical operations, it will also treat the negated
8087 CR codes as NOTs, but not handle NOTs of them. */
8089 const char *const *t = 0;
8091 enum rtx_code code = GET_CODE (x);
8092 static const char * const tbl[3][3] = {
8093 { "and", "andc", "nor" },
8094 { "or", "orc", "nand" },
8095 { "xor", "eqv", "xor" } };
8099 else if (code == IOR)
8101 else if (code == XOR)
8104 output_operand_lossage ("invalid %%q value");
8106 if (GET_CODE (XEXP (x, 0)) != NOT)
8110 if (GET_CODE (XEXP (x, 1)) == NOT)
8121 /* X is a CR register. Print the mask for `mtcrf'. */
8122 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8123 output_operand_lossage ("invalid %%R value");
8125 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8129 /* Low 5 bits of 32 - value */
8131 output_operand_lossage ("invalid %%s value");
8133 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8137 /* PowerPC64 mask position. All 0's is excluded.
8138 CONST_INT 32-bit mask is considered sign-extended so any
8139 transition must occur within the CONST_INT, not on the boundary. */
8140 if (! mask64_operand (x, DImode))
8141 output_operand_lossage ("invalid %%S value");
8143 uval = INT_LOWPART (x);
8145 if (uval & 1) /* Clear Left */
8147 #if HOST_BITS_PER_WIDE_INT > 64
8148 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8152 else /* Clear Right */
8155 #if HOST_BITS_PER_WIDE_INT > 64
8156 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8164 fprintf (file, "%d", i);
8168 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8169 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8172 /* Bit 3 is OV bit. */
8173 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8175 /* If we want bit 31, write a shift count of zero, not 32. */
8176 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8180 /* Print the symbolic name of a branch target register. */
8181 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8182 && REGNO (x) != COUNT_REGISTER_REGNUM))
8183 output_operand_lossage ("invalid %%T value");
8184 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8185 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8187 fputs ("ctr", file);
8191 /* High-order 16 bits of constant for use in unsigned operand. */
8193 output_operand_lossage ("invalid %%u value");
8195 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8196 (INT_LOWPART (x) >> 16) & 0xffff);
8200 /* High-order 16 bits of constant for use in signed operand. */
8202 output_operand_lossage ("invalid %%v value");
8204 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8205 (INT_LOWPART (x) >> 16) & 0xffff);
8209 /* Print `u' if this has an auto-increment or auto-decrement. */
8210 if (GET_CODE (x) == MEM
8211 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8212 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8217 /* Print the trap code for this operand. */
8218 switch (GET_CODE (x))
8221 fputs ("eq", file); /* 4 */
8224 fputs ("ne", file); /* 24 */
8227 fputs ("lt", file); /* 16 */
8230 fputs ("le", file); /* 20 */
8233 fputs ("gt", file); /* 8 */
8236 fputs ("ge", file); /* 12 */
8239 fputs ("llt", file); /* 2 */
8242 fputs ("lle", file); /* 6 */
8245 fputs ("lgt", file); /* 1 */
8248 fputs ("lge", file); /* 5 */
8256 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8259 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8260 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8262 print_operand (file, x, 0);
8266 /* MB value for a PowerPC64 rldic operand. */
8267 val = (GET_CODE (x) == CONST_INT
8268 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8273 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8274 if ((val <<= 1) < 0)
8277 #if HOST_BITS_PER_WIDE_INT == 32
8278 if (GET_CODE (x) == CONST_INT && i >= 0)
8279 i += 32; /* zero-extend high-part was all 0's */
8280 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8282 val = CONST_DOUBLE_LOW (x);
8289 for ( ; i < 64; i++)
8290 if ((val <<= 1) < 0)
8295 fprintf (file, "%d", i + 1);
8299 if (GET_CODE (x) == MEM
8300 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8305 /* Like 'L', for third word of TImode */
8306 if (GET_CODE (x) == REG)
8307 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8308 else if (GET_CODE (x) == MEM)
8310 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8311 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8312 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8314 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8315 if (small_data_operand (x, GET_MODE (x)))
8316 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8317 reg_names[SMALL_DATA_REG]);
8322 /* X is a SYMBOL_REF. Write out the name preceded by a
8323 period and without any trailing data in brackets. Used for function
8324 names. If we are configured for System V (or the embedded ABI) on
8325 the PowerPC, do not emit the period, since those systems do not use
8326 TOCs and the like. */
8327 if (GET_CODE (x) != SYMBOL_REF)
8330 if (XSTR (x, 0)[0] != '.')
8332 switch (DEFAULT_ABI)
8347 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8349 assemble_name (file, XSTR (x, 0));
8354 /* Like 'L', for last word of TImode. */
8355 if (GET_CODE (x) == REG)
8356 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8357 else if (GET_CODE (x) == MEM)
8359 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8360 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8361 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8363 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8364 if (small_data_operand (x, GET_MODE (x)))
8365 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8366 reg_names[SMALL_DATA_REG]);
8370 /* Print AltiVec or SPE memory operand. */
8375 if (GET_CODE (x) != MEM)
8383 if (GET_CODE (tmp) == REG)
8385 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8388 /* Handle [reg+UIMM]. */
8389 else if (GET_CODE (tmp) == PLUS &&
8390 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8394 if (GET_CODE (XEXP (tmp, 0)) != REG)
8397 x = INTVAL (XEXP (tmp, 1));
8398 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8402 /* Fall through. Must be [reg+reg]. */
8404 if (GET_CODE (tmp) == REG)
8405 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8406 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8408 if (REGNO (XEXP (tmp, 0)) == 0)
8409 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8410 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8412 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8413 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8421 if (GET_CODE (x) == REG)
8422 fprintf (file, "%s", reg_names[REGNO (x)]);
8423 else if (GET_CODE (x) == MEM)
8425 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8426 know the width from the mode. */
8427 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8428 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8429 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8430 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8431 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8432 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8434 output_address (XEXP (x, 0));
8437 output_addr_const (file, x);
8441 output_operand_lossage ("invalid %%xn code");
8445 /* Print the address of an operand. */
8448 print_operand_address (file, x)
8452 if (GET_CODE (x) == REG)
8453 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8454 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8455 || GET_CODE (x) == LABEL_REF)
8457 output_addr_const (file, x);
8458 if (small_data_operand (x, GET_MODE (x)))
8459 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8460 reg_names[SMALL_DATA_REG]);
8461 else if (TARGET_TOC)
8464 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8466 if (REGNO (XEXP (x, 0)) == 0)
8467 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8468 reg_names[ REGNO (XEXP (x, 0)) ]);
8470 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8471 reg_names[ REGNO (XEXP (x, 1)) ]);
8473 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8475 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8476 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8479 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8480 && CONSTANT_P (XEXP (x, 1)))
8482 output_addr_const (file, XEXP (x, 1));
8483 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8487 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8488 && CONSTANT_P (XEXP (x, 1)))
8490 fprintf (file, "lo16(");
8491 output_addr_const (file, XEXP (x, 1));
8492 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8495 else if (legitimate_constant_pool_address_p (x))
8497 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8499 rtx contains_minus = XEXP (x, 1);
8503 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8504 turn it into (sym) for output_addr_const. */
8505 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8506 contains_minus = XEXP (contains_minus, 0);
8508 minus = XEXP (contains_minus, 0);
8509 symref = XEXP (minus, 0);
8510 XEXP (contains_minus, 0) = symref;
8515 name = XSTR (symref, 0);
8516 newname = alloca (strlen (name) + sizeof ("@toc"));
8517 strcpy (newname, name);
8518 strcat (newname, "@toc");
8519 XSTR (symref, 0) = newname;
8521 output_addr_const (file, XEXP (x, 1));
8523 XSTR (symref, 0) = name;
8524 XEXP (contains_minus, 0) = minus;
8527 output_addr_const (file, XEXP (x, 1));
8529 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8535 /* Target hook for assembling integer objects. The PowerPC version has
8536 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8537 is defined. It also needs to handle DI-mode objects on 64-bit
8541 rs6000_assemble_integer (x, size, aligned_p)
8546 #ifdef RELOCATABLE_NEEDS_FIXUP
8547 /* Special handling for SI values. */
8548 if (size == 4 && aligned_p)
8550 extern int in_toc_section PARAMS ((void));
8551 static int recurse = 0;
8553 /* For -mrelocatable, we mark all addresses that need to be fixed up
8554 in the .fixup section. */
8555 if (TARGET_RELOCATABLE
8556 && !in_toc_section ()
8557 && !in_text_section ()
8559 && GET_CODE (x) != CONST_INT
8560 && GET_CODE (x) != CONST_DOUBLE
8566 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8568 ASM_OUTPUT_LABEL (asm_out_file, buf);
8569 fprintf (asm_out_file, "\t.long\t(");
8570 output_addr_const (asm_out_file, x);
8571 fprintf (asm_out_file, ")@fixup\n");
8572 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8573 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8574 fprintf (asm_out_file, "\t.long\t");
8575 assemble_name (asm_out_file, buf);
8576 fprintf (asm_out_file, "\n\t.previous\n");
8580 /* Remove initial .'s to turn a -mcall-aixdesc function
8581 address into the address of the descriptor, not the function
8583 else if (GET_CODE (x) == SYMBOL_REF
8584 && XSTR (x, 0)[0] == '.'
8585 && DEFAULT_ABI == ABI_AIX)
8587 const char *name = XSTR (x, 0);
8588 while (*name == '.')
8591 fprintf (asm_out_file, "\t.long\t%s\n", name);
8595 #endif /* RELOCATABLE_NEEDS_FIXUP */
8596 return default_assemble_integer (x, size, aligned_p);
8599 #ifdef HAVE_GAS_HIDDEN
8600 /* Emit an assembler directive to set symbol visibility for DECL to
8604 rs6000_assemble_visibility (decl, vis)
8608 /* Functions need to have their entry point symbol visibility set as
8609 well as their descriptor symbol visibility. */
8610 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8612 static const char * const visibility_types[] = {
8613 NULL, "internal", "hidden", "protected"
8616 const char *name, *type;
8618 name = ((* targetm.strip_name_encoding)
8619 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8620 type = visibility_types[vis];
8622 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8623 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8626 default_assemble_visibility (decl, vis);
8631 rs6000_reverse_condition (mode, code)
8632 enum machine_mode mode;
8635 /* Reversal of FP compares takes care -- an ordered compare
8636 becomes an unordered compare and vice versa. */
8637 if (mode == CCFPmode
8638 && (!flag_finite_math_only
8639 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8640 || code == UNEQ || code == LTGT))
8641 return reverse_condition_maybe_unordered (code);
8643 return reverse_condition (code);
8646 /* Generate a compare for CODE. Return a brand-new rtx that
8647 represents the result of the compare. */
8650 rs6000_generate_compare (code)
8653 enum machine_mode comp_mode;
8656 if (rs6000_compare_fp_p)
8657 comp_mode = CCFPmode;
8658 else if (code == GTU || code == LTU
8659 || code == GEU || code == LEU)
8660 comp_mode = CCUNSmode;
8664 /* First, the compare. */
8665 compare_result = gen_reg_rtx (comp_mode);
8667 /* SPE FP compare instructions on the GPRs. Yuck! */
8668 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8669 && rs6000_compare_fp_p)
8671 rtx cmp, or1, or2, or_result, compare_result2;
8679 cmp = flag_finite_math_only
8680 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8682 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8683 rs6000_compare_op1);
8691 cmp = flag_finite_math_only
8692 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8694 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8695 rs6000_compare_op1);
8703 cmp = flag_finite_math_only
8704 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8706 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8707 rs6000_compare_op1);
8713 /* Synthesize LE and GE from LT/GT || EQ. */
8714 if (code == LE || code == GE || code == LEU || code == GEU)
8716 /* Synthesize GE/LE frome GT/LT || EQ. */
8722 case LE: code = LT; break;
8723 case GE: code = GT; break;
8724 case LEU: code = LT; break;
8725 case GEU: code = GT; break;
8729 or1 = gen_reg_rtx (SImode);
8730 or2 = gen_reg_rtx (SImode);
8731 or_result = gen_reg_rtx (CCEQmode);
8732 compare_result2 = gen_reg_rtx (CCFPmode);
8735 cmp = flag_finite_math_only
8736 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8738 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8739 rs6000_compare_op1);
8742 /* The MC8540 FP compare instructions set the CR bits
8743 differently than other PPC compare instructions. For
8744 that matter, there is no generic test instruction, but a
8745 testgt, testlt, and testeq. For a true condition, bit 2
8746 is set (x1xx) in the CR. Following the traditional CR
8752 ... bit 2 would be a GT CR alias, so later on we
8753 look in the GT bits for the branch instructions.
8754 However, we must be careful to emit correct RTL in
8755 the meantime, so optimizations don't get confused. */
8757 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8758 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8760 /* OR them together. */
8761 cmp = gen_rtx_SET (VOIDmode, or_result,
8762 gen_rtx_COMPARE (CCEQmode,
8763 gen_rtx_IOR (SImode, or1, or2),
8765 compare_result = or_result;
8770 /* We only care about 1 bit (x1xx), so map everything to NE to
8771 maintain rtl sanity. We'll get to the right bit (x1xx) at
8772 code output time. */
8773 if (code == NE || code == LTGT)
8774 /* Do the inverse here because we have no cmpne
8775 instruction. We use the cmpeq instruction and expect
8776 to get a 0 instead. */
8785 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8786 gen_rtx_COMPARE (comp_mode,
8788 rs6000_compare_op1)));
8790 /* Some kinds of FP comparisons need an OR operation;
8791 under flag_finite_math_only we don't bother. */
8792 if (rs6000_compare_fp_p
8793 && ! flag_finite_math_only
8794 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
8795 && (code == LE || code == GE
8796 || code == UNEQ || code == LTGT
8797 || code == UNGT || code == UNLT))
8799 enum rtx_code or1, or2;
8800 rtx or1_rtx, or2_rtx, compare2_rtx;
8801 rtx or_result = gen_reg_rtx (CCEQmode);
8805 case LE: or1 = LT; or2 = EQ; break;
8806 case GE: or1 = GT; or2 = EQ; break;
8807 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8808 case LTGT: or1 = LT; or2 = GT; break;
8809 case UNGT: or1 = UNORDERED; or2 = GT; break;
8810 case UNLT: or1 = UNORDERED; or2 = LT; break;
8813 validate_condition_mode (or1, comp_mode);
8814 validate_condition_mode (or2, comp_mode);
8815 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8816 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8817 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8818 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8820 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8822 compare_result = or_result;
8826 validate_condition_mode (code, GET_MODE (compare_result));
8828 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8832 /* Emit the RTL for an sCOND pattern. */
8835 rs6000_emit_sCOND (code, result)
8840 enum machine_mode op_mode;
8841 enum rtx_code cond_code;
8843 condition_rtx = rs6000_generate_compare (code);
8844 cond_code = GET_CODE (condition_rtx);
8847 || cond_code == GE || cond_code == LE
8848 || cond_code == GEU || cond_code == LEU
8849 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
8851 rtx not_result = gen_reg_rtx (CCEQmode);
8852 rtx not_op, rev_cond_rtx;
8853 enum machine_mode cc_mode;
8855 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
8857 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
8858 SImode, XEXP (condition_rtx, 0), const0_rtx);
8859 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
8860 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
8861 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
8864 op_mode = GET_MODE (rs6000_compare_op0);
8865 if (op_mode == VOIDmode)
8866 op_mode = GET_MODE (rs6000_compare_op1);
8868 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8870 PUT_MODE (condition_rtx, DImode);
8871 convert_move (result, condition_rtx, 0);
8875 PUT_MODE (condition_rtx, SImode);
8876 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8880 /* Emit a branch of kind CODE to location LOC. */
8883 rs6000_emit_cbranch (code, loc)
8887 rtx condition_rtx, loc_ref;
8889 condition_rtx = rs6000_generate_compare (code);
8890 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8891 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8892 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8896 /* Return the string to output a conditional branch to LABEL, which is
8897 the operand number of the label, or -1 if the branch is really a
8900 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8901 condition code register and its mode specifies what kind of
8904 REVERSED is nonzero if we should reverse the sense of the comparison.
8906 INSN is the insn. */
8909 output_cbranch (op, label, reversed, insn)
8915 static char string[64];
8916 enum rtx_code code = GET_CODE (op);
8917 rtx cc_reg = XEXP (op, 0);
8918 enum machine_mode mode = GET_MODE (cc_reg);
8919 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8920 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8921 int really_reversed = reversed ^ need_longbranch;
8927 validate_condition_mode (code, mode);
8929 /* Work out which way this really branches. We could use
8930 reverse_condition_maybe_unordered here always but this
8931 makes the resulting assembler clearer. */
8932 if (really_reversed)
8934 /* Reversal of FP compares takes care -- an ordered compare
8935 becomes an unordered compare and vice versa. */
8936 if (mode == CCFPmode)
8937 code = reverse_condition_maybe_unordered (code);
8939 code = reverse_condition (code);
8942 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
8944 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8947 /* Opposite of GT. */
8949 else if (code == NE)
8957 /* Not all of these are actually distinct opcodes, but
8958 we distinguish them for clarity of the resulting assembler. */
8960 ccode = "ne"; break;
8962 ccode = "eq"; break;
8964 ccode = "ge"; break;
8965 case GT: case GTU: case UNGT:
8966 ccode = "gt"; break;
8968 ccode = "le"; break;
8969 case LT: case LTU: case UNLT:
8970 ccode = "lt"; break;
8971 case UNORDERED: ccode = "un"; break;
8972 case ORDERED: ccode = "nu"; break;
8973 case UNGE: ccode = "nl"; break;
8974 case UNLE: ccode = "ng"; break;
8979 /* Maybe we have a guess as to how likely the branch is.
8980 The old mnemonics don't have a way to specify this information. */
8982 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8983 if (note != NULL_RTX)
8985 /* PROB is the difference from 50%. */
8986 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8987 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8989 /* Only hint for highly probable/improbable branches on newer
8990 cpus as static prediction overrides processor dynamic
8991 prediction. For older cpus we may as well always hint, but
8992 assume not taken for branches that are very close to 50% as a
8993 mispredicted taken branch is more expensive than a
8994 mispredicted not-taken branch. */
8996 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8998 if (abs (prob) > REG_BR_PROB_BASE / 20
8999 && ((prob > 0) ^ need_longbranch))
9007 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9009 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9011 /* We need to escape any '%' characters in the reg_names string.
9012 Assume they'd only be the first character... */
9013 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9015 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9019 /* If the branch distance was too far, we may have to use an
9020 unconditional branch to go the distance. */
9021 if (need_longbranch)
9022 s += sprintf (s, ",$+8\n\tb %s", label);
9024 s += sprintf (s, ",%s", label);
9030 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9031 operands of the last comparison is nonzero/true, FALSE_COND if it
9032 is zero/false. Return 0 if the hardware has no such operation. */
9035 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9041 enum rtx_code code = GET_CODE (op);
9042 rtx op0 = rs6000_compare_op0;
9043 rtx op1 = rs6000_compare_op1;
9045 enum machine_mode compare_mode = GET_MODE (op0);
9046 enum machine_mode result_mode = GET_MODE (dest);
9049 /* These modes should always match. */
9050 if (GET_MODE (op1) != compare_mode
9051 /* In the isel case however, we can use a compare immediate, so
9052 op1 may be a small constant. */
9053 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9055 if (GET_MODE (true_cond) != result_mode)
9057 if (GET_MODE (false_cond) != result_mode)
9060 /* First, work out if the hardware can do this at all, or
9061 if it's too slow... */
9062 if (! rs6000_compare_fp_p)
9065 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9069 /* Eliminate half of the comparisons by switching operands, this
9070 makes the remaining code simpler. */
9071 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9072 || code == LTGT || code == LT || code == UNLE)
9074 code = reverse_condition_maybe_unordered (code);
9076 true_cond = false_cond;
9080 /* UNEQ and LTGT take four instructions for a comparison with zero,
9081 it'll probably be faster to use a branch here too. */
9082 if (code == UNEQ && HONOR_NANS (compare_mode))
9085 if (GET_CODE (op1) == CONST_DOUBLE)
9086 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9088 /* We're going to try to implement comparisons by performing
9089 a subtract, then comparing against zero. Unfortunately,
9090 Inf - Inf is NaN which is not zero, and so if we don't
9091 know that the operand is finite and the comparison
9092 would treat EQ different to UNORDERED, we can't do it. */
9093 if (HONOR_INFINITIES (compare_mode)
9094 && code != GT && code != UNGE
9095 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9096 /* Constructs of the form (a OP b ? a : b) are safe. */
9097 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9098 || (! rtx_equal_p (op0, true_cond)
9099 && ! rtx_equal_p (op1, true_cond))))
9101 /* At this point we know we can use fsel. */
9103 /* Reduce the comparison to a comparison against zero. */
9104 temp = gen_reg_rtx (compare_mode);
9105 emit_insn (gen_rtx_SET (VOIDmode, temp,
9106 gen_rtx_MINUS (compare_mode, op0, op1)));
9108 op1 = CONST0_RTX (compare_mode);
9110 /* If we don't care about NaNs we can reduce some of the comparisons
9111 down to faster ones. */
9112 if (! HONOR_NANS (compare_mode))
9118 true_cond = false_cond;
9131 /* Now, reduce everything down to a GE. */
9138 temp = gen_reg_rtx (compare_mode);
9139 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9144 temp = gen_reg_rtx (compare_mode);
9145 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9150 temp = gen_reg_rtx (compare_mode);
9151 emit_insn (gen_rtx_SET (VOIDmode, temp,
9152 gen_rtx_NEG (compare_mode,
9153 gen_rtx_ABS (compare_mode, op0))));
9158 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9159 temp = gen_reg_rtx (result_mode);
9160 emit_insn (gen_rtx_SET (VOIDmode, temp,
9161 gen_rtx_IF_THEN_ELSE (result_mode,
9162 gen_rtx_GE (VOIDmode,
9164 true_cond, false_cond)));
9165 false_cond = true_cond;
9168 temp = gen_reg_rtx (compare_mode);
9169 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9174 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9175 temp = gen_reg_rtx (result_mode);
9176 emit_insn (gen_rtx_SET (VOIDmode, temp,
9177 gen_rtx_IF_THEN_ELSE (result_mode,
9178 gen_rtx_GE (VOIDmode,
9180 true_cond, false_cond)));
9181 true_cond = false_cond;
9184 temp = gen_reg_rtx (compare_mode);
9185 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9193 emit_insn (gen_rtx_SET (VOIDmode, dest,
9194 gen_rtx_IF_THEN_ELSE (result_mode,
9195 gen_rtx_GE (VOIDmode,
9197 true_cond, false_cond)));
9201 /* Same as above, but for ints (isel). */
9204 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9210 rtx condition_rtx, cr;
9212 /* All isel implementations thus far are 32-bits. */
9213 if (GET_MODE (rs6000_compare_op0) != SImode)
9216 /* We still have to do the compare, because isel doesn't do a
9217 compare, it just looks at the CRx bits set by a previous compare
9219 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9220 cr = XEXP (condition_rtx, 0);
9222 if (GET_MODE (cr) == CCmode)
9223 emit_insn (gen_isel_signed (dest, condition_rtx,
9224 true_cond, false_cond, cr));
9226 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9227 true_cond, false_cond, cr));
9233 output_isel (operands)
9238 code = GET_CODE (operands[1]);
9239 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9241 PUT_CODE (operands[1], reverse_condition (code));
9242 return "isel %0,%3,%2,%j1";
9245 return "isel %0,%2,%3,%j1";
9249 rs6000_emit_minmax (dest, code, op0, op1)
9255 enum machine_mode mode = GET_MODE (op0);
9259 if (code == SMAX || code == SMIN)
9264 if (code == SMAX || code == UMAX)
9265 target = emit_conditional_move (dest, c, op0, op1, mode,
9268 target = emit_conditional_move (dest, c, op0, op1, mode,
9270 if (target == NULL_RTX)
9273 emit_move_insn (dest, target);
9276 /* This page contains routines that are used to determine what the
9277 function prologue and epilogue code will do and write them out. */
9279 /* Return the first fixed-point register that is required to be
9280 saved. 32 if none. */
9283 first_reg_to_save ()
9287 /* Find lowest numbered live register. */
9288 for (first_reg = 13; first_reg <= 31; first_reg++)
9289 if (regs_ever_live[first_reg]
9290 && (! call_used_regs[first_reg]
9291 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9292 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9293 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9298 && current_function_uses_pic_offset_table
9299 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9300 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9306 /* Similar, for FP regs. */
9309 first_fp_reg_to_save ()
9313 /* Find lowest numbered live register. */
9314 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9315 if (regs_ever_live[first_reg])
9321 /* Similar, for AltiVec regs. */
9324 first_altivec_reg_to_save ()
9328 /* Stack frame remains as is unless we are in AltiVec ABI. */
9329 if (! TARGET_ALTIVEC_ABI)
9330 return LAST_ALTIVEC_REGNO + 1;
9332 /* Find lowest numbered live register. */
9333 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9334 if (regs_ever_live[i])
9340 /* Return a 32-bit mask of the AltiVec registers we need to set in
9341 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9342 the 32-bit word is 0. */
9345 compute_vrsave_mask ()
9347 unsigned int i, mask = 0;
9349 /* First, find out if we use _any_ altivec registers. */
9350 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9351 if (regs_ever_live[i])
9352 mask |= ALTIVEC_REG_BIT (i);
9357 /* Next, remove the argument registers from the set. These must
9358 be in the VRSAVE mask set by the caller, so we don't need to add
9359 them in again. More importantly, the mask we compute here is
9360 used to generate CLOBBERs in the set_vrsave insn, and we do not
9361 wish the argument registers to die. */
9362 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9363 mask &= ~ALTIVEC_REG_BIT (i);
9365 /* Similarly, remove the return value from the set. */
9368 diddle_return_value (is_altivec_return_reg, &yes);
9370 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9377 is_altivec_return_reg (reg, xyes)
9381 bool *yes = (bool *) xyes;
9382 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9387 /* Calculate the stack information for the current function. This is
9388 complicated by having two separate calling sequences, the AIX calling
9389 sequence and the V.4 calling sequence.
9391 AIX (and Darwin/Mac OS X) stack frames look like:
9393 SP----> +---------------------------------------+
9394 | back chain to caller | 0 0
9395 +---------------------------------------+
9396 | saved CR | 4 8 (8-11)
9397 +---------------------------------------+
9399 +---------------------------------------+
9400 | reserved for compilers | 12 24
9401 +---------------------------------------+
9402 | reserved for binders | 16 32
9403 +---------------------------------------+
9404 | saved TOC pointer | 20 40
9405 +---------------------------------------+
9406 | Parameter save area (P) | 24 48
9407 +---------------------------------------+
9408 | Alloca space (A) | 24+P etc.
9409 +---------------------------------------+
9410 | Local variable space (L) | 24+P+A
9411 +---------------------------------------+
9412 | Float/int conversion temporary (X) | 24+P+A+L
9413 +---------------------------------------+
9414 | Save area for AltiVec registers (W) | 24+P+A+L+X
9415 +---------------------------------------+
9416 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9417 +---------------------------------------+
9418 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9419 +---------------------------------------+
9420 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9421 +---------------------------------------+
9422 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9423 +---------------------------------------+
9424 old SP->| back chain to caller's caller |
9425 +---------------------------------------+
9427 The required alignment for AIX configurations is two words (i.e., 8
9431 V.4 stack frames look like:
9433 SP----> +---------------------------------------+
9434 | back chain to caller | 0
9435 +---------------------------------------+
9436 | caller's saved LR | 4
9437 +---------------------------------------+
9438 | Parameter save area (P) | 8
9439 +---------------------------------------+
9440 | Alloca space (A) | 8+P
9441 +---------------------------------------+
9442 | Varargs save area (V) | 8+P+A
9443 +---------------------------------------+
9444 | Local variable space (L) | 8+P+A+V
9445 +---------------------------------------+
9446 | Float/int conversion temporary (X) | 8+P+A+V+L
9447 +---------------------------------------+
9448 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9449 +---------------------------------------+
9450 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9451 +---------------------------------------+
9452 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9453 +---------------------------------------+
9454 | SPE: area for 64-bit GP registers |
9455 +---------------------------------------+
9456 | SPE alignment padding |
9457 +---------------------------------------+
9458 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9459 +---------------------------------------+
9460 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9461 +---------------------------------------+
9462 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9463 +---------------------------------------+
9464 old SP->| back chain to caller's caller |
9465 +---------------------------------------+
9467 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9468 given. (But note below and in sysv4.h that we require only 8 and
9469 may round up the size of our stack frame anyways. The historical
9470 reason is early versions of powerpc-linux which didn't properly
9471 align the stack at program startup. A happy side-effect is that
9472 -mno-eabi libraries can be used with -meabi programs.)
9474 The EABI configuration defaults to the V.4 layout. However,
9475 the stack alignment requirements may differ. If -mno-eabi is not
9476 given, the required stack alignment is 8 bytes; if -mno-eabi is
9477 given, the required alignment is 16 bytes. (But see V.4 comment
9480 #ifndef ABI_STACK_BOUNDARY
9481 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9485 rs6000_stack_info ()
9487 static rs6000_stack_t info, zero_info;
9488 rs6000_stack_t *info_ptr = &info;
9489 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9493 /* Zero all fields portably. */
9498 /* Cache value so we don't rescan instruction chain over and over. */
9499 if (cfun->machine->insn_chain_scanned_p == 0)
9501 cfun->machine->insn_chain_scanned_p = 1;
9502 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9506 /* Select which calling sequence. */
9507 info_ptr->abi = DEFAULT_ABI;
9509 /* Calculate which registers need to be saved & save area size. */
9510 info_ptr->first_gp_reg_save = first_reg_to_save ();
9511 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9512 even if it currently looks like we won't. */
9513 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9514 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9515 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9516 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9517 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9519 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9521 /* For the SPE, we have an additional upper 32-bits on each GPR.
9522 Ideally we should save the entire 64-bits only when the upper
9523 half is used in SIMD instructions. Since we only record
9524 registers live (not the size they are used in), this proves
9525 difficult because we'd have to traverse the instruction chain at
9526 the right time, taking reload into account. This is a real pain,
9527 so we opt to save the GPRs in 64-bits always if but one register
9528 gets used in 64-bits. Otherwise, all the registers in the frame
9529 get saved in 32-bits.
9531 So... since when we save all GPRs (except the SP) in 64-bits, the
9532 traditional GP save area will be empty. */
9533 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9534 info_ptr->gp_size = 0;
9536 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9537 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9539 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9540 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9541 - info_ptr->first_altivec_reg_save);
9543 /* Does this function call anything? */
9544 info_ptr->calls_p = (! current_function_is_leaf
9545 || cfun->machine->ra_needs_full_frame);
9547 /* Determine if we need to save the link register. */
9548 if (rs6000_ra_ever_killed ()
9549 || (DEFAULT_ABI == ABI_AIX
9550 && current_function_profile
9551 && !TARGET_PROFILE_KERNEL)
9552 #ifdef TARGET_RELOCATABLE
9553 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9555 || (info_ptr->first_fp_reg_save != 64
9556 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9557 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9558 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9559 || (DEFAULT_ABI == ABI_DARWIN
9561 && current_function_uses_pic_offset_table)
9562 || info_ptr->calls_p)
9564 info_ptr->lr_save_p = 1;
9565 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9568 /* Determine if we need to save the condition code registers. */
9569 if (regs_ever_live[CR2_REGNO]
9570 || regs_ever_live[CR3_REGNO]
9571 || regs_ever_live[CR4_REGNO])
9573 info_ptr->cr_save_p = 1;
9574 if (DEFAULT_ABI == ABI_V4)
9575 info_ptr->cr_size = reg_size;
9578 /* If the current function calls __builtin_eh_return, then we need
9579 to allocate stack space for registers that will hold data for
9580 the exception handler. */
9581 if (current_function_calls_eh_return)
9584 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9587 /* SPE saves EH registers in 64-bits. */
9588 ehrd_size = i * (TARGET_SPE_ABI
9589 && info_ptr->spe_64bit_regs_used != 0
9590 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9595 /* Determine various sizes. */
9596 info_ptr->reg_size = reg_size;
9597 info_ptr->fixed_size = RS6000_SAVE_AREA;
9598 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9599 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9600 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9603 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9604 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9606 info_ptr->spe_gp_size = 0;
9608 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9610 info_ptr->vrsave_mask = compute_vrsave_mask ();
9611 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9615 info_ptr->vrsave_mask = 0;
9616 info_ptr->vrsave_size = 0;
9619 /* Calculate the offsets. */
9620 switch (DEFAULT_ABI)
9628 info_ptr->fp_save_offset = - info_ptr->fp_size;
9629 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9631 if (TARGET_ALTIVEC_ABI)
9633 info_ptr->vrsave_save_offset
9634 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9636 /* Align stack so vector save area is on a quadword boundary. */
9637 if (info_ptr->altivec_size != 0)
9638 info_ptr->altivec_padding_size
9639 = 16 - (-info_ptr->vrsave_save_offset % 16);
9641 info_ptr->altivec_padding_size = 0;
9643 info_ptr->altivec_save_offset
9644 = info_ptr->vrsave_save_offset
9645 - info_ptr->altivec_padding_size
9646 - info_ptr->altivec_size;
9648 /* Adjust for AltiVec case. */
9649 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9652 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9653 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9654 info_ptr->lr_save_offset = 2*reg_size;
9658 info_ptr->fp_save_offset = - info_ptr->fp_size;
9659 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9660 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9662 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9664 /* Align stack so SPE GPR save area is aligned on a
9665 double-word boundary. */
9666 if (info_ptr->spe_gp_size != 0)
9667 info_ptr->spe_padding_size
9668 = 8 - (-info_ptr->cr_save_offset % 8);
9670 info_ptr->spe_padding_size = 0;
9672 info_ptr->spe_gp_save_offset
9673 = info_ptr->cr_save_offset
9674 - info_ptr->spe_padding_size
9675 - info_ptr->spe_gp_size;
9677 /* Adjust for SPE case. */
9678 info_ptr->toc_save_offset
9679 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9681 else if (TARGET_ALTIVEC_ABI)
9683 info_ptr->vrsave_save_offset
9684 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9686 /* Align stack so vector save area is on a quadword boundary. */
9687 if (info_ptr->altivec_size != 0)
9688 info_ptr->altivec_padding_size
9689 = 16 - (-info_ptr->vrsave_save_offset % 16);
9691 info_ptr->altivec_padding_size = 0;
9693 info_ptr->altivec_save_offset
9694 = info_ptr->vrsave_save_offset
9695 - info_ptr->altivec_padding_size
9696 - info_ptr->altivec_size;
9698 /* Adjust for AltiVec case. */
9699 info_ptr->toc_save_offset
9700 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9703 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9704 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9705 info_ptr->lr_save_offset = reg_size;
9709 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9711 + info_ptr->altivec_size
9712 + info_ptr->altivec_padding_size
9713 + info_ptr->vrsave_size
9714 + info_ptr->spe_gp_size
9715 + info_ptr->spe_padding_size
9719 + info_ptr->vrsave_size
9720 + info_ptr->toc_size,
9721 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9724 total_raw_size = (info_ptr->vars_size
9725 + info_ptr->parm_size
9726 + info_ptr->save_size
9727 + info_ptr->varargs_size
9728 + info_ptr->fixed_size);
9730 info_ptr->total_size =
9731 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9733 /* Determine if we need to allocate any stack frame:
9735 For AIX we need to push the stack if a frame pointer is needed
9736 (because the stack might be dynamically adjusted), if we are
9737 debugging, if we make calls, or if the sum of fp_save, gp_save,
9738 and local variables are more than the space needed to save all
9739 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9740 + 18*8 = 288 (GPR13 reserved).
9742 For V.4 we don't have the stack cushion that AIX uses, but assume
9743 that the debugger can handle stackless frames. */
9745 if (info_ptr->calls_p)
9746 info_ptr->push_p = 1;
9748 else if (DEFAULT_ABI == ABI_V4)
9749 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9751 else if (frame_pointer_needed)
9752 info_ptr->push_p = 1;
9754 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9755 info_ptr->push_p = 1;
9759 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9761 /* Zero offsets if we're not saving those registers. */
9762 if (info_ptr->fp_size == 0)
9763 info_ptr->fp_save_offset = 0;
9765 if (info_ptr->gp_size == 0)
9766 info_ptr->gp_save_offset = 0;
9768 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9769 info_ptr->altivec_save_offset = 0;
9771 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9772 info_ptr->vrsave_save_offset = 0;
9774 if (! TARGET_SPE_ABI
9775 || info_ptr->spe_64bit_regs_used == 0
9776 || info_ptr->spe_gp_size == 0)
9777 info_ptr->spe_gp_save_offset = 0;
9779 if (! info_ptr->lr_save_p)
9780 info_ptr->lr_save_offset = 0;
9782 if (! info_ptr->cr_save_p)
9783 info_ptr->cr_save_offset = 0;
9785 if (! info_ptr->toc_save_p)
9786 info_ptr->toc_save_offset = 0;
9791 /* Return true if the current function uses any GPRs in 64-bit SIMD
9795 spe_func_has_64bit_regs_p ()
9799 /* Functions that save and restore all the call-saved registers will
9800 need to save/restore the registers in 64-bits. */
9801 if (current_function_calls_eh_return
9802 || current_function_calls_setjmp
9803 || current_function_has_nonlocal_goto)
9806 insns = get_insns ();
9808 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9815 if (GET_CODE (i) == SET
9816 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9825 debug_stack_info (info)
9826 rs6000_stack_t *info;
9828 const char *abi_string;
9831 info = rs6000_stack_info ();
9833 fprintf (stderr, "\nStack information for function %s:\n",
9834 ((current_function_decl && DECL_NAME (current_function_decl))
9835 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9840 default: abi_string = "Unknown"; break;
9841 case ABI_NONE: abi_string = "NONE"; break;
9842 case ABI_AIX: abi_string = "AIX"; break;
9843 case ABI_DARWIN: abi_string = "Darwin"; break;
9844 case ABI_V4: abi_string = "V.4"; break;
9847 fprintf (stderr, "\tABI = %5s\n", abi_string);
9849 if (TARGET_ALTIVEC_ABI)
9850 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9853 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9855 if (info->first_gp_reg_save != 32)
9856 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9858 if (info->first_fp_reg_save != 64)
9859 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9861 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9862 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9863 info->first_altivec_reg_save);
9865 if (info->lr_save_p)
9866 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9868 if (info->cr_save_p)
9869 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9871 if (info->toc_save_p)
9872 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9874 if (info->vrsave_mask)
9875 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9878 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9881 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9883 if (info->gp_save_offset)
9884 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9886 if (info->fp_save_offset)
9887 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9889 if (info->altivec_save_offset)
9890 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9891 info->altivec_save_offset);
9893 if (info->spe_gp_save_offset)
9894 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9895 info->spe_gp_save_offset);
9897 if (info->vrsave_save_offset)
9898 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9899 info->vrsave_save_offset);
9901 if (info->lr_save_offset)
9902 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9904 if (info->cr_save_offset)
9905 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9907 if (info->toc_save_offset)
9908 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9910 if (info->varargs_save_offset)
9911 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9913 if (info->total_size)
9914 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9916 if (info->varargs_size)
9917 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9919 if (info->vars_size)
9920 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9922 if (info->parm_size)
9923 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9925 if (info->fixed_size)
9926 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9929 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9931 if (info->spe_gp_size)
9932 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9935 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9937 if (info->altivec_size)
9938 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9940 if (info->vrsave_size)
9941 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9943 if (info->altivec_padding_size)
9944 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9945 info->altivec_padding_size);
9947 if (info->spe_padding_size)
9948 fprintf (stderr, "\tspe_padding_size = %5d\n",
9949 info->spe_padding_size);
9952 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9955 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9958 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9960 if (info->save_size)
9961 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9963 if (info->reg_size != 4)
9964 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9966 fprintf (stderr, "\n");
9970 rs6000_return_addr (count, frame)
9974 /* Currently we don't optimize very well between prolog and body
9975 code and for PIC code the code can be actually quite bad, so
9976 don't try to be too clever here. */
9977 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9979 cfun->machine->ra_needs_full_frame = 1;
9986 plus_constant (copy_to_reg
9987 (gen_rtx_MEM (Pmode,
9988 memory_address (Pmode, frame))),
9989 RETURN_ADDRESS_OFFSET)));
9992 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9995 /* Say whether a function is a candidate for sibcall handling or not.
9996 We do not allow indirect calls to be optimized into sibling calls.
9997 Also, we can't do it if there are any vector parameters; there's
9998 nowhere to put the VRsave code so it works; note that functions with
9999 vector parameters are required to have a prototype, so the argument
10000 type info must be available here. (The tail recursion case can work
10001 with vector parameters, but there's no way to distinguish here.) */
10003 rs6000_function_ok_for_sibcall (decl, exp)
10005 tree exp ATTRIBUTE_UNUSED;
10010 if (TARGET_ALTIVEC_VRSAVE)
10012 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10013 type; type = TREE_CHAIN (type))
10015 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10019 if (DEFAULT_ABI == ABI_DARWIN
10020 || (*targetm.binds_local_p) (decl))
10022 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10024 if (!lookup_attribute ("longcall", attr_list)
10025 || lookup_attribute ("shortcall", attr_list))
10033 rs6000_ra_ever_killed ()
10039 /* Irritatingly, there are two kinds of thunks -- those created with
10040 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10041 through the regular part of the compiler. This is a very hacky
10042 way to tell them apart. */
10043 if (current_function_is_thunk && !no_new_pseudos)
10046 /* regs_ever_live has LR marked as used if any sibcalls are present,
10047 but this should not force saving and restoring in the
10048 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10049 clobbers LR, so that is inappropriate. */
10051 /* Also, the prologue can generate a store into LR that
10052 doesn't really count, like this:
10055 bcl to set PIC register
10059 When we're called from the epilogue, we need to avoid counting
10060 this as a store. */
10062 push_topmost_sequence ();
10063 top = get_insns ();
10064 pop_topmost_sequence ();
10065 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10067 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10071 if (FIND_REG_INC_NOTE (insn, reg))
10073 else if (GET_CODE (insn) == CALL_INSN
10074 && !SIBLING_CALL_P (insn))
10076 else if (set_of (reg, insn) != NULL_RTX
10077 && !prologue_epilogue_contains (insn))
10084 /* Add a REG_MAYBE_DEAD note to the insn. */
10086 rs6000_maybe_dead (insn)
10089 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10094 /* Emit instructions needed to load the TOC register.
10095 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10096 a constant pool; or for SVR4 -fpic. */
10099 rs6000_emit_load_toc_table (fromprolog)
10103 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10105 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10107 rtx temp = (fromprolog
10108 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10109 : gen_reg_rtx (Pmode));
10110 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10112 rs6000_maybe_dead (insn);
10113 insn = emit_move_insn (dest, temp);
10115 rs6000_maybe_dead (insn);
10117 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10120 rtx tempLR = (fromprolog
10121 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10122 : gen_reg_rtx (Pmode));
10123 rtx temp0 = (fromprolog
10124 ? gen_rtx_REG (Pmode, 0)
10125 : gen_reg_rtx (Pmode));
10128 /* possibly create the toc section */
10129 if (! toc_initialized)
10132 function_section (current_function_decl);
10139 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10140 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10142 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10143 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10145 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10147 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10148 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10155 static int reload_toc_labelno = 0;
10157 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10159 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10160 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10162 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10163 emit_move_insn (dest, tempLR);
10164 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10166 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10168 rs6000_maybe_dead (insn);
10170 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10172 /* This is for AIX code running in non-PIC ELF32. */
10175 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10176 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10178 insn = emit_insn (gen_elf_high (dest, realsym));
10180 rs6000_maybe_dead (insn);
10181 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10183 rs6000_maybe_dead (insn);
10185 else if (DEFAULT_ABI == ABI_AIX)
10188 insn = emit_insn (gen_load_toc_aix_si (dest));
10190 insn = emit_insn (gen_load_toc_aix_di (dest));
10192 rs6000_maybe_dead (insn);
10199 get_TOC_alias_set ()
10201 static int set = -1;
10203 set = new_alias_set ();
10207 /* This retuns nonzero if the current function uses the TOC. This is
10208 determined by the presence of (unspec ... UNSPEC_TOC), which is
10209 generated by the various load_toc_* patterns. */
10216 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10219 rtx pat = PATTERN (insn);
10222 if (GET_CODE (pat) == PARALLEL)
10223 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10224 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
10225 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == UNSPEC_TOC)
10232 create_TOC_reference (symbol)
10235 return gen_rtx_PLUS (Pmode,
10236 gen_rtx_REG (Pmode, TOC_REGISTER),
10237 gen_rtx_CONST (Pmode,
10238 gen_rtx_MINUS (Pmode, symbol,
10239 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10243 /* __throw will restore its own return address to be the same as the
10244 return address of the function that the throw is being made to.
10245 This is unfortunate, because we want to check the original
10246 return address to see if we need to restore the TOC.
10247 So we have to squirrel it away here.
10248 This is used only in compiling __throw and __rethrow.
10250 Most of this code should be removed by CSE. */
10251 static rtx insn_after_throw;
10253 /* This does the saving... */
10255 rs6000_aix_emit_builtin_unwind_init ()
10258 rtx stack_top = gen_reg_rtx (Pmode);
10259 rtx opcode_addr = gen_reg_rtx (Pmode);
10261 insn_after_throw = gen_reg_rtx (SImode);
10263 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10264 emit_move_insn (stack_top, mem);
10266 mem = gen_rtx_MEM (Pmode,
10267 gen_rtx_PLUS (Pmode, stack_top,
10268 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10269 emit_move_insn (opcode_addr, mem);
10270 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10273 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10274 in _eh.o). Only used on AIX.
10276 The idea is that on AIX, function calls look like this:
10277 bl somefunction-trampoline
10281 somefunction-trampoline:
10283 ... load function address in the count register ...
10285 or like this, if the linker determines that this is not a cross-module call
10286 and so the TOC need not be restored:
10289 or like this, if the compiler could determine that this is not a
10292 now, the tricky bit here is that register 2 is saved and restored
10293 by the _linker_, so we can't readily generate debugging information
10294 for it. So we need to go back up the call chain looking at the
10295 insns at return addresses to see which calls saved the TOC register
10296 and so see where it gets restored from.
10298 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10299 just before the actual epilogue.
10301 On the bright side, this incurs no space or time overhead unless an
10302 exception is thrown, except for the extra code in libgcc.a.
10304 The parameter STACKSIZE is a register containing (at runtime)
10305 the amount to be popped off the stack in addition to the stack frame
10306 of this routine (which will be __throw or __rethrow, and so is
10307 guaranteed to have a stack frame). */
10310 rs6000_emit_eh_toc_restore (stacksize)
10314 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10315 rtx tocompare = gen_reg_rtx (SImode);
10316 rtx opcode = gen_reg_rtx (SImode);
10317 rtx opcode_addr = gen_reg_rtx (Pmode);
10319 rtx loop_start = gen_label_rtx ();
10320 rtx no_toc_restore_needed = gen_label_rtx ();
10321 rtx loop_exit = gen_label_rtx ();
10323 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10324 set_mem_alias_set (mem, rs6000_sr_alias_set);
10325 emit_move_insn (bottom_of_stack, mem);
10327 top_of_stack = expand_binop (Pmode, add_optab,
10328 bottom_of_stack, stacksize,
10329 NULL_RTX, 1, OPTAB_WIDEN);
10331 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10332 : 0xE8410028, SImode));
10334 if (insn_after_throw == NULL_RTX)
10336 emit_move_insn (opcode, insn_after_throw);
10338 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10339 emit_label (loop_start);
10341 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10342 SImode, NULL_RTX, NULL_RTX,
10343 no_toc_restore_needed);
10345 mem = gen_rtx_MEM (Pmode,
10346 gen_rtx_PLUS (Pmode, bottom_of_stack,
10347 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10348 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10350 emit_label (no_toc_restore_needed);
10351 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10352 Pmode, NULL_RTX, NULL_RTX,
10355 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10356 set_mem_alias_set (mem, rs6000_sr_alias_set);
10357 emit_move_insn (bottom_of_stack, mem);
10359 mem = gen_rtx_MEM (Pmode,
10360 gen_rtx_PLUS (Pmode, bottom_of_stack,
10361 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10362 emit_move_insn (opcode_addr, mem);
10363 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10365 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10366 emit_jump (loop_start);
10367 emit_note (NULL, NOTE_INSN_LOOP_END);
10368 emit_label (loop_exit);
10370 #endif /* TARGET_AIX */
10372 /* This ties together stack memory (MEM with an alias set of
10373 rs6000_sr_alias_set) and the change to the stack pointer. */
10376 rs6000_emit_stack_tie ()
10378 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10380 set_mem_alias_set (mem, rs6000_sr_alias_set);
10381 emit_insn (gen_stack_tie (mem));
10384 /* Emit the correct code for allocating stack space, as insns.
10385 If COPY_R12, make sure a copy of the old frame is left in r12.
10386 The generated code may use hard register 0 as a temporary. */
10389 rs6000_emit_allocate_stack (size, copy_r12)
10390 HOST_WIDE_INT size;
10394 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10395 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10396 rtx todec = GEN_INT (-size);
10398 if (current_function_limit_stack)
10400 if (REG_P (stack_limit_rtx)
10401 && REGNO (stack_limit_rtx) > 1
10402 && REGNO (stack_limit_rtx) <= 31)
10404 emit_insn (TARGET_32BIT
10405 ? gen_addsi3 (tmp_reg,
10408 : gen_adddi3 (tmp_reg,
10412 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10415 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10417 && DEFAULT_ABI == ABI_V4)
10419 rtx toload = gen_rtx_CONST (VOIDmode,
10420 gen_rtx_PLUS (Pmode,
10424 emit_insn (gen_elf_high (tmp_reg, toload));
10425 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10426 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10430 warning ("stack limit expression is not supported");
10433 if (copy_r12 || ! TARGET_UPDATE)
10434 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10440 /* Need a note here so that try_split doesn't get confused. */
10441 if (get_last_insn() == NULL_RTX)
10442 emit_note (0, NOTE_INSN_DELETED);
10443 insn = emit_move_insn (tmp_reg, todec);
10444 try_split (PATTERN (insn), insn, 0);
10448 insn = emit_insn (TARGET_32BIT
10449 ? gen_movsi_update (stack_reg, stack_reg,
10451 : gen_movdi_update (stack_reg, stack_reg,
10452 todec, stack_reg));
10456 insn = emit_insn (TARGET_32BIT
10457 ? gen_addsi3 (stack_reg, stack_reg, todec)
10458 : gen_adddi3 (stack_reg, stack_reg, todec));
10459 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10460 gen_rtx_REG (Pmode, 12));
10463 RTX_FRAME_RELATED_P (insn) = 1;
10465 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10466 gen_rtx_SET (VOIDmode, stack_reg,
10467 gen_rtx_PLUS (Pmode, stack_reg,
10472 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10473 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10474 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10475 deduce these equivalences by itself so it wasn't necessary to hold
10476 its hand so much. */
10479 rs6000_frame_related (insn, reg, val, reg2, rreg)
10488 /* copy_rtx will not make unique copies of registers, so we need to
10489 ensure we don't have unwanted sharing here. */
10491 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10494 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10496 real = copy_rtx (PATTERN (insn));
10498 if (reg2 != NULL_RTX)
10499 real = replace_rtx (real, reg2, rreg);
10501 real = replace_rtx (real, reg,
10502 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10503 STACK_POINTER_REGNUM),
10506 /* We expect that 'real' is either a SET or a PARALLEL containing
10507 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10508 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10510 if (GET_CODE (real) == SET)
10514 temp = simplify_rtx (SET_SRC (set));
10516 SET_SRC (set) = temp;
10517 temp = simplify_rtx (SET_DEST (set));
10519 SET_DEST (set) = temp;
10520 if (GET_CODE (SET_DEST (set)) == MEM)
10522 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10524 XEXP (SET_DEST (set), 0) = temp;
10527 else if (GET_CODE (real) == PARALLEL)
10530 for (i = 0; i < XVECLEN (real, 0); i++)
10531 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10533 rtx set = XVECEXP (real, 0, i);
10535 temp = simplify_rtx (SET_SRC (set));
10537 SET_SRC (set) = temp;
10538 temp = simplify_rtx (SET_DEST (set));
10540 SET_DEST (set) = temp;
10541 if (GET_CODE (SET_DEST (set)) == MEM)
10543 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10545 XEXP (SET_DEST (set), 0) = temp;
10547 RTX_FRAME_RELATED_P (set) = 1;
10554 real = spe_synthesize_frame_save (real);
10556 RTX_FRAME_RELATED_P (insn) = 1;
10557 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10562 /* Given an SPE frame note, return a PARALLEL of SETs with the
10563 original note, plus a synthetic register save. */
10566 spe_synthesize_frame_save (real)
10569 rtx synth, offset, reg, real2;
10571 if (GET_CODE (real) != SET
10572 || GET_MODE (SET_SRC (real)) != V2SImode)
10575 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10576 frame related note. The parallel contains a set of the register
10577 being saved, and another set to a synthetic register (n+1200).
10578 This is so we can differentiate between 64-bit and 32-bit saves.
10579 Words cannot describe this nastiness. */
10581 if (GET_CODE (SET_DEST (real)) != MEM
10582 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10583 || GET_CODE (SET_SRC (real)) != REG)
10587 (set (mem (plus (reg x) (const y)))
10590 (set (mem (plus (reg x) (const y+4)))
10594 real2 = copy_rtx (real);
10595 PUT_MODE (SET_DEST (real2), SImode);
10596 reg = SET_SRC (real2);
10597 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10598 synth = copy_rtx (real2);
10600 if (BYTES_BIG_ENDIAN)
10602 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10603 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10606 reg = SET_SRC (synth);
10608 synth = replace_rtx (synth, reg,
10609 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10611 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10612 synth = replace_rtx (synth, offset,
10613 GEN_INT (INTVAL (offset)
10614 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10616 RTX_FRAME_RELATED_P (synth) = 1;
10617 RTX_FRAME_RELATED_P (real2) = 1;
10618 if (BYTES_BIG_ENDIAN)
10619 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10621 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10626 /* Returns an insn that has a vrsave set operation with the
10627 appropriate CLOBBERs. */
10630 generate_set_vrsave (reg, info, epiloguep)
10632 rs6000_stack_t *info;
10636 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10637 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10640 = gen_rtx_SET (VOIDmode,
10642 gen_rtx_UNSPEC_VOLATILE (SImode,
10643 gen_rtvec (2, reg, vrsave),
10648 /* We need to clobber the registers in the mask so the scheduler
10649 does not move sets to VRSAVE before sets of AltiVec registers.
10651 However, if the function receives nonlocal gotos, reload will set
10652 all call saved registers live. We will end up with:
10654 (set (reg 999) (mem))
10655 (parallel [ (set (reg vrsave) (unspec blah))
10656 (clobber (reg 999))])
10658 The clobber will cause the store into reg 999 to be dead, and
10659 flow will attempt to delete an epilogue insn. In this case, we
10660 need an unspec use/set of the register. */
10662 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10663 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10665 if (!epiloguep || call_used_regs [i])
10666 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10667 gen_rtx_REG (V4SImode, i));
10670 rtx reg = gen_rtx_REG (V4SImode, i);
10673 = gen_rtx_SET (VOIDmode,
10675 gen_rtx_UNSPEC (V4SImode,
10676 gen_rtvec (1, reg), 27));
10680 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10682 for (i = 0; i < nclobs; ++i)
10683 XVECEXP (insn, 0, i) = clobs[i];
10688 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10689 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10692 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10695 enum machine_mode mode;
10696 unsigned int regno;
10700 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10701 rtx replacea, replaceb;
10703 int_rtx = GEN_INT (offset);
10705 /* Some cases that need register indexed addressing. */
10706 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10708 && SPE_VECTOR_MODE (mode)
10709 && !SPE_CONST_OFFSET_OK (offset)))
10711 /* Whomever calls us must make sure r11 is available in the
10712 flow path of instructions in the prologue. */
10713 offset_rtx = gen_rtx_REG (Pmode, 11);
10714 emit_move_insn (offset_rtx, int_rtx);
10716 replacea = offset_rtx;
10717 replaceb = int_rtx;
10721 offset_rtx = int_rtx;
10722 replacea = NULL_RTX;
10723 replaceb = NULL_RTX;
10726 reg = gen_rtx_REG (mode, regno);
10727 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10728 mem = gen_rtx_MEM (mode, addr);
10729 set_mem_alias_set (mem, rs6000_sr_alias_set);
10731 insn = emit_move_insn (mem, reg);
10733 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10736 /* Emit an offset memory reference suitable for a frame store, while
10737 converting to a valid addressing mode. */
10740 gen_frame_mem_offset (mode, reg, offset)
10741 enum machine_mode mode;
10745 rtx int_rtx, offset_rtx;
10747 int_rtx = GEN_INT (offset);
10749 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10751 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10752 emit_move_insn (offset_rtx, int_rtx);
10755 offset_rtx = int_rtx;
10757 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10760 /* Emit function prologue as insns. */
10763 rs6000_emit_prologue ()
10765 rs6000_stack_t *info = rs6000_stack_info ();
10766 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10767 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10768 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10769 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10770 rtx frame_reg_rtx = sp_reg_rtx;
10771 rtx cr_save_rtx = NULL;
10773 int saving_FPRs_inline;
10774 int using_store_multiple;
10775 HOST_WIDE_INT sp_offset = 0;
10777 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10779 reg_mode = V2SImode;
10783 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10784 && (!TARGET_SPE_ABI
10785 || info->spe_64bit_regs_used == 0)
10786 && info->first_gp_reg_save < 31);
10787 saving_FPRs_inline = (info->first_fp_reg_save == 64
10788 || FP_SAVE_INLINE (info->first_fp_reg_save));
10790 /* For V.4, update stack before we do any saving and set back pointer. */
10791 if (info->push_p && DEFAULT_ABI == ABI_V4)
10793 if (info->total_size < 32767)
10794 sp_offset = info->total_size;
10796 frame_reg_rtx = frame_ptr_rtx;
10797 rs6000_emit_allocate_stack (info->total_size,
10798 (frame_reg_rtx != sp_reg_rtx
10799 && (info->cr_save_p
10801 || info->first_fp_reg_save < 64
10802 || info->first_gp_reg_save < 32
10804 if (frame_reg_rtx != sp_reg_rtx)
10805 rs6000_emit_stack_tie ();
10808 /* Save AltiVec registers if needed. */
10809 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10813 /* There should be a non inline version of this, for when we
10814 are saving lots of vector registers. */
10815 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10816 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10818 rtx areg, savereg, mem;
10821 offset = info->altivec_save_offset + sp_offset
10822 + 16 * (i - info->first_altivec_reg_save);
10824 savereg = gen_rtx_REG (V4SImode, i);
10826 areg = gen_rtx_REG (Pmode, 0);
10827 emit_move_insn (areg, GEN_INT (offset));
10829 /* AltiVec addressing mode is [reg+reg]. */
10830 mem = gen_rtx_MEM (V4SImode,
10831 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10833 set_mem_alias_set (mem, rs6000_sr_alias_set);
10835 insn = emit_move_insn (mem, savereg);
10837 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10838 areg, GEN_INT (offset));
10842 /* VRSAVE is a bit vector representing which AltiVec registers
10843 are used. The OS uses this to determine which vector
10844 registers to save on a context switch. We need to save
10845 VRSAVE on the stack frame, add whatever AltiVec registers we
10846 used in this function, and do the corresponding magic in the
10849 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10851 rtx reg, mem, vrsave;
10854 /* Get VRSAVE onto a GPR. */
10855 reg = gen_rtx_REG (SImode, 12);
10856 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10858 emit_insn (gen_get_vrsave_internal (reg));
10860 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10863 offset = info->vrsave_save_offset + sp_offset;
10865 = gen_rtx_MEM (SImode,
10866 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10867 set_mem_alias_set (mem, rs6000_sr_alias_set);
10868 insn = emit_move_insn (mem, reg);
10870 /* Include the registers in the mask. */
10871 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10873 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10876 /* If we use the link register, get it into r0. */
10877 if (info->lr_save_p)
10878 emit_move_insn (gen_rtx_REG (Pmode, 0),
10879 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10881 /* If we need to save CR, put it into r12. */
10882 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10884 cr_save_rtx = gen_rtx_REG (SImode, 12);
10885 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10888 /* Do any required saving of fpr's. If only one or two to save, do
10889 it ourselves. Otherwise, call function. */
10890 if (saving_FPRs_inline)
10893 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10894 if ((regs_ever_live[info->first_fp_reg_save+i]
10895 && ! call_used_regs[info->first_fp_reg_save+i]))
10896 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10897 info->first_fp_reg_save + i,
10898 info->fp_save_offset + sp_offset + 8 * i,
10901 else if (info->first_fp_reg_save != 64)
10905 const char *alloc_rname;
10907 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10909 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10910 gen_rtx_REG (Pmode,
10911 LINK_REGISTER_REGNUM));
10912 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10913 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10914 alloc_rname = ggc_strdup (rname);
10915 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10916 gen_rtx_SYMBOL_REF (Pmode,
10918 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10920 rtx addr, reg, mem;
10921 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10922 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10923 GEN_INT (info->fp_save_offset
10924 + sp_offset + 8*i));
10925 mem = gen_rtx_MEM (DFmode, addr);
10926 set_mem_alias_set (mem, rs6000_sr_alias_set);
10928 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10930 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10931 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10932 NULL_RTX, NULL_RTX);
10935 /* Save GPRs. This is done as a PARALLEL if we are using
10936 the store-multiple instructions. */
10937 if (using_store_multiple)
10941 p = rtvec_alloc (32 - info->first_gp_reg_save);
10942 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10944 rtx addr, reg, mem;
10945 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10946 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10947 GEN_INT (info->gp_save_offset
10950 mem = gen_rtx_MEM (reg_mode, addr);
10951 set_mem_alias_set (mem, rs6000_sr_alias_set);
10953 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10955 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10956 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10957 NULL_RTX, NULL_RTX);
10962 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10963 if ((regs_ever_live[info->first_gp_reg_save+i]
10964 && ! call_used_regs[info->first_gp_reg_save+i])
10965 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10966 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10967 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10969 rtx addr, reg, mem;
10970 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10972 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10974 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10977 if (!SPE_CONST_OFFSET_OK (offset))
10979 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10980 emit_move_insn (b, GEN_INT (offset));
10983 b = GEN_INT (offset);
10985 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10986 mem = gen_rtx_MEM (V2SImode, addr);
10987 set_mem_alias_set (mem, rs6000_sr_alias_set);
10988 insn = emit_move_insn (mem, reg);
10990 if (GET_CODE (b) == CONST_INT)
10991 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10992 NULL_RTX, NULL_RTX);
10994 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10995 b, GEN_INT (offset));
10999 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11000 GEN_INT (info->gp_save_offset
11003 mem = gen_rtx_MEM (reg_mode, addr);
11004 set_mem_alias_set (mem, rs6000_sr_alias_set);
11006 insn = emit_move_insn (mem, reg);
11007 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11008 NULL_RTX, NULL_RTX);
11013 /* ??? There's no need to emit actual instructions here, but it's the
11014 easiest way to get the frame unwind information emitted. */
11015 if (current_function_calls_eh_return)
11017 unsigned int i, regno;
11021 regno = EH_RETURN_DATA_REGNO (i);
11022 if (regno == INVALID_REGNUM)
11025 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11026 info->ehrd_offset + sp_offset
11027 + reg_size * (int) i,
11032 /* Save lr if we used it. */
11033 if (info->lr_save_p)
11035 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11036 GEN_INT (info->lr_save_offset + sp_offset));
11037 rtx reg = gen_rtx_REG (Pmode, 0);
11038 rtx mem = gen_rtx_MEM (Pmode, addr);
11039 /* This should not be of rs6000_sr_alias_set, because of
11040 __builtin_return_address. */
11042 insn = emit_move_insn (mem, reg);
11043 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11044 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11047 /* Save CR if we use any that must be preserved. */
11048 if (info->cr_save_p)
11050 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11051 GEN_INT (info->cr_save_offset + sp_offset));
11052 rtx mem = gen_rtx_MEM (SImode, addr);
11054 set_mem_alias_set (mem, rs6000_sr_alias_set);
11056 /* If r12 was used to hold the original sp, copy cr into r0 now
11058 if (REGNO (frame_reg_rtx) == 12)
11060 cr_save_rtx = gen_rtx_REG (SImode, 0);
11061 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11063 insn = emit_move_insn (mem, cr_save_rtx);
11065 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11066 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11067 But that's OK. All we have to do is specify that _one_ condition
11068 code register is saved in this stack slot. The thrower's epilogue
11069 will then restore all the call-saved registers.
11070 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11071 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11072 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11075 /* Update stack and set back pointer unless this is V.4,
11076 for which it was done previously. */
11077 if (info->push_p && DEFAULT_ABI != ABI_V4)
11078 rs6000_emit_allocate_stack (info->total_size, FALSE);
11080 /* Set frame pointer, if needed. */
11081 if (frame_pointer_needed)
11083 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11085 RTX_FRAME_RELATED_P (insn) = 1;
11088 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11089 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11090 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11091 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11093 /* If emit_load_toc_table will use the link register, we need to save
11094 it. We use R11 for this purpose because emit_load_toc_table
11095 can use register 0. This allows us to use a plain 'blr' to return
11096 from the procedure more often. */
11097 int save_LR_around_toc_setup = (TARGET_ELF
11098 && DEFAULT_ABI != ABI_AIX
11100 && ! info->lr_save_p
11101 && EXIT_BLOCK_PTR->pred != NULL);
11102 if (save_LR_around_toc_setup)
11103 emit_move_insn (gen_rtx_REG (Pmode, 11),
11104 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11106 rs6000_emit_load_toc_table (TRUE);
11108 if (save_LR_around_toc_setup)
11109 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11110 gen_rtx_REG (Pmode, 11));
11114 if (DEFAULT_ABI == ABI_DARWIN
11115 && flag_pic && current_function_uses_pic_offset_table)
11117 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11118 const char *picbase = machopic_function_base_name ();
11119 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11121 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11123 rs6000_maybe_dead (
11124 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11125 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11130 /* Write function prologue. */
11133 rs6000_output_function_prologue (file, size)
11135 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11137 rs6000_stack_t *info = rs6000_stack_info ();
11139 if (TARGET_DEBUG_STACK)
11140 debug_stack_info (info);
11142 /* Write .extern for any function we will call to save and restore
11144 if (info->first_fp_reg_save < 64
11145 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11146 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11147 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11148 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11149 RESTORE_FP_SUFFIX);
11151 /* Write .extern for AIX common mode routines, if needed. */
11152 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11154 fputs ("\t.extern __mulh\n", file);
11155 fputs ("\t.extern __mull\n", file);
11156 fputs ("\t.extern __divss\n", file);
11157 fputs ("\t.extern __divus\n", file);
11158 fputs ("\t.extern __quoss\n", file);
11159 fputs ("\t.extern __quous\n", file);
11160 common_mode_defined = 1;
11163 if (! HAVE_prologue)
11167 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11168 the "toplevel" insn chain. */
11169 emit_note (0, NOTE_INSN_DELETED);
11170 rs6000_emit_prologue ();
11171 emit_note (0, NOTE_INSN_DELETED);
11173 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11177 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11179 INSN_ADDRESSES_NEW (insn, addr);
11184 if (TARGET_DEBUG_STACK)
11185 debug_rtx_list (get_insns (), 100);
11186 final (get_insns (), file, FALSE, FALSE);
11190 rs6000_pic_labelno++;
11193 /* Emit function epilogue as insns.
11195 At present, dwarf2out_frame_debug_expr doesn't understand
11196 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11197 anywhere in the epilogue. Most of the insns below would in any case
11198 need special notes to explain where r11 is in relation to the stack. */
11201 rs6000_emit_epilogue (sibcall)
11204 rs6000_stack_t *info;
11205 int restoring_FPRs_inline;
11206 int using_load_multiple;
11207 int using_mfcr_multiple;
11208 int use_backchain_to_restore_sp;
11210 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11211 rtx frame_reg_rtx = sp_reg_rtx;
11212 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11213 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11216 info = rs6000_stack_info ();
11218 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11220 reg_mode = V2SImode;
11224 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11225 && (!TARGET_SPE_ABI
11226 || info->spe_64bit_regs_used == 0)
11227 && info->first_gp_reg_save < 31);
11228 restoring_FPRs_inline = (sibcall
11229 || current_function_calls_eh_return
11230 || info->first_fp_reg_save == 64
11231 || FP_SAVE_INLINE (info->first_fp_reg_save));
11232 use_backchain_to_restore_sp = (frame_pointer_needed
11233 || current_function_calls_alloca
11234 || info->total_size > 32767);
11235 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11236 || rs6000_cpu == PROCESSOR_PPC603
11237 || rs6000_cpu == PROCESSOR_PPC750
11240 /* If we have a frame pointer, a call to alloca, or a large stack
11241 frame, restore the old stack pointer using the backchain. Otherwise,
11242 we know what size to update it with. */
11243 if (use_backchain_to_restore_sp)
11245 /* Under V.4, don't reset the stack pointer until after we're done
11246 loading the saved registers. */
11247 if (DEFAULT_ABI == ABI_V4)
11248 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11250 emit_move_insn (frame_reg_rtx,
11251 gen_rtx_MEM (Pmode, sp_reg_rtx));
11254 else if (info->push_p)
11256 if (DEFAULT_ABI == ABI_V4)
11257 sp_offset = info->total_size;
11260 emit_insn (TARGET_32BIT
11261 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11262 GEN_INT (info->total_size))
11263 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11264 GEN_INT (info->total_size)));
11268 /* Restore AltiVec registers if needed. */
11269 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11273 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11274 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11276 rtx addr, areg, mem;
11278 areg = gen_rtx_REG (Pmode, 0);
11280 (areg, GEN_INT (info->altivec_save_offset
11282 + 16 * (i - info->first_altivec_reg_save)));
11284 /* AltiVec addressing mode is [reg+reg]. */
11285 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11286 mem = gen_rtx_MEM (V4SImode, addr);
11287 set_mem_alias_set (mem, rs6000_sr_alias_set);
11289 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11293 /* Restore VRSAVE if needed. */
11294 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11296 rtx addr, mem, reg;
11298 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11299 GEN_INT (info->vrsave_save_offset + sp_offset));
11300 mem = gen_rtx_MEM (SImode, addr);
11301 set_mem_alias_set (mem, rs6000_sr_alias_set);
11302 reg = gen_rtx_REG (SImode, 12);
11303 emit_move_insn (reg, mem);
11305 emit_insn (generate_set_vrsave (reg, info, 1));
11308 /* Get the old lr if we saved it. */
11309 if (info->lr_save_p)
11311 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11312 info->lr_save_offset + sp_offset);
11314 set_mem_alias_set (mem, rs6000_sr_alias_set);
11316 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11319 /* Get the old cr if we saved it. */
11320 if (info->cr_save_p)
11322 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11323 GEN_INT (info->cr_save_offset + sp_offset));
11324 rtx mem = gen_rtx_MEM (SImode, addr);
11326 set_mem_alias_set (mem, rs6000_sr_alias_set);
11328 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11331 /* Set LR here to try to overlap restores below. */
11332 if (info->lr_save_p)
11333 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11334 gen_rtx_REG (Pmode, 0));
11336 /* Load exception handler data registers, if needed. */
11337 if (current_function_calls_eh_return)
11339 unsigned int i, regno;
11345 regno = EH_RETURN_DATA_REGNO (i);
11346 if (regno == INVALID_REGNUM)
11349 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11350 info->ehrd_offset + sp_offset
11351 + reg_size * (int) i);
11352 set_mem_alias_set (mem, rs6000_sr_alias_set);
11354 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11358 /* Restore GPRs. This is done as a PARALLEL if we are using
11359 the load-multiple instructions. */
11360 if (using_load_multiple)
11363 p = rtvec_alloc (32 - info->first_gp_reg_save);
11364 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11366 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11367 GEN_INT (info->gp_save_offset
11370 rtx mem = gen_rtx_MEM (reg_mode, addr);
11372 set_mem_alias_set (mem, rs6000_sr_alias_set);
11375 gen_rtx_SET (VOIDmode,
11376 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11379 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11382 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11383 if ((regs_ever_live[info->first_gp_reg_save+i]
11384 && ! call_used_regs[info->first_gp_reg_save+i])
11385 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11386 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11387 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11389 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11390 GEN_INT (info->gp_save_offset
11393 rtx mem = gen_rtx_MEM (reg_mode, addr);
11395 /* Restore 64-bit quantities for SPE. */
11396 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11398 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11401 if (!SPE_CONST_OFFSET_OK (offset))
11403 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11404 emit_move_insn (b, GEN_INT (offset));
11407 b = GEN_INT (offset);
11409 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11410 mem = gen_rtx_MEM (V2SImode, addr);
11413 set_mem_alias_set (mem, rs6000_sr_alias_set);
11415 emit_move_insn (gen_rtx_REG (reg_mode,
11416 info->first_gp_reg_save + i), mem);
11419 /* Restore fpr's if we need to do it without calling a function. */
11420 if (restoring_FPRs_inline)
11421 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11422 if ((regs_ever_live[info->first_fp_reg_save+i]
11423 && ! call_used_regs[info->first_fp_reg_save+i]))
11426 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11427 GEN_INT (info->fp_save_offset
11430 mem = gen_rtx_MEM (DFmode, addr);
11431 set_mem_alias_set (mem, rs6000_sr_alias_set);
11433 emit_move_insn (gen_rtx_REG (DFmode,
11434 info->first_fp_reg_save + i),
11438 /* If we saved cr, restore it here. Just those that were used. */
11439 if (info->cr_save_p)
11441 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11444 if (using_mfcr_multiple)
11446 for (i = 0; i < 8; i++)
11447 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11453 if (using_mfcr_multiple && count > 1)
11458 p = rtvec_alloc (count);
11461 for (i = 0; i < 8; i++)
11462 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11464 rtvec r = rtvec_alloc (2);
11465 RTVEC_ELT (r, 0) = r12_rtx;
11466 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11467 RTVEC_ELT (p, ndx) =
11468 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11469 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11472 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11477 for (i = 0; i < 8; i++)
11478 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11480 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11486 /* If this is V.4, unwind the stack pointer after all of the loads
11487 have been done. We need to emit a block here so that sched
11488 doesn't decide to move the sp change before the register restores
11489 (which may not have any obvious dependency on the stack). This
11490 doesn't hurt performance, because there is no scheduling that can
11491 be done after this point. */
11492 if (DEFAULT_ABI == ABI_V4)
11494 if (frame_reg_rtx != sp_reg_rtx)
11495 rs6000_emit_stack_tie ();
11497 if (use_backchain_to_restore_sp)
11499 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11501 else if (sp_offset != 0)
11503 emit_insn (TARGET_32BIT
11504 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11505 GEN_INT (sp_offset))
11506 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11507 GEN_INT (sp_offset)));
11511 if (current_function_calls_eh_return)
11513 rtx sa = EH_RETURN_STACKADJ_RTX;
11514 emit_insn (TARGET_32BIT
11515 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11516 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11522 if (! restoring_FPRs_inline)
11523 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11525 p = rtvec_alloc (2);
11527 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11528 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11529 gen_rtx_REG (Pmode,
11530 LINK_REGISTER_REGNUM));
11532 /* If we have to restore more than two FP registers, branch to the
11533 restore function. It will return to our caller. */
11534 if (! restoring_FPRs_inline)
11538 const char *alloc_rname;
11540 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11541 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11542 alloc_rname = ggc_strdup (rname);
11543 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11544 gen_rtx_SYMBOL_REF (Pmode,
11547 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11550 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11551 GEN_INT (info->fp_save_offset + 8*i));
11552 mem = gen_rtx_MEM (DFmode, addr);
11553 set_mem_alias_set (mem, rs6000_sr_alias_set);
11555 RTVEC_ELT (p, i+3) =
11556 gen_rtx_SET (VOIDmode,
11557 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11562 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11566 /* Write function epilogue. */
11569 rs6000_output_function_epilogue (file, size)
11571 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11573 rs6000_stack_t *info = rs6000_stack_info ();
11575 if (! HAVE_epilogue)
11577 rtx insn = get_last_insn ();
11578 /* If the last insn was a BARRIER, we don't have to write anything except
11579 the trace table. */
11580 if (GET_CODE (insn) == NOTE)
11581 insn = prev_nonnote_insn (insn);
11582 if (insn == 0 || GET_CODE (insn) != BARRIER)
11584 /* This is slightly ugly, but at least we don't have two
11585 copies of the epilogue-emitting code. */
11588 /* A NOTE_INSN_DELETED is supposed to be at the start
11589 and end of the "toplevel" insn chain. */
11590 emit_note (0, NOTE_INSN_DELETED);
11591 rs6000_emit_epilogue (FALSE);
11592 emit_note (0, NOTE_INSN_DELETED);
11594 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11598 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11600 INSN_ADDRESSES_NEW (insn, addr);
11605 if (TARGET_DEBUG_STACK)
11606 debug_rtx_list (get_insns (), 100);
11607 final (get_insns (), file, FALSE, FALSE);
11612 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11615 We don't output a traceback table if -finhibit-size-directive was
11616 used. The documentation for -finhibit-size-directive reads
11617 ``don't output a @code{.size} assembler directive, or anything
11618 else that would cause trouble if the function is split in the
11619 middle, and the two halves are placed at locations far apart in
11620 memory.'' The traceback table has this property, since it
11621 includes the offset from the start of the function to the
11622 traceback table itself.
11624 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11625 different traceback table. */
11626 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11627 && rs6000_traceback != traceback_none)
11629 const char *fname = NULL;
11630 const char *language_string = lang_hooks.name;
11631 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11633 int optional_tbtab;
11635 if (rs6000_traceback == traceback_full)
11636 optional_tbtab = 1;
11637 else if (rs6000_traceback == traceback_part)
11638 optional_tbtab = 0;
11640 optional_tbtab = !optimize_size && !TARGET_ELF;
11642 if (optional_tbtab)
11644 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11645 while (*fname == '.') /* V.4 encodes . in the name */
11648 /* Need label immediately before tbtab, so we can compute
11649 its offset from the function start. */
11650 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11651 ASM_OUTPUT_LABEL (file, fname);
11654 /* The .tbtab pseudo-op can only be used for the first eight
11655 expressions, since it can't handle the possibly variable
11656 length fields that follow. However, if you omit the optional
11657 fields, the assembler outputs zeros for all optional fields
11658 anyways, giving each variable length field is minimum length
11659 (as defined in sys/debug.h). Thus we can not use the .tbtab
11660 pseudo-op at all. */
11662 /* An all-zero word flags the start of the tbtab, for debuggers
11663 that have to find it by searching forward from the entry
11664 point or from the current pc. */
11665 fputs ("\t.long 0\n", file);
11667 /* Tbtab format type. Use format type 0. */
11668 fputs ("\t.byte 0,", file);
11670 /* Language type. Unfortunately, there doesn't seem to be any
11671 official way to get this info, so we use language_string. C
11672 is 0. C++ is 9. No number defined for Obj-C, so use the
11673 value for C for now. There is no official value for Java,
11674 although IBM appears to be using 13. There is no official value
11675 for Chill, so we've chosen 44 pseudo-randomly. */
11676 if (! strcmp (language_string, "GNU C")
11677 || ! strcmp (language_string, "GNU Objective-C"))
11679 else if (! strcmp (language_string, "GNU F77"))
11681 else if (! strcmp (language_string, "GNU Ada"))
11683 else if (! strcmp (language_string, "GNU Pascal"))
11685 else if (! strcmp (language_string, "GNU C++"))
11687 else if (! strcmp (language_string, "GNU Java"))
11689 else if (! strcmp (language_string, "GNU CHILL"))
11693 fprintf (file, "%d,", i);
11695 /* 8 single bit fields: global linkage (not set for C extern linkage,
11696 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11697 from start of procedure stored in tbtab, internal function, function
11698 has controlled storage, function has no toc, function uses fp,
11699 function logs/aborts fp operations. */
11700 /* Assume that fp operations are used if any fp reg must be saved. */
11701 fprintf (file, "%d,",
11702 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11704 /* 6 bitfields: function is interrupt handler, name present in
11705 proc table, function calls alloca, on condition directives
11706 (controls stack walks, 3 bits), saves condition reg, saves
11708 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11709 set up as a frame pointer, even when there is no alloca call. */
11710 fprintf (file, "%d,",
11711 ((optional_tbtab << 6)
11712 | ((optional_tbtab & frame_pointer_needed) << 5)
11713 | (info->cr_save_p << 1)
11714 | (info->lr_save_p)));
11716 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11718 fprintf (file, "%d,",
11719 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11721 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11722 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11724 if (optional_tbtab)
11726 /* Compute the parameter info from the function decl argument
11729 int next_parm_info_bit = 31;
11731 for (decl = DECL_ARGUMENTS (current_function_decl);
11732 decl; decl = TREE_CHAIN (decl))
11734 rtx parameter = DECL_INCOMING_RTL (decl);
11735 enum machine_mode mode = GET_MODE (parameter);
11737 if (GET_CODE (parameter) == REG)
11739 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11745 if (mode == SFmode)
11747 else if (mode == DFmode || mode == TFmode)
11752 /* If only one bit will fit, don't or in this entry. */
11753 if (next_parm_info_bit > 0)
11754 parm_info |= (bits << (next_parm_info_bit - 1));
11755 next_parm_info_bit -= 2;
11759 fixed_parms += ((GET_MODE_SIZE (mode)
11760 + (UNITS_PER_WORD - 1))
11762 next_parm_info_bit -= 1;
11768 /* Number of fixed point parameters. */
11769 /* This is actually the number of words of fixed point parameters; thus
11770 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11771 fprintf (file, "%d,", fixed_parms);
11773 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11775 /* This is actually the number of fp registers that hold parameters;
11776 and thus the maximum value is 13. */
11777 /* Set parameters on stack bit if parameters are not in their original
11778 registers, regardless of whether they are on the stack? Xlc
11779 seems to set the bit when not optimizing. */
11780 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11782 if (! optional_tbtab)
11785 /* Optional fields follow. Some are variable length. */
11787 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11788 11 double float. */
11789 /* There is an entry for each parameter in a register, in the order that
11790 they occur in the parameter list. Any intervening arguments on the
11791 stack are ignored. If the list overflows a long (max possible length
11792 34 bits) then completely leave off all elements that don't fit. */
11793 /* Only emit this long if there was at least one parameter. */
11794 if (fixed_parms || float_parms)
11795 fprintf (file, "\t.long %d\n", parm_info);
11797 /* Offset from start of code to tb table. */
11798 fputs ("\t.long ", file);
11799 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11801 RS6000_OUTPUT_BASENAME (file, fname);
11803 assemble_name (file, fname);
11805 fputs ("-.", file);
11807 RS6000_OUTPUT_BASENAME (file, fname);
11809 assemble_name (file, fname);
11813 /* Interrupt handler mask. */
11814 /* Omit this long, since we never set the interrupt handler bit
11817 /* Number of CTL (controlled storage) anchors. */
11818 /* Omit this long, since the has_ctl bit is never set above. */
11820 /* Displacement into stack of each CTL anchor. */
11821 /* Omit this list of longs, because there are no CTL anchors. */
11823 /* Length of function name. */
11826 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11828 /* Function name. */
11829 assemble_string (fname, strlen (fname));
11831 /* Register for alloca automatic storage; this is always reg 31.
11832 Only emit this if the alloca bit was set above. */
11833 if (frame_pointer_needed)
11834 fputs ("\t.byte 31\n", file);
11836 fputs ("\t.align 2\n", file);
11840 /* A C compound statement that outputs the assembler code for a thunk
11841 function, used to implement C++ virtual function calls with
11842 multiple inheritance. The thunk acts as a wrapper around a virtual
11843 function, adjusting the implicit object parameter before handing
11844 control off to the real function.
11846 First, emit code to add the integer DELTA to the location that
11847 contains the incoming first argument. Assume that this argument
11848 contains a pointer, and is the one used to pass the `this' pointer
11849 in C++. This is the incoming argument *before* the function
11850 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11851 values of all other incoming arguments.
11853 After the addition, emit code to jump to FUNCTION, which is a
11854 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11855 not touch the return address. Hence returning from FUNCTION will
11856 return to whoever called the current `thunk'.
11858 The effect must be as if FUNCTION had been called directly with the
11859 adjusted first argument. This macro is responsible for emitting
11860 all of the code for a thunk function; output_function_prologue()
11861 and output_function_epilogue() are not invoked.
11863 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11864 been extracted from it.) It might possibly be useful on some
11865 targets, but probably not.
11867 If you do not define this macro, the target-independent code in the
11868 C++ frontend will generate a less efficient heavyweight thunk that
11869 calls FUNCTION instead of jumping to it. The generic approach does
11870 not support varargs. */
11873 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
11875 tree thunk_fndecl ATTRIBUTE_UNUSED;
11876 HOST_WIDE_INT delta;
11877 HOST_WIDE_INT vcall_offset;
11880 rtx this, insn, funexp;
11882 reload_completed = 1;
11883 no_new_pseudos = 1;
11885 /* Mark the end of the (empty) prologue. */
11886 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
11888 /* Find the "this" pointer. If the function returns a structure,
11889 the structure return pointer is in r3. */
11890 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
11891 this = gen_rtx_REG (Pmode, 4);
11893 this = gen_rtx_REG (Pmode, 3);
11895 /* Apply the constant offset, if required. */
11898 rtx delta_rtx = GEN_INT (delta);
11899 emit_insn (TARGET_32BIT
11900 ? gen_addsi3 (this, this, delta_rtx)
11901 : gen_adddi3 (this, this, delta_rtx));
11904 /* Apply the offset from the vtable, if required. */
11907 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
11908 rtx tmp = gen_rtx_REG (Pmode, 12);
11910 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
11911 emit_insn (TARGET_32BIT
11912 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
11913 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
11914 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
11915 emit_insn (TARGET_32BIT
11916 ? gen_addsi3 (this, this, tmp)
11917 : gen_adddi3 (this, this, tmp));
11920 /* Generate a tail call to the target function. */
11921 if (!TREE_USED (function))
11923 assemble_external (function);
11924 TREE_USED (function) = 1;
11926 funexp = XEXP (DECL_RTL (function), 0);
11927 SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
11928 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
11931 if (MACHOPIC_INDIRECT)
11932 funexp = machopic_indirect_call_target (funexp);
11935 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11936 generate sibcall RTL explicitly to avoid constraint abort. */
11937 insn = emit_call_insn (
11938 gen_rtx_PARALLEL (VOIDmode,
11940 gen_rtx_CALL (VOIDmode,
11941 funexp, const0_rtx),
11942 gen_rtx_USE (VOIDmode, const0_rtx),
11943 gen_rtx_USE (VOIDmode,
11944 gen_rtx_REG (SImode,
11945 LINK_REGISTER_REGNUM)),
11946 gen_rtx_RETURN (VOIDmode))));
11947 SIBLING_CALL_P (insn) = 1;
11950 /* Run just enough of rest_of_compilation to get the insns emitted.
11951 There's not really enough bulk here to make other passes such as
11952 instruction scheduling worth while. Note that use_thunk calls
11953 assemble_start_function and assemble_end_function. */
11954 insn = get_insns ();
11955 shorten_branches (insn);
11956 final_start_function (insn, file, 1);
11957 final (insn, file, 1, 0);
11958 final_end_function ();
11960 reload_completed = 0;
11961 no_new_pseudos = 0;
11964 /* A quick summary of the various types of 'constant-pool tables'
11967 Target Flags Name One table per
11968 AIX (none) AIX TOC object file
11969 AIX -mfull-toc AIX TOC object file
11970 AIX -mminimal-toc AIX minimal TOC translation unit
11971 SVR4/EABI (none) SVR4 SDATA object file
11972 SVR4/EABI -fpic SVR4 pic object file
11973 SVR4/EABI -fPIC SVR4 PIC translation unit
11974 SVR4/EABI -mrelocatable EABI TOC function
11975 SVR4/EABI -maix AIX TOC object file
11976 SVR4/EABI -maix -mminimal-toc
11977 AIX minimal TOC translation unit
11979 Name Reg. Set by entries contains:
11980 made by addrs? fp? sum?
11982 AIX TOC 2 crt0 as Y option option
11983 AIX minimal TOC 30 prolog gcc Y Y option
11984 SVR4 SDATA 13 crt0 gcc N Y N
11985 SVR4 pic 30 prolog ld Y not yet N
11986 SVR4 PIC 30 prolog gcc Y option option
11987 EABI TOC 30 prolog gcc Y option option
11991 /* Hash functions for the hash table. */
11994 rs6000_hash_constant (k)
11997 enum rtx_code code = GET_CODE (k);
11998 enum machine_mode mode = GET_MODE (k);
11999 unsigned result = (code << 3) ^ mode;
12000 const char *format;
12003 format = GET_RTX_FORMAT (code);
12004 flen = strlen (format);
12010 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12013 if (mode != VOIDmode)
12014 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12026 for (; fidx < flen; fidx++)
12027 switch (format[fidx])
12032 const char *str = XSTR (k, fidx);
12033 len = strlen (str);
12034 result = result * 613 + len;
12035 for (i = 0; i < len; i++)
12036 result = result * 613 + (unsigned) str[i];
12041 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12045 result = result * 613 + (unsigned) XINT (k, fidx);
12048 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12049 result = result * 613 + (unsigned) XWINT (k, fidx);
12053 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12054 result = result * 613 + (unsigned) (XWINT (k, fidx)
12068 toc_hash_function (hash_entry)
12069 const void * hash_entry;
12071 const struct toc_hash_struct *thc =
12072 (const struct toc_hash_struct *) hash_entry;
12073 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12076 /* Compare H1 and H2 for equivalence. */
12079 toc_hash_eq (h1, h2)
12083 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12084 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12086 if (((const struct toc_hash_struct *) h1)->key_mode
12087 != ((const struct toc_hash_struct *) h2)->key_mode)
12090 return rtx_equal_p (r1, r2);
12093 /* These are the names given by the C++ front-end to vtables, and
12094 vtable-like objects. Ideally, this logic should not be here;
12095 instead, there should be some programmatic way of inquiring as
12096 to whether or not an object is a vtable. */
12098 #define VTABLE_NAME_P(NAME) \
12099 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12100 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12101 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12102 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12105 rs6000_output_symbol_ref (file, x)
12109 /* Currently C++ toc references to vtables can be emitted before it
12110 is decided whether the vtable is public or private. If this is
12111 the case, then the linker will eventually complain that there is
12112 a reference to an unknown section. Thus, for vtables only,
12113 we emit the TOC reference to reference the symbol and not the
12115 const char *name = XSTR (x, 0);
12117 if (VTABLE_NAME_P (name))
12119 RS6000_OUTPUT_BASENAME (file, name);
12122 assemble_name (file, name);
12125 /* Output a TOC entry. We derive the entry name from what is being
12129 output_toc (file, x, labelno, mode)
12133 enum machine_mode mode;
12136 const char *name = buf;
12137 const char *real_name;
12144 /* When the linker won't eliminate them, don't output duplicate
12145 TOC entries (this happens on AIX if there is any kind of TOC,
12146 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12148 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12150 struct toc_hash_struct *h;
12153 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12154 time because GGC is not initialised at that point. */
12155 if (toc_hash_table == NULL)
12156 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12157 toc_hash_eq, NULL);
12159 h = ggc_alloc (sizeof (*h));
12161 h->key_mode = mode;
12162 h->labelno = labelno;
12164 found = htab_find_slot (toc_hash_table, h, 1);
12165 if (*found == NULL)
12167 else /* This is indeed a duplicate.
12168 Set this label equal to that label. */
12170 fputs ("\t.set ", file);
12171 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12172 fprintf (file, "%d,", labelno);
12173 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12174 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12180 /* If we're going to put a double constant in the TOC, make sure it's
12181 aligned properly when strict alignment is on. */
12182 if (GET_CODE (x) == CONST_DOUBLE
12183 && STRICT_ALIGNMENT
12184 && GET_MODE_BITSIZE (mode) >= 64
12185 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12186 ASM_OUTPUT_ALIGN (file, 3);
12189 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12191 /* Handle FP constants specially. Note that if we have a minimal
12192 TOC, things we put here aren't actually in the TOC, so we can allow
12194 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12196 REAL_VALUE_TYPE rv;
12199 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12200 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12204 if (TARGET_MINIMAL_TOC)
12205 fputs (DOUBLE_INT_ASM_OP, file);
12207 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12208 k[0] & 0xffffffff, k[1] & 0xffffffff,
12209 k[2] & 0xffffffff, k[3] & 0xffffffff);
12210 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12211 k[0] & 0xffffffff, k[1] & 0xffffffff,
12212 k[2] & 0xffffffff, k[3] & 0xffffffff);
12217 if (TARGET_MINIMAL_TOC)
12218 fputs ("\t.long ", file);
12220 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12221 k[0] & 0xffffffff, k[1] & 0xffffffff,
12222 k[2] & 0xffffffff, k[3] & 0xffffffff);
12223 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12224 k[0] & 0xffffffff, k[1] & 0xffffffff,
12225 k[2] & 0xffffffff, k[3] & 0xffffffff);
12229 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12231 REAL_VALUE_TYPE rv;
12234 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12235 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12239 if (TARGET_MINIMAL_TOC)
12240 fputs (DOUBLE_INT_ASM_OP, file);
12242 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12243 k[0] & 0xffffffff, k[1] & 0xffffffff);
12244 fprintf (file, "0x%lx%08lx\n",
12245 k[0] & 0xffffffff, k[1] & 0xffffffff);
12250 if (TARGET_MINIMAL_TOC)
12251 fputs ("\t.long ", file);
12253 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12254 k[0] & 0xffffffff, k[1] & 0xffffffff);
12255 fprintf (file, "0x%lx,0x%lx\n",
12256 k[0] & 0xffffffff, k[1] & 0xffffffff);
12260 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12262 REAL_VALUE_TYPE rv;
12265 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12266 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12270 if (TARGET_MINIMAL_TOC)
12271 fputs (DOUBLE_INT_ASM_OP, file);
12273 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12274 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12279 if (TARGET_MINIMAL_TOC)
12280 fputs ("\t.long ", file);
12282 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12283 fprintf (file, "0x%lx\n", l & 0xffffffff);
12287 else if (GET_MODE (x) == VOIDmode
12288 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12290 unsigned HOST_WIDE_INT low;
12291 HOST_WIDE_INT high;
12293 if (GET_CODE (x) == CONST_DOUBLE)
12295 low = CONST_DOUBLE_LOW (x);
12296 high = CONST_DOUBLE_HIGH (x);
12299 #if HOST_BITS_PER_WIDE_INT == 32
12302 high = (low & 0x80000000) ? ~0 : 0;
12306 low = INTVAL (x) & 0xffffffff;
12307 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12311 /* TOC entries are always Pmode-sized, but since this
12312 is a bigendian machine then if we're putting smaller
12313 integer constants in the TOC we have to pad them.
12314 (This is still a win over putting the constants in
12315 a separate constant pool, because then we'd have
12316 to have both a TOC entry _and_ the actual constant.)
12318 For a 32-bit target, CONST_INT values are loaded and shifted
12319 entirely within `low' and can be stored in one TOC entry. */
12321 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12322 abort ();/* It would be easy to make this work, but it doesn't now. */
12324 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12326 #if HOST_BITS_PER_WIDE_INT == 32
12327 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12328 POINTER_SIZE, &low, &high, 0);
12331 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12332 high = (HOST_WIDE_INT) low >> 32;
12339 if (TARGET_MINIMAL_TOC)
12340 fputs (DOUBLE_INT_ASM_OP, file);
12342 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12343 (long) high & 0xffffffff, (long) low & 0xffffffff);
12344 fprintf (file, "0x%lx%08lx\n",
12345 (long) high & 0xffffffff, (long) low & 0xffffffff);
12350 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12352 if (TARGET_MINIMAL_TOC)
12353 fputs ("\t.long ", file);
12355 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12356 (long) high & 0xffffffff, (long) low & 0xffffffff);
12357 fprintf (file, "0x%lx,0x%lx\n",
12358 (long) high & 0xffffffff, (long) low & 0xffffffff);
12362 if (TARGET_MINIMAL_TOC)
12363 fputs ("\t.long ", file);
12365 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12366 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12372 if (GET_CODE (x) == CONST)
12374 if (GET_CODE (XEXP (x, 0)) != PLUS)
12377 base = XEXP (XEXP (x, 0), 0);
12378 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12381 if (GET_CODE (base) == SYMBOL_REF)
12382 name = XSTR (base, 0);
12383 else if (GET_CODE (base) == LABEL_REF)
12384 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12385 else if (GET_CODE (base) == CODE_LABEL)
12386 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12390 real_name = (*targetm.strip_name_encoding) (name);
12391 if (TARGET_MINIMAL_TOC)
12392 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12395 fprintf (file, "\t.tc %s", real_name);
12398 fprintf (file, ".N%d", - offset);
12400 fprintf (file, ".P%d", offset);
12402 fputs ("[TC],", file);
12405 /* Currently C++ toc references to vtables can be emitted before it
12406 is decided whether the vtable is public or private. If this is
12407 the case, then the linker will eventually complain that there is
12408 a TOC reference to an unknown section. Thus, for vtables only,
12409 we emit the TOC reference to reference the symbol and not the
12411 if (VTABLE_NAME_P (name))
12413 RS6000_OUTPUT_BASENAME (file, name);
12415 fprintf (file, "%d", offset);
12416 else if (offset > 0)
12417 fprintf (file, "+%d", offset);
12420 output_addr_const (file, x);
12424 /* Output an assembler pseudo-op to write an ASCII string of N characters
12425 starting at P to FILE.
12427 On the RS/6000, we have to do this using the .byte operation and
12428 write out special characters outside the quoted string.
12429 Also, the assembler is broken; very long strings are truncated,
12430 so we must artificially break them up early. */
12433 output_ascii (file, p, n)
12439 int i, count_string;
12440 const char *for_string = "\t.byte \"";
12441 const char *for_decimal = "\t.byte ";
12442 const char *to_close = NULL;
12445 for (i = 0; i < n; i++)
12448 if (c >= ' ' && c < 0177)
12451 fputs (for_string, file);
12454 /* Write two quotes to get one. */
12462 for_decimal = "\"\n\t.byte ";
12466 if (count_string >= 512)
12468 fputs (to_close, file);
12470 for_string = "\t.byte \"";
12471 for_decimal = "\t.byte ";
12479 fputs (for_decimal, file);
12480 fprintf (file, "%d", c);
12482 for_string = "\n\t.byte \"";
12483 for_decimal = ", ";
12489 /* Now close the string if we have written one. Then end the line. */
12491 fputs (to_close, file);
12494 /* Generate a unique section name for FILENAME for a section type
12495 represented by SECTION_DESC. Output goes into BUF.
12497 SECTION_DESC can be any string, as long as it is different for each
12498 possible section type.
12500 We name the section in the same manner as xlc. The name begins with an
12501 underscore followed by the filename (after stripping any leading directory
12502 names) with the last period replaced by the string SECTION_DESC. If
12503 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12507 rs6000_gen_section_name (buf, filename, section_desc)
12509 const char *filename;
12510 const char *section_desc;
12512 const char *q, *after_last_slash, *last_period = 0;
12516 after_last_slash = filename;
12517 for (q = filename; *q; q++)
12520 after_last_slash = q + 1;
12521 else if (*q == '.')
12525 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12526 *buf = (char *) xmalloc (len);
12531 for (q = after_last_slash; *q; q++)
12533 if (q == last_period)
12535 strcpy (p, section_desc);
12536 p += strlen (section_desc);
12540 else if (ISALNUM (*q))
12544 if (last_period == 0)
12545 strcpy (p, section_desc);
12550 /* Emit profile function. */
12553 output_profile_hook (labelno)
12554 int labelno ATTRIBUTE_UNUSED;
12556 if (TARGET_PROFILE_KERNEL)
12559 if (DEFAULT_ABI == ABI_AIX)
12561 #ifdef NO_PROFILE_COUNTERS
12562 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12565 const char *label_name;
12568 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12569 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12570 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12572 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12576 else if (DEFAULT_ABI == ABI_DARWIN)
12578 const char *mcount_name = RS6000_MCOUNT;
12579 int caller_addr_regno = LINK_REGISTER_REGNUM;
12581 /* Be conservative and always set this, at least for now. */
12582 current_function_uses_pic_offset_table = 1;
12585 /* For PIC code, set up a stub and collect the caller's address
12586 from r0, which is where the prologue puts it. */
12587 if (MACHOPIC_INDIRECT)
12589 mcount_name = machopic_stub_name (mcount_name);
12590 if (current_function_uses_pic_offset_table)
12591 caller_addr_regno = 0;
12594 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12596 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12600 /* Write function profiler code. */
12603 output_function_profiler (file, labelno)
12610 switch (DEFAULT_ABI)
12619 warning ("no profiling of 64-bit code for this ABI");
12622 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12623 fprintf (file, "\tmflr %s\n", reg_names[0]);
12626 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12627 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12628 reg_names[0], save_lr, reg_names[1]);
12629 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12630 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12631 assemble_name (file, buf);
12632 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12634 else if (flag_pic > 1)
12636 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12637 reg_names[0], save_lr, reg_names[1]);
12638 /* Now, we need to get the address of the label. */
12639 fputs ("\tbl 1f\n\t.long ", file);
12640 assemble_name (file, buf);
12641 fputs ("-.\n1:", file);
12642 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12643 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12644 reg_names[0], reg_names[11]);
12645 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12646 reg_names[0], reg_names[0], reg_names[11]);
12650 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12651 assemble_name (file, buf);
12652 fputs ("@ha\n", file);
12653 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12654 reg_names[0], save_lr, reg_names[1]);
12655 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12656 assemble_name (file, buf);
12657 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12660 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12661 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12666 if (!TARGET_PROFILE_KERNEL)
12668 /* Don't do anything, done in output_profile_hook (). */
12675 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12676 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12678 if (current_function_needs_context)
12680 asm_fprintf (file, "\tstd %s,24(%s)\n",
12681 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12682 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12683 asm_fprintf (file, "\tld %s,24(%s)\n",
12684 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12687 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12695 rs6000_use_dfa_pipeline_interface ()
12700 /* Power4 load update and store update instructions are cracked into a
12701 load or store and an integer insn which are executed in the same cycle.
12702 Branches have their own dispatch slot which does not count against the
12703 GCC issue rate, but it changes the program flow so there are no other
12704 instructions to issue in this cycle. */
12707 rs6000_variable_issue (stream, verbose, insn, more)
12708 FILE *stream ATTRIBUTE_UNUSED;
12709 int verbose ATTRIBUTE_UNUSED;
12713 if (GET_CODE (PATTERN (insn)) == USE
12714 || GET_CODE (PATTERN (insn)) == CLOBBER)
12717 if (rs6000_cpu == PROCESSOR_POWER4)
12719 enum attr_type type = get_attr_type (insn);
12720 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12721 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
12723 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12724 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12725 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12726 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12727 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12728 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12729 || type == TYPE_IDIV || type == TYPE_LDIV)
12730 return more > 2 ? more - 2 : 0;
12736 /* Adjust the cost of a scheduling dependency. Return the new cost of
12737 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12740 rs6000_adjust_cost (insn, link, dep_insn, cost)
12743 rtx dep_insn ATTRIBUTE_UNUSED;
12746 if (! recog_memoized (insn))
12749 if (REG_NOTE_KIND (link) != 0)
12752 if (REG_NOTE_KIND (link) == 0)
12754 /* Data dependency; DEP_INSN writes a register that INSN reads
12755 some cycles later. */
12756 switch (get_attr_type (insn))
12759 /* Tell the first scheduling pass about the latency between
12760 a mtctr and bctr (and mtlr and br/blr). The first
12761 scheduling pass will not know about this latency since
12762 the mtctr instruction, which has the latency associated
12763 to it, will be generated by reload. */
12764 return TARGET_POWER ? 5 : 4;
12766 /* Leave some extra cycles between a compare and its
12767 dependent branch, to inhibit expensive mispredicts. */
12768 if ((rs6000_cpu_attr == CPU_PPC603
12769 || rs6000_cpu_attr == CPU_PPC604
12770 || rs6000_cpu_attr == CPU_PPC604E
12771 || rs6000_cpu_attr == CPU_PPC620
12772 || rs6000_cpu_attr == CPU_PPC630
12773 || rs6000_cpu_attr == CPU_PPC750
12774 || rs6000_cpu_attr == CPU_PPC7400
12775 || rs6000_cpu_attr == CPU_PPC7450
12776 || rs6000_cpu_attr == CPU_POWER4)
12777 && recog_memoized (dep_insn)
12778 && (INSN_CODE (dep_insn) >= 0)
12779 && (get_attr_type (dep_insn) == TYPE_CMP
12780 || get_attr_type (dep_insn) == TYPE_COMPARE
12781 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12782 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12783 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12784 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12785 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12786 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12791 /* Fall out to return default cost. */
12797 /* A C statement (sans semicolon) to update the integer scheduling
12798 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12799 INSN earlier, increase the priority to execute INSN later. Do not
12800 define this macro if you do not need to adjust the scheduling
12801 priorities of insns. */
12804 rs6000_adjust_priority (insn, priority)
12805 rtx insn ATTRIBUTE_UNUSED;
12808 /* On machines (like the 750) which have asymmetric integer units,
12809 where one integer unit can do multiply and divides and the other
12810 can't, reduce the priority of multiply/divide so it is scheduled
12811 before other integer operations. */
12814 if (! INSN_P (insn))
12817 if (GET_CODE (PATTERN (insn)) == USE)
12820 switch (rs6000_cpu_attr) {
12822 switch (get_attr_type (insn))
12829 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12830 priority, priority);
12831 if (priority >= 0 && priority < 0x01000000)
12841 /* Return how many instructions the machine can issue per cycle. */
12844 rs6000_issue_rate ()
12846 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12847 if (!reload_completed)
12850 switch (rs6000_cpu_attr) {
12851 case CPU_RIOS1: /* ? */
12853 case CPU_PPC601: /* ? */
12874 /* Return how many instructions to look ahead for better insn
12878 rs6000_use_sched_lookahead ()
12880 if (rs6000_cpu_attr == CPU_PPC8540)
12886 /* Length in units of the trampoline for entering a nested function. */
12889 rs6000_trampoline_size ()
12893 switch (DEFAULT_ABI)
12899 ret = (TARGET_32BIT) ? 12 : 24;
12904 ret = (TARGET_32BIT) ? 40 : 48;
12911 /* Emit RTL insns to initialize the variable parts of a trampoline.
12912 FNADDR is an RTX for the address of the function's pure code.
12913 CXT is an RTX for the static chain value for the function. */
12916 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12921 enum machine_mode pmode = Pmode;
12922 int regsize = (TARGET_32BIT) ? 4 : 8;
12923 rtx ctx_reg = force_reg (pmode, cxt);
12925 switch (DEFAULT_ABI)
12930 /* Macros to shorten the code expansions below. */
12931 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12932 #define MEM_PLUS(addr,offset) \
12933 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12935 /* Under AIX, just build the 3 word function descriptor */
12938 rtx fn_reg = gen_reg_rtx (pmode);
12939 rtx toc_reg = gen_reg_rtx (pmode);
12940 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12941 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12942 emit_move_insn (MEM_DEREF (addr), fn_reg);
12943 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12944 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12948 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12951 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12952 FALSE, VOIDmode, 4,
12954 GEN_INT (rs6000_trampoline_size ()), SImode,
12964 /* Table of valid machine attributes. */
12966 const struct attribute_spec rs6000_attribute_table[] =
12968 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12969 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12970 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12971 { NULL, 0, 0, false, false, false, NULL }
12974 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12975 struct attribute_spec.handler. */
12978 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12981 tree args ATTRIBUTE_UNUSED;
12982 int flags ATTRIBUTE_UNUSED;
12983 bool *no_add_attrs;
12985 if (TREE_CODE (*node) != FUNCTION_TYPE
12986 && TREE_CODE (*node) != FIELD_DECL
12987 && TREE_CODE (*node) != TYPE_DECL)
12989 warning ("`%s' attribute only applies to functions",
12990 IDENTIFIER_POINTER (name));
12991 *no_add_attrs = true;
12997 /* Set longcall attributes on all functions declared when
12998 rs6000_default_long_calls is true. */
13000 rs6000_set_default_type_attributes (type)
13003 if (rs6000_default_long_calls
13004 && (TREE_CODE (type) == FUNCTION_TYPE
13005 || TREE_CODE (type) == METHOD_TYPE))
13006 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13008 TYPE_ATTRIBUTES (type));
13011 /* Return a reference suitable for calling a function with the
13012 longcall attribute. */
13015 rs6000_longcall_ref (call_ref)
13018 const char *call_name;
13021 if (GET_CODE (call_ref) != SYMBOL_REF)
13024 /* System V adds '.' to the internal name, so skip them. */
13025 call_name = XSTR (call_ref, 0);
13026 if (*call_name == '.')
13028 while (*call_name == '.')
13031 node = get_identifier (call_name);
13032 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13035 return force_reg (Pmode, call_ref);
13039 #ifdef USING_ELFOS_H
13041 /* A C statement or statements to switch to the appropriate section
13042 for output of RTX in mode MODE. You can assume that RTX is some
13043 kind of constant in RTL. The argument MODE is redundant except in
13044 the case of a `const_int' rtx. Select the section by calling
13045 `text_section' or one of the alternatives for other sections.
13047 Do not define this macro if you put all constants in the read-only
13051 rs6000_elf_select_rtx_section (mode, x, align)
13052 enum machine_mode mode;
13054 unsigned HOST_WIDE_INT align;
13056 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13059 default_elf_select_rtx_section (mode, x, align);
13062 /* A C statement or statements to switch to the appropriate
13063 section for output of DECL. DECL is either a `VAR_DECL' node
13064 or a constant of some sort. RELOC indicates whether forming
13065 the initial value of DECL requires link-time relocations. */
13068 rs6000_elf_select_section (decl, reloc, align)
13071 unsigned HOST_WIDE_INT align;
13073 /* Pretend that we're always building for a shared library when
13074 ABI_AIX, because otherwise we end up with dynamic relocations
13075 in read-only sections. This happens for function pointers,
13076 references to vtables in typeinfo, and probably other cases. */
13077 default_elf_select_section_1 (decl, reloc, align,
13078 flag_pic || DEFAULT_ABI == ABI_AIX);
13081 /* A C statement to build up a unique section name, expressed as a
13082 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13083 RELOC indicates whether the initial value of EXP requires
13084 link-time relocations. If you do not define this macro, GCC will use
13085 the symbol name prefixed by `.' as the section name. Note - this
13086 macro can now be called for uninitialized data items as well as
13087 initialized data and functions. */
13090 rs6000_elf_unique_section (decl, reloc)
13094 /* As above, pretend that we're always building for a shared library
13095 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13096 default_unique_section_1 (decl, reloc,
13097 flag_pic || DEFAULT_ABI == ABI_AIX);
13100 /* For a SYMBOL_REF, set generic flags and then perform some
13101 target-specific processing.
13103 When the AIX ABI is requested on a non-AIX system, replace the
13104 function name with the real name (with a leading .) rather than the
13105 function descriptor name. This saves a lot of overriding code to
13106 read the prefixes. */
13109 rs6000_elf_encode_section_info (decl, rtl, first)
13114 default_encode_section_info (decl, rtl, first);
13117 && TREE_CODE (decl) == FUNCTION_DECL
13119 && DEFAULT_ABI == ABI_AIX)
13121 rtx sym_ref = XEXP (rtl, 0);
13122 size_t len = strlen (XSTR (sym_ref, 0));
13123 char *str = alloca (len + 2);
13125 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13126 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13131 rs6000_elf_in_small_data_p (decl)
13134 if (rs6000_sdata == SDATA_NONE)
13137 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13139 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13140 if (strcmp (section, ".sdata") == 0
13141 || strcmp (section, ".sdata2") == 0
13142 || strcmp (section, ".sbss") == 0
13143 || strcmp (section, ".sbss2") == 0
13144 || strcmp (section, ".PPC.EMB.sdata0") == 0
13145 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13150 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13153 && size <= g_switch_value
13154 /* If it's not public, and we're not going to reference it there,
13155 there's no need to put it in the small data section. */
13156 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13163 #endif /* USING_ELFOS_H */
13166 /* Return a REG that occurs in ADDR with coefficient 1.
13167 ADDR can be effectively incremented by incrementing REG.
13169 r0 is special and we must not select it as an address
13170 register by this routine since our caller will try to
13171 increment the returned register via an "la" instruction. */
13174 find_addr_reg (addr)
13177 while (GET_CODE (addr) == PLUS)
13179 if (GET_CODE (XEXP (addr, 0)) == REG
13180 && REGNO (XEXP (addr, 0)) != 0)
13181 addr = XEXP (addr, 0);
13182 else if (GET_CODE (XEXP (addr, 1)) == REG
13183 && REGNO (XEXP (addr, 1)) != 0)
13184 addr = XEXP (addr, 1);
13185 else if (CONSTANT_P (XEXP (addr, 0)))
13186 addr = XEXP (addr, 1);
13187 else if (CONSTANT_P (XEXP (addr, 1)))
13188 addr = XEXP (addr, 0);
13192 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13198 rs6000_fatal_bad_address (op)
13201 fatal_insn ("bad address", op);
13207 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13208 reference and a constant. */
13211 symbolic_operand (op)
13214 switch (GET_CODE (op))
13221 return (GET_CODE (op) == SYMBOL_REF ||
13222 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13223 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13224 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13231 #ifdef RS6000_LONG_BRANCH
13233 static tree stub_list = 0;
13235 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13236 procedure calls to the linked list. */
13239 add_compiler_stub (label_name, function_name, line_number)
13241 tree function_name;
13244 tree stub = build_tree_list (function_name, label_name);
13245 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13246 TREE_CHAIN (stub) = stub_list;
13250 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13251 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13252 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13254 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13255 handling procedure calls from the linked list and initializes the
13259 output_compiler_stub ()
13262 char label_buf[256];
13266 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13268 fprintf (asm_out_file,
13269 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13271 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13272 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13273 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13274 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13276 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13278 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13281 label_buf[0] = '_';
13282 strcpy (label_buf+1,
13283 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13286 strcpy (tmp_buf, "lis r12,hi16(");
13287 strcat (tmp_buf, label_buf);
13288 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13289 strcat (tmp_buf, label_buf);
13290 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13291 output_asm_insn (tmp_buf, 0);
13293 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13294 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13295 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13296 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13302 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13303 already there or not. */
13306 no_previous_def (function_name)
13307 tree function_name;
13310 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13311 if (function_name == STUB_FUNCTION_NAME (stub))
13316 /* GET_PREV_LABEL gets the label name from the previous definition of
13320 get_prev_label (function_name)
13321 tree function_name;
13324 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13325 if (function_name == STUB_FUNCTION_NAME (stub))
13326 return STUB_LABEL_NAME (stub);
13330 /* INSN is either a function call or a millicode call. It may have an
13331 unconditional jump in its delay slot.
13333 CALL_DEST is the routine we are calling. */
13336 output_call (insn, call_dest, operand_number)
13339 int operand_number;
13341 static char buf[256];
13342 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13345 tree funname = get_identifier (XSTR (call_dest, 0));
13347 if (no_previous_def (funname))
13349 int line_number = 0;
13350 rtx label_rtx = gen_label_rtx ();
13351 char *label_buf, temp_buf[256];
13352 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13353 CODE_LABEL_NUMBER (label_rtx));
13354 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13355 labelname = get_identifier (label_buf);
13356 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13358 line_number = NOTE_LINE_NUMBER (insn);
13359 add_compiler_stub (labelname, funname, line_number);
13362 labelname = get_prev_label (funname);
13364 sprintf (buf, "jbsr %%z%d,%.246s",
13365 operand_number, IDENTIFIER_POINTER (labelname));
13370 sprintf (buf, "bl %%z%d", operand_number);
13375 #endif /* RS6000_LONG_BRANCH */
13377 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13379 const char *const symbol_ = (SYMBOL); \
13380 char *buffer_ = (BUF); \
13381 if (symbol_[0] == '"') \
13383 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13385 else if (name_needs_quotes(symbol_)) \
13387 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13391 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13396 /* Generate PIC and indirect symbol stubs. */
13399 machopic_output_stub (file, symb, stub)
13401 const char *symb, *stub;
13403 unsigned int length;
13404 char *symbol_name, *lazy_ptr_name;
13405 char *local_label_0;
13406 static int label = 0;
13408 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13409 symb = (*targetm.strip_name_encoding) (symb);
13413 length = strlen (symb);
13414 symbol_name = alloca (length + 32);
13415 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13417 lazy_ptr_name = alloca (length + 32);
13418 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13420 local_label_0 = alloca (length + 32);
13421 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13424 machopic_picsymbol_stub1_section ();
13426 machopic_symbol_stub1_section ();
13427 fprintf (file, "\t.align 2\n");
13429 fprintf (file, "%s:\n", stub);
13430 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13434 fprintf (file, "\tmflr r0\n");
13435 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13436 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13437 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13438 lazy_ptr_name, local_label_0);
13439 fprintf (file, "\tmtlr r0\n");
13440 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13441 lazy_ptr_name, local_label_0);
13442 fprintf (file, "\tmtctr r12\n");
13443 fprintf (file, "\tbctr\n");
13447 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13448 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13449 fprintf (file, "\tmtctr r12\n");
13450 fprintf (file, "\tbctr\n");
13453 machopic_lazy_symbol_ptr_section ();
13454 fprintf (file, "%s:\n", lazy_ptr_name);
13455 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13456 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13459 /* Legitimize PIC addresses. If the address is already
13460 position-independent, we return ORIG. Newly generated
13461 position-independent addresses go into a reg. This is REG if non
13462 zero, otherwise we allocate register(s) as necessary. */
13464 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13467 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13469 enum machine_mode mode;
13474 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13475 reg = gen_reg_rtx (Pmode);
13477 if (GET_CODE (orig) == CONST)
13479 if (GET_CODE (XEXP (orig, 0)) == PLUS
13480 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13483 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13486 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13489 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13495 if (GET_CODE (offset) == CONST_INT)
13497 if (SMALL_INT (offset))
13498 return plus_constant (base, INTVAL (offset));
13499 else if (! reload_in_progress && ! reload_completed)
13500 offset = force_reg (Pmode, offset);
13503 rtx mem = force_const_mem (Pmode, orig);
13504 return machopic_legitimize_pic_address (mem, Pmode, reg);
13507 return gen_rtx (PLUS, Pmode, base, offset);
13510 /* Fall back on generic machopic code. */
13511 return machopic_legitimize_pic_address (orig, mode, reg);
13514 /* This is just a placeholder to make linking work without having to
13515 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13516 ever needed for Darwin (not too likely!) this would have to get a
13517 real definition. */
13524 #endif /* TARGET_MACHO */
13527 static unsigned int
13528 rs6000_elf_section_type_flags (decl, name, reloc)
13534 = default_section_type_flags_1 (decl, name, reloc,
13535 flag_pic || DEFAULT_ABI == ABI_AIX);
13537 if (TARGET_RELOCATABLE)
13538 flags |= SECTION_WRITE;
13543 /* Record an element in the table of global constructors. SYMBOL is
13544 a SYMBOL_REF of the function to be called; PRIORITY is a number
13545 between 0 and MAX_INIT_PRIORITY.
13547 This differs from default_named_section_asm_out_constructor in
13548 that we have special handling for -mrelocatable. */
13551 rs6000_elf_asm_out_constructor (symbol, priority)
13555 const char *section = ".ctors";
13558 if (priority != DEFAULT_INIT_PRIORITY)
13560 sprintf (buf, ".ctors.%.5u",
13561 /* Invert the numbering so the linker puts us in the proper
13562 order; constructors are run from right to left, and the
13563 linker sorts in increasing order. */
13564 MAX_INIT_PRIORITY - priority);
13568 named_section_flags (section, SECTION_WRITE);
13569 assemble_align (POINTER_SIZE);
13571 if (TARGET_RELOCATABLE)
13573 fputs ("\t.long (", asm_out_file);
13574 output_addr_const (asm_out_file, symbol);
13575 fputs (")@fixup\n", asm_out_file);
13578 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13582 rs6000_elf_asm_out_destructor (symbol, priority)
13586 const char *section = ".dtors";
13589 if (priority != DEFAULT_INIT_PRIORITY)
13591 sprintf (buf, ".dtors.%.5u",
13592 /* Invert the numbering so the linker puts us in the proper
13593 order; constructors are run from right to left, and the
13594 linker sorts in increasing order. */
13595 MAX_INIT_PRIORITY - priority);
13599 named_section_flags (section, SECTION_WRITE);
13600 assemble_align (POINTER_SIZE);
13602 if (TARGET_RELOCATABLE)
13604 fputs ("\t.long (", asm_out_file);
13605 output_addr_const (asm_out_file, symbol);
13606 fputs (")@fixup\n", asm_out_file);
13609 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13615 rs6000_xcoff_asm_globalize_label (stream, name)
13619 fputs (GLOBAL_ASM_OP, stream);
13620 RS6000_OUTPUT_BASENAME (stream, name);
13621 putc ('\n', stream);
13625 rs6000_xcoff_asm_named_section (name, flags)
13627 unsigned int flags;
13630 static const char * const suffix[3] = { "PR", "RO", "RW" };
13632 if (flags & SECTION_CODE)
13634 else if (flags & SECTION_WRITE)
13639 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13640 (flags & SECTION_CODE) ? "." : "",
13641 name, suffix[smclass], flags & SECTION_ENTSIZE);
13645 rs6000_xcoff_select_section (decl, reloc, align)
13648 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13650 if (decl_readonly_section_1 (decl, reloc, 1))
13652 if (TREE_PUBLIC (decl))
13653 read_only_data_section ();
13655 read_only_private_data_section ();
13659 if (TREE_PUBLIC (decl))
13662 private_data_section ();
13667 rs6000_xcoff_unique_section (decl, reloc)
13669 int reloc ATTRIBUTE_UNUSED;
13673 /* Use select_section for private and uninitialized data. */
13674 if (!TREE_PUBLIC (decl)
13675 || DECL_COMMON (decl)
13676 || DECL_INITIAL (decl) == NULL_TREE
13677 || DECL_INITIAL (decl) == error_mark_node
13678 || (flag_zero_initialized_in_bss
13679 && initializer_zerop (DECL_INITIAL (decl))))
13682 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13683 name = (*targetm.strip_name_encoding) (name);
13684 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13687 /* Select section for constant in constant pool.
13689 On RS/6000, all constants are in the private read-only data area.
13690 However, if this is being placed in the TOC it must be output as a
13694 rs6000_xcoff_select_rtx_section (mode, x, align)
13695 enum machine_mode mode;
13697 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13699 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13702 read_only_private_data_section ();
13705 /* Remove any trailing [DS] or the like from the symbol name. */
13707 static const char *
13708 rs6000_xcoff_strip_name_encoding (name)
13714 len = strlen (name);
13715 if (name[len - 1] == ']')
13716 return ggc_alloc_string (name, len - 4);
13721 /* Section attributes. AIX is always PIC. */
13723 static unsigned int
13724 rs6000_xcoff_section_type_flags (decl, name, reloc)
13729 unsigned int align;
13730 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13732 /* Align to at least UNIT size. */
13733 if (flags & SECTION_CODE)
13734 align = MIN_UNITS_PER_WORD;
13736 /* Increase alignment of large objects if not already stricter. */
13737 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13738 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13739 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13741 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13743 #endif /* TARGET_XCOFF */
13746 /* Cross-module name binding. Darwin does not support overriding
13747 functions at dynamic-link time. */
13750 rs6000_binds_local_p (decl)
13753 return default_binds_local_p_1 (decl, 0);
13757 /* Compute a (partial) cost for rtx X. Return true if the complete
13758 cost has been computed, and false if subexpressions should be
13759 scanned. In either case, *TOTAL contains the cost result. */
13762 rs6000_rtx_costs (x, code, outer_code, total)
13764 int code, outer_code ATTRIBUTE_UNUSED;
13769 /* On the RS/6000, if it is valid in the insn, it is free.
13770 So this always returns 0. */
13781 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13782 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
13783 + 0x8000) >= 0x10000)
13784 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13785 ? COSTS_N_INSNS (2)
13786 : COSTS_N_INSNS (1));
13792 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13793 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
13794 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13795 ? COSTS_N_INSNS (2)
13796 : COSTS_N_INSNS (1));
13802 *total = COSTS_N_INSNS (2);
13805 switch (rs6000_cpu)
13807 case PROCESSOR_RIOS1:
13808 case PROCESSOR_PPC405:
13809 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13810 ? COSTS_N_INSNS (5)
13811 : (INTVAL (XEXP (x, 1)) >= -256
13812 && INTVAL (XEXP (x, 1)) <= 255)
13813 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13816 case PROCESSOR_PPC440:
13817 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13818 ? COSTS_N_INSNS (3)
13819 : COSTS_N_INSNS (2));
13822 case PROCESSOR_RS64A:
13823 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13824 ? GET_MODE (XEXP (x, 1)) != DImode
13825 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13826 : (INTVAL (XEXP (x, 1)) >= -256
13827 && INTVAL (XEXP (x, 1)) <= 255)
13828 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13831 case PROCESSOR_RIOS2:
13832 case PROCESSOR_MPCCORE:
13833 case PROCESSOR_PPC604e:
13834 *total = COSTS_N_INSNS (2);
13837 case PROCESSOR_PPC601:
13838 *total = COSTS_N_INSNS (5);
13841 case PROCESSOR_PPC603:
13842 case PROCESSOR_PPC7400:
13843 case PROCESSOR_PPC750:
13844 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13845 ? COSTS_N_INSNS (5)
13846 : (INTVAL (XEXP (x, 1)) >= -256
13847 && INTVAL (XEXP (x, 1)) <= 255)
13848 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13851 case PROCESSOR_PPC7450:
13852 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13853 ? COSTS_N_INSNS (4)
13854 : COSTS_N_INSNS (3));
13857 case PROCESSOR_PPC403:
13858 case PROCESSOR_PPC604:
13859 case PROCESSOR_PPC8540:
13860 *total = COSTS_N_INSNS (4);
13863 case PROCESSOR_PPC620:
13864 case PROCESSOR_PPC630:
13865 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13866 ? GET_MODE (XEXP (x, 1)) != DImode
13867 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13868 : (INTVAL (XEXP (x, 1)) >= -256
13869 && INTVAL (XEXP (x, 1)) <= 255)
13870 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13873 case PROCESSOR_POWER4:
13874 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13875 ? GET_MODE (XEXP (x, 1)) != DImode
13876 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13877 : COSTS_N_INSNS (2));
13886 if (GET_CODE (XEXP (x, 1)) == CONST_INT
13887 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
13889 *total = COSTS_N_INSNS (2);
13896 switch (rs6000_cpu)
13898 case PROCESSOR_RIOS1:
13899 *total = COSTS_N_INSNS (19);
13902 case PROCESSOR_RIOS2:
13903 *total = COSTS_N_INSNS (13);
13906 case PROCESSOR_RS64A:
13907 *total = (GET_MODE (XEXP (x, 1)) != DImode
13908 ? COSTS_N_INSNS (65)
13909 : COSTS_N_INSNS (67));
13912 case PROCESSOR_MPCCORE:
13913 *total = COSTS_N_INSNS (6);
13916 case PROCESSOR_PPC403:
13917 *total = COSTS_N_INSNS (33);
13920 case PROCESSOR_PPC405:
13921 *total = COSTS_N_INSNS (35);
13924 case PROCESSOR_PPC440:
13925 *total = COSTS_N_INSNS (34);
13928 case PROCESSOR_PPC601:
13929 *total = COSTS_N_INSNS (36);
13932 case PROCESSOR_PPC603:
13933 *total = COSTS_N_INSNS (37);
13936 case PROCESSOR_PPC604:
13937 case PROCESSOR_PPC604e:
13938 *total = COSTS_N_INSNS (20);
13941 case PROCESSOR_PPC620:
13942 case PROCESSOR_PPC630:
13943 *total = (GET_MODE (XEXP (x, 1)) != DImode
13944 ? COSTS_N_INSNS (21)
13945 : COSTS_N_INSNS (37));
13948 case PROCESSOR_PPC750:
13949 case PROCESSOR_PPC8540:
13950 case PROCESSOR_PPC7400:
13951 *total = COSTS_N_INSNS (19);
13954 case PROCESSOR_PPC7450:
13955 *total = COSTS_N_INSNS (23);
13958 case PROCESSOR_POWER4:
13959 *total = (GET_MODE (XEXP (x, 1)) != DImode
13960 ? COSTS_N_INSNS (18)
13961 : COSTS_N_INSNS (34));
13969 *total = COSTS_N_INSNS (4);
13973 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13982 /* A C expression returning the cost of moving data from a register of class
13983 CLASS1 to one of CLASS2. */
13986 rs6000_register_move_cost (mode, from, to)
13987 enum machine_mode mode;
13988 enum reg_class from, to;
13990 /* Moves from/to GENERAL_REGS. */
13991 if (reg_classes_intersect_p (to, GENERAL_REGS)
13992 || reg_classes_intersect_p (from, GENERAL_REGS))
13994 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13997 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13998 return (rs6000_memory_move_cost (mode, from, 0)
13999 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14001 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14002 else if (from == CR_REGS)
14006 /* A move will cost one instruction per GPR moved. */
14007 return 2 * HARD_REGNO_NREGS (0, mode);
14010 /* Moving between two similar registers is just one instruction. */
14011 else if (reg_classes_intersect_p (to, from))
14012 return mode == TFmode ? 4 : 2;
14014 /* Everything else has to go through GENERAL_REGS. */
14016 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14017 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14020 /* A C expressions returning the cost of moving data of MODE from a register to
14024 rs6000_memory_move_cost (mode, class, in)
14025 enum machine_mode mode;
14026 enum reg_class class;
14027 int in ATTRIBUTE_UNUSED;
14029 if (reg_classes_intersect_p (class, GENERAL_REGS))
14030 return 4 * HARD_REGNO_NREGS (0, mode);
14031 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14032 return 4 * HARD_REGNO_NREGS (32, mode);
14033 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14034 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14036 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14039 /* Return true if TYPE is of type __ev64_opaque__. */
14042 is_ev64_opaque_type (type)
14046 && (type == opaque_V2SI_type_node
14047 || type == opaque_V2SF_type_node
14048 || type == opaque_p_V2SI_type_node
14049 || (TREE_CODE (type) == VECTOR_TYPE
14050 && TYPE_NAME (type)
14051 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14052 && DECL_NAME (TYPE_NAME (type))
14053 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14054 "__ev64_opaque__") == 0)));
14058 rs6000_dwarf_register_span (reg)
14063 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14066 regno = REGNO (reg);
14068 /* The duality of the SPE register size wreaks all kinds of havoc.
14069 This is a way of distinguishing r0 in 32-bits from r0 in
14072 gen_rtx_PARALLEL (VOIDmode,
14075 gen_rtx_REG (SImode, regno + 1200),
14076 gen_rtx_REG (SImode, regno))
14078 gen_rtx_REG (SImode, regno),
14079 gen_rtx_REG (SImode, regno + 1200)));
14082 #include "gt-rs6000.h"