1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Whether SPE simd instructions should be generated. */
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
116 /* Save information from a "cmpxx" operation until the branch or scc is
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno;
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
139 /* ABI enumeration available for subtarget to use. */
140 enum rs6000_abi rs6000_current_abi;
142 /* ABI string from -mabi= option. */
143 const char *rs6000_abi_string;
146 const char *rs6000_debug_name;
147 int rs6000_debug_stack; /* debug stack applications */
148 int rs6000_debug_arg; /* debug argument handling */
150 /* A copy of V2SI_type_node to be used as an opaque type. */
151 static GTY(()) tree opaque_V2SI_type_node;
153 /* Same, but for V2SF. */
154 static GTY(()) tree opaque_V2SF_type_node;
156 const char *rs6000_traceback_name;
158 traceback_default = 0,
164 /* Flag to say the TOC is initialized */
166 char toc_label_name[10];
168 /* Alias set for saves and restores from the rs6000 stack. */
169 static int rs6000_sr_alias_set;
171 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
172 The only place that looks at this is rs6000_set_default_type_attributes;
173 everywhere else should rely on the presence or absence of a longcall
174 attribute on the function declaration. */
175 int rs6000_default_long_calls;
176 const char *rs6000_longcall_switch;
178 struct builtin_description
180 /* mask is not const because we're going to alter it below. This
181 nonsense will go away when we rewrite the -march infrastructure
182 to give us more target flag bits. */
184 const enum insn_code icode;
185 const char *const name;
186 const enum rs6000_builtins code;
189 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
190 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
191 static void validate_condition_mode
192 PARAMS ((enum rtx_code, enum machine_mode));
193 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
194 static void rs6000_maybe_dead PARAMS ((rtx));
195 static void rs6000_emit_stack_tie PARAMS ((void));
196 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
197 static rtx spe_synthesize_frame_save PARAMS ((rtx));
198 static bool spe_func_has_64bit_regs_p PARAMS ((void));
199 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
200 unsigned int, int, int));
201 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
202 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
203 static unsigned rs6000_hash_constant PARAMS ((rtx));
204 static unsigned toc_hash_function PARAMS ((const void *));
205 static int toc_hash_eq PARAMS ((const void *, const void *));
206 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
207 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
208 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
209 #ifdef HAVE_GAS_HIDDEN
210 static void rs6000_assemble_visibility PARAMS ((tree, int));
212 static int rs6000_ra_ever_killed PARAMS ((void));
213 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
214 const struct attribute_spec rs6000_attribute_table[];
215 static void rs6000_set_default_type_attributes PARAMS ((tree));
216 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
217 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
218 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
219 HOST_WIDE_INT, tree));
220 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
221 HOST_WIDE_INT, HOST_WIDE_INT));
223 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
225 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
226 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
227 static void rs6000_elf_select_section PARAMS ((tree, int,
228 unsigned HOST_WIDE_INT));
229 static void rs6000_elf_unique_section PARAMS ((tree, int));
230 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
231 unsigned HOST_WIDE_INT));
232 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
234 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
235 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
238 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
239 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
240 static void rs6000_xcoff_select_section PARAMS ((tree, int,
241 unsigned HOST_WIDE_INT));
242 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
243 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
244 unsigned HOST_WIDE_INT));
245 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
246 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
247 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
251 static bool rs6000_binds_local_p PARAMS ((tree));
253 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
254 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
255 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
256 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
257 static int rs6000_adjust_priority PARAMS ((rtx, int));
258 static int rs6000_issue_rate PARAMS ((void));
259 static int rs6000_use_sched_lookahead PARAMS ((void));
261 static void rs6000_init_builtins PARAMS ((void));
262 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
263 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
264 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
265 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
266 static void altivec_init_builtins PARAMS ((void));
267 static void rs6000_common_init_builtins PARAMS ((void));
269 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
270 int, enum rs6000_builtins,
271 enum rs6000_builtins));
272 static void spe_init_builtins PARAMS ((void));
273 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
274 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
275 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
276 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
278 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
279 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
280 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
281 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
282 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
283 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
284 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
285 static void rs6000_parse_abi_options PARAMS ((void));
286 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
287 static int first_altivec_reg_to_save PARAMS ((void));
288 static unsigned int compute_vrsave_mask PARAMS ((void));
289 static void is_altivec_return_reg PARAMS ((rtx, void *));
290 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
291 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
292 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
293 static bool is_ev64_opaque_type PARAMS ((tree));
294 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
296 /* Hash table stuff for keeping track of TOC entries. */
298 struct toc_hash_struct GTY(())
300 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
301 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
303 enum machine_mode key_mode;
307 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
309 /* Default register names. */
310 char rs6000_reg_names[][8] =
312 "0", "1", "2", "3", "4", "5", "6", "7",
313 "8", "9", "10", "11", "12", "13", "14", "15",
314 "16", "17", "18", "19", "20", "21", "22", "23",
315 "24", "25", "26", "27", "28", "29", "30", "31",
316 "0", "1", "2", "3", "4", "5", "6", "7",
317 "8", "9", "10", "11", "12", "13", "14", "15",
318 "16", "17", "18", "19", "20", "21", "22", "23",
319 "24", "25", "26", "27", "28", "29", "30", "31",
320 "mq", "lr", "ctr","ap",
321 "0", "1", "2", "3", "4", "5", "6", "7",
323 /* AltiVec registers. */
324 "0", "1", "2", "3", "4", "5", "6", "7",
325 "8", "9", "10", "11", "12", "13", "14", "15",
326 "16", "17", "18", "19", "20", "21", "22", "23",
327 "24", "25", "26", "27", "28", "29", "30", "31",
333 #ifdef TARGET_REGNAMES
334 static const char alt_reg_names[][8] =
336 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
337 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
338 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
339 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
340 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
341 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
342 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
343 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
344 "mq", "lr", "ctr", "ap",
345 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
347 /* AltiVec registers. */
348 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
349 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
350 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
351 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
358 #ifndef MASK_STRICT_ALIGN
359 #define MASK_STRICT_ALIGN 0
361 #ifndef TARGET_PROFILE_KERNEL
362 #define TARGET_PROFILE_KERNEL 0
365 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
366 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
368 /* Initialize the GCC target structure. */
369 #undef TARGET_ATTRIBUTE_TABLE
370 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
371 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
372 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
374 #undef TARGET_ASM_ALIGNED_DI_OP
375 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
377 /* Default unaligned ops are only provided for ELF. Find the ops needed
378 for non-ELF systems. */
379 #ifndef OBJECT_FORMAT_ELF
381 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
383 #undef TARGET_ASM_UNALIGNED_HI_OP
384 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
385 #undef TARGET_ASM_UNALIGNED_SI_OP
386 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
387 #undef TARGET_ASM_UNALIGNED_DI_OP
388 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
391 #undef TARGET_ASM_UNALIGNED_HI_OP
392 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
393 #undef TARGET_ASM_UNALIGNED_SI_OP
394 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
398 /* This hook deals with fixups for relocatable code and DI-mode objects
400 #undef TARGET_ASM_INTEGER
401 #define TARGET_ASM_INTEGER rs6000_assemble_integer
403 #ifdef HAVE_GAS_HIDDEN
404 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
405 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
408 #undef TARGET_ASM_FUNCTION_PROLOGUE
409 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
410 #undef TARGET_ASM_FUNCTION_EPILOGUE
411 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
413 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
414 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
415 #undef TARGET_SCHED_VARIABLE_ISSUE
416 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
418 #undef TARGET_SCHED_ISSUE_RATE
419 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
420 #undef TARGET_SCHED_ADJUST_COST
421 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
422 #undef TARGET_SCHED_ADJUST_PRIORITY
423 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
425 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
426 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
428 #undef TARGET_INIT_BUILTINS
429 #define TARGET_INIT_BUILTINS rs6000_init_builtins
431 #undef TARGET_EXPAND_BUILTIN
432 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
435 #undef TARGET_BINDS_LOCAL_P
436 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
439 #undef TARGET_ASM_OUTPUT_MI_THUNK
440 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
442 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
443 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
445 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
446 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
448 #undef TARGET_RTX_COSTS
449 #define TARGET_RTX_COSTS rs6000_rtx_costs
450 #undef TARGET_ADDRESS_COST
451 #define TARGET_ADDRESS_COST hook_int_rtx_0
453 #undef TARGET_VECTOR_OPAQUE_P
454 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
456 #undef TARGET_DWARF_REGISTER_SPAN
457 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
459 struct gcc_target targetm = TARGET_INITIALIZER;
461 /* Override command line options. Mostly we process the processor
462 type and sometimes adjust other TARGET_ options. */
465 rs6000_override_options (default_cpu)
466 const char *default_cpu;
469 struct rs6000_cpu_select *ptr;
471 /* Simplify the entries below by making a mask for any POWER
472 variant and any PowerPC variant. */
474 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
475 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
476 | MASK_PPC_GFXOPT | MASK_POWERPC64)
477 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
481 const char *const name; /* Canonical processor name. */
482 const enum processor_type processor; /* Processor type enum value. */
483 const int target_enable; /* Target flags to enable. */
484 const int target_disable; /* Target flags to disable. */
485 } const processor_target_table[]
486 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
487 POWER_MASKS | POWERPC_MASKS},
488 {"power", PROCESSOR_POWER,
489 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
490 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
491 {"power2", PROCESSOR_POWER,
492 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
493 POWERPC_MASKS | MASK_NEW_MNEMONICS},
494 {"power3", PROCESSOR_PPC630,
495 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
497 {"power4", PROCESSOR_POWER4,
498 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
500 {"powerpc", PROCESSOR_POWERPC,
501 MASK_POWERPC | MASK_NEW_MNEMONICS,
502 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
503 {"powerpc64", PROCESSOR_POWERPC64,
504 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
505 POWER_MASKS | POWERPC_OPT_MASKS},
506 {"rios", PROCESSOR_RIOS1,
507 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
508 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
509 {"rios1", PROCESSOR_RIOS1,
510 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
511 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
512 {"rsc", PROCESSOR_PPC601,
513 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
514 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
515 {"rsc1", PROCESSOR_PPC601,
516 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
517 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
518 {"rios2", PROCESSOR_RIOS2,
519 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
520 POWERPC_MASKS | MASK_NEW_MNEMONICS},
521 {"rs64a", PROCESSOR_RS64A,
522 MASK_POWERPC | MASK_NEW_MNEMONICS,
523 POWER_MASKS | POWERPC_OPT_MASKS},
524 {"401", PROCESSOR_PPC403,
525 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
527 {"403", PROCESSOR_PPC403,
528 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
529 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
530 {"405", PROCESSOR_PPC405,
531 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
532 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
533 {"405f", PROCESSOR_PPC405,
534 MASK_POWERPC | MASK_NEW_MNEMONICS,
535 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
536 {"505", PROCESSOR_MPCCORE,
537 MASK_POWERPC | MASK_NEW_MNEMONICS,
538 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
539 {"601", PROCESSOR_PPC601,
540 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
541 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
542 {"602", PROCESSOR_PPC603,
543 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
544 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
545 {"603", PROCESSOR_PPC603,
546 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
547 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
548 {"603e", PROCESSOR_PPC603,
549 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
550 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
551 {"ec603e", PROCESSOR_PPC603,
552 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
553 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
554 {"604", PROCESSOR_PPC604,
555 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
556 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
557 {"604e", PROCESSOR_PPC604e,
558 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
559 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
560 {"620", PROCESSOR_PPC620,
561 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
563 {"630", PROCESSOR_PPC630,
564 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
566 {"740", PROCESSOR_PPC750,
567 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
568 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
569 {"750", PROCESSOR_PPC750,
570 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
571 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
572 {"7400", PROCESSOR_PPC7400,
573 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
574 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
575 {"7450", PROCESSOR_PPC7450,
576 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
578 {"8540", PROCESSOR_PPC8540,
579 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
580 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
581 {"801", PROCESSOR_MPCCORE,
582 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"821", PROCESSOR_MPCCORE,
585 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
586 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
587 {"823", PROCESSOR_MPCCORE,
588 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
590 {"860", PROCESSOR_MPCCORE,
591 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
594 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
596 /* Save current -mmultiple/-mno-multiple status. */
597 int multiple = TARGET_MULTIPLE;
598 /* Save current -mstring/-mno-string status. */
599 int string = TARGET_STRING;
601 /* Identify the processor type. */
602 rs6000_select[0].string = default_cpu;
603 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
605 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
607 ptr = &rs6000_select[i];
608 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
610 for (j = 0; j < ptt_size; j++)
611 if (! strcmp (ptr->string, processor_target_table[j].name))
614 rs6000_cpu = processor_target_table[j].processor;
618 target_flags |= processor_target_table[j].target_enable;
619 target_flags &= ~processor_target_table[j].target_disable;
625 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
632 /* If we are optimizing big endian systems for space, use the load/store
633 multiple and string instructions. */
634 if (BYTES_BIG_ENDIAN && optimize_size)
635 target_flags |= MASK_MULTIPLE | MASK_STRING;
637 /* If -mmultiple or -mno-multiple was explicitly used, don't
638 override with the processor default */
639 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
640 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
642 /* If -mstring or -mno-string was explicitly used, don't override
643 with the processor default. */
644 if ((target_flags_explicit & MASK_STRING) != 0)
645 target_flags = (target_flags & ~MASK_STRING) | string;
647 /* Don't allow -mmultiple or -mstring on little endian systems
648 unless the cpu is a 750, because the hardware doesn't support the
649 instructions used in little endian mode, and causes an alignment
650 trap. The 750 does not cause an alignment trap (except when the
651 target is unaligned). */
653 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
657 target_flags &= ~MASK_MULTIPLE;
658 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
659 warning ("-mmultiple is not supported on little endian systems");
664 target_flags &= ~MASK_STRING;
665 if ((target_flags_explicit & MASK_STRING) != 0)
666 warning ("-mstring is not supported on little endian systems");
670 /* Set debug flags */
671 if (rs6000_debug_name)
673 if (! strcmp (rs6000_debug_name, "all"))
674 rs6000_debug_stack = rs6000_debug_arg = 1;
675 else if (! strcmp (rs6000_debug_name, "stack"))
676 rs6000_debug_stack = 1;
677 else if (! strcmp (rs6000_debug_name, "arg"))
678 rs6000_debug_arg = 1;
680 error ("unknown -mdebug-%s switch", rs6000_debug_name);
683 if (rs6000_traceback_name)
685 if (! strncmp (rs6000_traceback_name, "full", 4))
686 rs6000_traceback = traceback_full;
687 else if (! strncmp (rs6000_traceback_name, "part", 4))
688 rs6000_traceback = traceback_part;
689 else if (! strncmp (rs6000_traceback_name, "no", 2))
690 rs6000_traceback = traceback_none;
692 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
693 rs6000_traceback_name);
696 /* Set size of long double */
697 rs6000_long_double_type_size = 64;
698 if (rs6000_long_double_size_string)
701 int size = strtol (rs6000_long_double_size_string, &tail, 10);
702 if (*tail != '\0' || (size != 64 && size != 128))
703 error ("Unknown switch -mlong-double-%s",
704 rs6000_long_double_size_string);
706 rs6000_long_double_type_size = size;
709 /* Handle -mabi= options. */
710 rs6000_parse_abi_options ();
712 /* Handle generic -mFOO=YES/NO options. */
713 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
714 &rs6000_altivec_vrsave);
715 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
717 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
718 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
721 #ifdef SUBTARGET_OVERRIDE_OPTIONS
722 SUBTARGET_OVERRIDE_OPTIONS;
724 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
725 SUBSUBTARGET_OVERRIDE_OPTIONS;
730 /* The e500 does not have string instructions, and we set
731 MASK_STRING above when optimizing for size. */
732 if ((target_flags & MASK_STRING) != 0)
733 target_flags = target_flags & ~MASK_STRING;
735 else if (rs6000_select[1].string != NULL)
737 /* For the powerpc-eabispe configuration, we set all these by
738 default, so let's unset them if we manually set another
739 CPU that is not the E500. */
740 if (rs6000_abi_string == 0)
742 if (rs6000_spe_string == 0)
744 if (rs6000_float_gprs_string == 0)
745 rs6000_float_gprs = 0;
746 if (rs6000_isel_string == 0)
750 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
751 using TARGET_OPTIONS to handle a toggle switch, but we're out of
752 bits in target_flags so TARGET_SWITCHES cannot be used.
753 Assumption here is that rs6000_longcall_switch points into the
754 text of the complete option, rather than being a copy, so we can
755 scan back for the presence or absence of the no- modifier. */
756 if (rs6000_longcall_switch)
758 const char *base = rs6000_longcall_switch;
759 while (base[-1] != 'm') base--;
761 if (*rs6000_longcall_switch != '\0')
762 error ("invalid option `%s'", base);
763 rs6000_default_long_calls = (base[0] != 'n');
766 #ifdef TARGET_REGNAMES
767 /* If the user desires alternate register names, copy in the
768 alternate names now. */
770 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
773 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
774 If -maix-struct-return or -msvr4-struct-return was explicitly
775 used, don't override with the ABI default. */
776 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
778 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
779 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
781 target_flags |= MASK_AIX_STRUCT_RET;
784 if (TARGET_LONG_DOUBLE_128
785 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
786 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
788 /* Allocate an alias set for register saves & restores from stack. */
789 rs6000_sr_alias_set = new_alias_set ();
792 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
794 /* We can only guarantee the availability of DI pseudo-ops when
795 assembling for 64-bit targets. */
798 targetm.asm_out.aligned_op.di = NULL;
799 targetm.asm_out.unaligned_op.di = NULL;
802 /* Set maximum branch target alignment at two instructions, eight bytes. */
803 align_jumps_max_skip = 8;
804 align_loops_max_skip = 8;
806 /* Arrange to save and restore machine status around nested functions. */
807 init_machine_status = rs6000_init_machine_status;
810 /* Handle generic options of the form -mfoo=yes/no.
811 NAME is the option name.
812 VALUE is the option value.
813 FLAG is the pointer to the flag where to store a 1 or 0, depending on
814 whether the option value is 'yes' or 'no' respectively. */
816 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
820 else if (!strcmp (value, "yes"))
822 else if (!strcmp (value, "no"))
825 error ("unknown -m%s= option specified: '%s'", name, value);
828 /* Handle -mabi= options. */
830 rs6000_parse_abi_options ()
832 if (rs6000_abi_string == 0)
834 else if (! strcmp (rs6000_abi_string, "altivec"))
835 rs6000_altivec_abi = 1;
836 else if (! strcmp (rs6000_abi_string, "no-altivec"))
837 rs6000_altivec_abi = 0;
838 else if (! strcmp (rs6000_abi_string, "spe"))
842 error ("not configured for ABI: '%s'", rs6000_abi_string);
845 else if (! strcmp (rs6000_abi_string, "no-spe"))
848 error ("unknown ABI specified: '%s'", rs6000_abi_string);
852 optimization_options (level, size)
853 int level ATTRIBUTE_UNUSED;
854 int size ATTRIBUTE_UNUSED;
858 /* Do anything needed at the start of the asm file. */
861 rs6000_file_start (file, default_cpu)
863 const char *default_cpu;
867 const char *start = buffer;
868 struct rs6000_cpu_select *ptr;
870 if (flag_verbose_asm)
872 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
873 rs6000_select[0].string = default_cpu;
875 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
877 ptr = &rs6000_select[i];
878 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
880 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
886 switch (rs6000_sdata)
888 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
889 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
890 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
891 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
894 if (rs6000_sdata && g_switch_value)
896 fprintf (file, "%s -G %d", start, g_switch_value);
906 /* Return nonzero if this function is known to have a null epilogue. */
911 if (reload_completed)
913 rs6000_stack_t *info = rs6000_stack_info ();
915 if (info->first_gp_reg_save == 32
916 && info->first_fp_reg_save == 64
917 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
920 && info->vrsave_mask == 0
928 /* Returns 1 always. */
931 any_operand (op, mode)
932 rtx op ATTRIBUTE_UNUSED;
933 enum machine_mode mode ATTRIBUTE_UNUSED;
938 /* Returns 1 if op is the count register. */
940 count_register_operand (op, mode)
942 enum machine_mode mode ATTRIBUTE_UNUSED;
944 if (GET_CODE (op) != REG)
947 if (REGNO (op) == COUNT_REGISTER_REGNUM)
950 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
956 /* Returns 1 if op is an altivec register. */
958 altivec_register_operand (op, mode)
960 enum machine_mode mode ATTRIBUTE_UNUSED;
963 return (register_operand (op, mode)
964 && (GET_CODE (op) != REG
965 || REGNO (op) > FIRST_PSEUDO_REGISTER
966 || ALTIVEC_REGNO_P (REGNO (op))));
970 xer_operand (op, mode)
972 enum machine_mode mode ATTRIBUTE_UNUSED;
974 if (GET_CODE (op) != REG)
977 if (XER_REGNO_P (REGNO (op)))
983 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
984 by such constants completes more quickly. */
987 s8bit_cint_operand (op, mode)
989 enum machine_mode mode ATTRIBUTE_UNUSED;
991 return ( GET_CODE (op) == CONST_INT
992 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
995 /* Return 1 if OP is a constant that can fit in a D field. */
998 short_cint_operand (op, mode)
1000 enum machine_mode mode ATTRIBUTE_UNUSED;
1002 return (GET_CODE (op) == CONST_INT
1003 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1006 /* Similar for an unsigned D field. */
1009 u_short_cint_operand (op, mode)
1011 enum machine_mode mode ATTRIBUTE_UNUSED;
1013 return (GET_CODE (op) == CONST_INT
1014 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1017 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1020 non_short_cint_operand (op, mode)
1022 enum machine_mode mode ATTRIBUTE_UNUSED;
1024 return (GET_CODE (op) == CONST_INT
1025 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1028 /* Returns 1 if OP is a CONST_INT that is a positive value
1029 and an exact power of 2. */
1032 exact_log2_cint_operand (op, mode)
1034 enum machine_mode mode ATTRIBUTE_UNUSED;
1036 return (GET_CODE (op) == CONST_INT
1038 && exact_log2 (INTVAL (op)) >= 0);
1041 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1045 gpc_reg_operand (op, mode)
1047 enum machine_mode mode;
1049 return (register_operand (op, mode)
1050 && (GET_CODE (op) != REG
1051 || (REGNO (op) >= ARG_POINTER_REGNUM
1052 && !XER_REGNO_P (REGNO (op)))
1053 || REGNO (op) < MQ_REGNO));
1056 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1060 cc_reg_operand (op, mode)
1062 enum machine_mode mode;
1064 return (register_operand (op, mode)
1065 && (GET_CODE (op) != REG
1066 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1067 || CR_REGNO_P (REGNO (op))));
1070 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1071 CR field that isn't CR0. */
1074 cc_reg_not_cr0_operand (op, mode)
1076 enum machine_mode mode;
1078 return (register_operand (op, mode)
1079 && (GET_CODE (op) != REG
1080 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1081 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1084 /* Returns 1 if OP is either a constant integer valid for a D-field or
1085 a non-special register. If a register, it must be in the proper
1086 mode unless MODE is VOIDmode. */
1089 reg_or_short_operand (op, mode)
1091 enum machine_mode mode;
1093 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1096 /* Similar, except check if the negation of the constant would be
1097 valid for a D-field. */
1100 reg_or_neg_short_operand (op, mode)
1102 enum machine_mode mode;
1104 if (GET_CODE (op) == CONST_INT)
1105 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1107 return gpc_reg_operand (op, mode);
1110 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1111 a non-special register. If a register, it must be in the proper
1112 mode unless MODE is VOIDmode. */
1115 reg_or_aligned_short_operand (op, mode)
1117 enum machine_mode mode;
1119 if (gpc_reg_operand (op, mode))
1121 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1128 /* Return 1 if the operand is either a register or an integer whose
1129 high-order 16 bits are zero. */
1132 reg_or_u_short_operand (op, mode)
1134 enum machine_mode mode;
1136 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1139 /* Return 1 is the operand is either a non-special register or ANY
1140 constant integer. */
1143 reg_or_cint_operand (op, mode)
1145 enum machine_mode mode;
1147 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1150 /* Return 1 is the operand is either a non-special register or ANY
1151 32-bit signed constant integer. */
1154 reg_or_arith_cint_operand (op, mode)
1156 enum machine_mode mode;
1158 return (gpc_reg_operand (op, mode)
1159 || (GET_CODE (op) == CONST_INT
1160 #if HOST_BITS_PER_WIDE_INT != 32
1161 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1162 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1167 /* Return 1 is the operand is either a non-special register or a 32-bit
1168 signed constant integer valid for 64-bit addition. */
1171 reg_or_add_cint64_operand (op, mode)
1173 enum machine_mode mode;
1175 return (gpc_reg_operand (op, mode)
1176 || (GET_CODE (op) == CONST_INT
1177 #if HOST_BITS_PER_WIDE_INT == 32
1178 && INTVAL (op) < 0x7fff8000
1180 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1186 /* Return 1 is the operand is either a non-special register or a 32-bit
1187 signed constant integer valid for 64-bit subtraction. */
1190 reg_or_sub_cint64_operand (op, mode)
1192 enum machine_mode mode;
1194 return (gpc_reg_operand (op, mode)
1195 || (GET_CODE (op) == CONST_INT
1196 #if HOST_BITS_PER_WIDE_INT == 32
1197 && (- INTVAL (op)) < 0x7fff8000
1199 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1205 /* Return 1 is the operand is either a non-special register or ANY
1206 32-bit unsigned constant integer. */
1209 reg_or_logical_cint_operand (op, mode)
1211 enum machine_mode mode;
1213 if (GET_CODE (op) == CONST_INT)
1215 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1217 if (GET_MODE_BITSIZE (mode) <= 32)
1220 if (INTVAL (op) < 0)
1224 return ((INTVAL (op) & GET_MODE_MASK (mode)
1225 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1227 else if (GET_CODE (op) == CONST_DOUBLE)
1229 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1233 return CONST_DOUBLE_HIGH (op) == 0;
1236 return gpc_reg_operand (op, mode);
1239 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1242 got_operand (op, mode)
1244 enum machine_mode mode ATTRIBUTE_UNUSED;
1246 return (GET_CODE (op) == SYMBOL_REF
1247 || GET_CODE (op) == CONST
1248 || GET_CODE (op) == LABEL_REF);
1251 /* Return 1 if the operand is a simple references that can be loaded via
1252 the GOT (labels involving addition aren't allowed). */
1255 got_no_const_operand (op, mode)
1257 enum machine_mode mode ATTRIBUTE_UNUSED;
1259 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1262 /* Return the number of instructions it takes to form a constant in an
1263 integer register. */
1266 num_insns_constant_wide (value)
1267 HOST_WIDE_INT value;
1269 /* signed constant loadable with {cal|addi} */
1270 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1273 /* constant loadable with {cau|addis} */
1274 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1277 #if HOST_BITS_PER_WIDE_INT == 64
1278 else if (TARGET_POWERPC64)
1280 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1281 HOST_WIDE_INT high = value >> 31;
1283 if (high == 0 || high == -1)
1289 return num_insns_constant_wide (high) + 1;
1291 return (num_insns_constant_wide (high)
1292 + num_insns_constant_wide (low) + 1);
1301 num_insns_constant (op, mode)
1303 enum machine_mode mode;
1305 if (GET_CODE (op) == CONST_INT)
1307 #if HOST_BITS_PER_WIDE_INT == 64
1308 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1309 && mask64_operand (op, mode))
1313 return num_insns_constant_wide (INTVAL (op));
1316 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1321 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1322 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1323 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1326 else if (GET_CODE (op) == CONST_DOUBLE)
1332 int endian = (WORDS_BIG_ENDIAN == 0);
1334 if (mode == VOIDmode || mode == DImode)
1336 high = CONST_DOUBLE_HIGH (op);
1337 low = CONST_DOUBLE_LOW (op);
1341 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1342 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1344 low = l[1 - endian];
1348 return (num_insns_constant_wide (low)
1349 + num_insns_constant_wide (high));
1353 if (high == 0 && low >= 0)
1354 return num_insns_constant_wide (low);
1356 else if (high == -1 && low < 0)
1357 return num_insns_constant_wide (low);
1359 else if (mask64_operand (op, mode))
1363 return num_insns_constant_wide (high) + 1;
1366 return (num_insns_constant_wide (high)
1367 + num_insns_constant_wide (low) + 1);
1375 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1376 register with one instruction per word. We only do this if we can
1377 safely read CONST_DOUBLE_{LOW,HIGH}. */
1380 easy_fp_constant (op, mode)
1382 enum machine_mode mode;
1384 if (GET_CODE (op) != CONST_DOUBLE
1385 || GET_MODE (op) != mode
1386 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1389 /* Consider all constants with -msoft-float to be easy. */
1390 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1394 /* If we are using V.4 style PIC, consider all constants to be hard. */
1395 if (flag_pic && DEFAULT_ABI == ABI_V4)
1398 #ifdef TARGET_RELOCATABLE
1399 /* Similarly if we are using -mrelocatable, consider all constants
1401 if (TARGET_RELOCATABLE)
1410 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1411 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1413 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1414 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1415 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1416 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1419 else if (mode == DFmode)
1424 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1425 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1427 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1428 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1431 else if (mode == SFmode)
1436 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1437 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1439 return num_insns_constant_wide (l) == 1;
1442 else if (mode == DImode)
1443 return ((TARGET_POWERPC64
1444 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1445 || (num_insns_constant (op, DImode) <= 2));
1447 else if (mode == SImode)
1453 /* Return non zero if all elements of a vector have the same value. */
1456 easy_vector_same (op, mode)
1458 enum machine_mode mode ATTRIBUTE_UNUSED;
1462 units = CONST_VECTOR_NUNITS (op);
1464 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1465 for (i = 1; i < units; ++i)
1466 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1473 /* Return 1 if the operand is a CONST_INT and can be put into a
1474 register without using memory. */
1477 easy_vector_constant (op, mode)
1479 enum machine_mode mode;
1483 if (GET_CODE (op) != CONST_VECTOR
1488 if (zero_constant (op, mode)
1489 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1490 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1493 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1496 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1497 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1499 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1501 evmergelo r0, r0, r0
1504 I don't know how efficient it would be to allow bigger constants,
1505 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1506 instructions is better than a 64-bit memory load, but I don't
1507 have the e500 timing specs. */
1508 if (TARGET_SPE && mode == V2SImode
1509 && cst >= -0x7fff && cst <= 0x7fff
1510 && cst2 >= -0x7fff && cst <= 0x7fff)
1513 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1516 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1522 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1525 easy_vector_constant_add_self (op, mode)
1527 enum machine_mode mode;
1531 if (!easy_vector_constant (op, mode))
1534 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1536 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1540 output_vec_const_move (operands)
1544 enum machine_mode mode;
1550 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1551 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1552 mode = GET_MODE (dest);
1556 if (zero_constant (vec, mode))
1557 return "vxor %0,%0,%0";
1558 else if (EASY_VECTOR_15 (cst, vec, mode))
1560 operands[1] = GEN_INT (cst);
1564 return "vspltisw %0,%1";
1566 return "vspltish %0,%1";
1568 return "vspltisb %0,%1";
1573 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1581 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1582 pattern of V1DI, V4HI, and V2SF.
1584 FIXME: We should probabl return # and add post reload
1585 splitters for these, but this way is so easy ;-).
1587 operands[1] = GEN_INT (cst);
1588 operands[2] = GEN_INT (cst2);
1590 return "li %0,%1\n\tevmergelo %0,%0,%0";
1592 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1598 /* Return 1 if the operand is the constant 0. This works for scalars
1599 as well as vectors. */
1601 zero_constant (op, mode)
1603 enum machine_mode mode;
1605 return op == CONST0_RTX (mode);
1608 /* Return 1 if the operand is 0.0. */
1610 zero_fp_constant (op, mode)
1612 enum machine_mode mode;
1614 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1617 /* Return 1 if the operand is in volatile memory. Note that during
1618 the RTL generation phase, memory_operand does not return TRUE for
1619 volatile memory references. So this function allows us to
1620 recognize volatile references where its safe. */
1623 volatile_mem_operand (op, mode)
1625 enum machine_mode mode;
1627 if (GET_CODE (op) != MEM)
1630 if (!MEM_VOLATILE_P (op))
1633 if (mode != GET_MODE (op))
1636 if (reload_completed)
1637 return memory_operand (op, mode);
1639 if (reload_in_progress)
1640 return strict_memory_address_p (mode, XEXP (op, 0));
1642 return memory_address_p (mode, XEXP (op, 0));
1645 /* Return 1 if the operand is an offsettable memory operand. */
1648 offsettable_mem_operand (op, mode)
1650 enum machine_mode mode;
1652 return ((GET_CODE (op) == MEM)
1653 && offsettable_address_p (reload_completed || reload_in_progress,
1654 mode, XEXP (op, 0)));
1657 /* Return 1 if the operand is either an easy FP constant (see above) or
1661 mem_or_easy_const_operand (op, mode)
1663 enum machine_mode mode;
1665 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1668 /* Return 1 if the operand is either a non-special register or an item
1669 that can be used as the operand of a `mode' add insn. */
1672 add_operand (op, mode)
1674 enum machine_mode mode;
1676 if (GET_CODE (op) == CONST_INT)
1677 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1678 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1680 return gpc_reg_operand (op, mode);
1683 /* Return 1 if OP is a constant but not a valid add_operand. */
1686 non_add_cint_operand (op, mode)
1688 enum machine_mode mode ATTRIBUTE_UNUSED;
1690 return (GET_CODE (op) == CONST_INT
1691 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1692 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1695 /* Return 1 if the operand is a non-special register or a constant that
1696 can be used as the operand of an OR or XOR insn on the RS/6000. */
1699 logical_operand (op, mode)
1701 enum machine_mode mode;
1703 HOST_WIDE_INT opl, oph;
1705 if (gpc_reg_operand (op, mode))
1708 if (GET_CODE (op) == CONST_INT)
1710 opl = INTVAL (op) & GET_MODE_MASK (mode);
1712 #if HOST_BITS_PER_WIDE_INT <= 32
1713 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1717 else if (GET_CODE (op) == CONST_DOUBLE)
1719 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1722 opl = CONST_DOUBLE_LOW (op);
1723 oph = CONST_DOUBLE_HIGH (op);
1730 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1731 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1734 /* Return 1 if C is a constant that is not a logical operand (as
1735 above), but could be split into one. */
1738 non_logical_cint_operand (op, mode)
1740 enum machine_mode mode;
1742 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1743 && ! logical_operand (op, mode)
1744 && reg_or_logical_cint_operand (op, mode));
1747 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1748 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1749 Reject all ones and all zeros, since these should have been optimized
1750 away and confuse the making of MB and ME. */
1753 mask_operand (op, mode)
1755 enum machine_mode mode ATTRIBUTE_UNUSED;
1757 HOST_WIDE_INT c, lsb;
1759 if (GET_CODE (op) != CONST_INT)
1764 /* Fail in 64-bit mode if the mask wraps around because the upper
1765 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1766 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1769 /* We don't change the number of transitions by inverting,
1770 so make sure we start with the LS bit zero. */
1774 /* Reject all zeros or all ones. */
1778 /* Find the first transition. */
1781 /* Invert to look for a second transition. */
1784 /* Erase first transition. */
1787 /* Find the second transition (if any). */
1790 /* Match if all the bits above are 1's (or c is zero). */
1794 /* Return 1 for the PowerPC64 rlwinm corner case. */
1797 mask_operand_wrap (op, mode)
1799 enum machine_mode mode ATTRIBUTE_UNUSED;
1801 HOST_WIDE_INT c, lsb;
1803 if (GET_CODE (op) != CONST_INT)
1808 if ((c & 0x80000001) != 0x80000001)
1822 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1823 It is if there are no more than one 1->0 or 0->1 transitions.
1824 Reject all zeros, since zero should have been optimized away and
1825 confuses the making of MB and ME. */
1828 mask64_operand (op, mode)
1830 enum machine_mode mode ATTRIBUTE_UNUSED;
1832 if (GET_CODE (op) == CONST_INT)
1834 HOST_WIDE_INT c, lsb;
1838 /* Reject all zeros. */
1842 /* We don't change the number of transitions by inverting,
1843 so make sure we start with the LS bit zero. */
1847 /* Find the transition, and check that all bits above are 1's. */
1850 /* Match if all the bits above are 1's (or c is zero). */
1856 /* Like mask64_operand, but allow up to three transitions. This
1857 predicate is used by insn patterns that generate two rldicl or
1858 rldicr machine insns. */
1861 mask64_2_operand (op, mode)
1863 enum machine_mode mode ATTRIBUTE_UNUSED;
1865 if (GET_CODE (op) == CONST_INT)
1867 HOST_WIDE_INT c, lsb;
1871 /* Disallow all zeros. */
1875 /* We don't change the number of transitions by inverting,
1876 so make sure we start with the LS bit zero. */
1880 /* Find the first transition. */
1883 /* Invert to look for a second transition. */
1886 /* Erase first transition. */
1889 /* Find the second transition. */
1892 /* Invert to look for a third transition. */
1895 /* Erase second transition. */
1898 /* Find the third transition (if any). */
1901 /* Match if all the bits above are 1's (or c is zero). */
1907 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1908 implement ANDing by the mask IN. */
1910 build_mask64_2_operands (in, out)
1914 #if HOST_BITS_PER_WIDE_INT >= 64
1915 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1918 if (GET_CODE (in) != CONST_INT)
1924 /* Assume c initially something like 0x00fff000000fffff. The idea
1925 is to rotate the word so that the middle ^^^^^^ group of zeros
1926 is at the MS end and can be cleared with an rldicl mask. We then
1927 rotate back and clear off the MS ^^ group of zeros with a
1929 c = ~c; /* c == 0xff000ffffff00000 */
1930 lsb = c & -c; /* lsb == 0x0000000000100000 */
1931 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1932 c = ~c; /* c == 0x00fff000000fffff */
1933 c &= -lsb; /* c == 0x00fff00000000000 */
1934 lsb = c & -c; /* lsb == 0x0000100000000000 */
1935 c = ~c; /* c == 0xff000fffffffffff */
1936 c &= -lsb; /* c == 0xff00000000000000 */
1938 while ((lsb >>= 1) != 0)
1939 shift++; /* shift == 44 on exit from loop */
1940 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1941 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1942 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1946 /* Assume c initially something like 0xff000f0000000000. The idea
1947 is to rotate the word so that the ^^^ middle group of zeros
1948 is at the LS end and can be cleared with an rldicr mask. We then
1949 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1951 lsb = c & -c; /* lsb == 0x0000010000000000 */
1952 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1953 c = ~c; /* c == 0x00fff0ffffffffff */
1954 c &= -lsb; /* c == 0x00fff00000000000 */
1955 lsb = c & -c; /* lsb == 0x0000100000000000 */
1956 c = ~c; /* c == 0xff000fffffffffff */
1957 c &= -lsb; /* c == 0xff00000000000000 */
1959 while ((lsb >>= 1) != 0)
1960 shift++; /* shift == 44 on exit from loop */
1961 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1962 m1 >>= shift; /* m1 == 0x0000000000000fff */
1963 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1966 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1967 masks will be all 1's. We are guaranteed more than one transition. */
1968 out[0] = GEN_INT (64 - shift);
1969 out[1] = GEN_INT (m1);
1970 out[2] = GEN_INT (shift);
1971 out[3] = GEN_INT (m2);
1979 /* Return 1 if the operand is either a non-special register or a constant
1980 that can be used as the operand of a PowerPC64 logical AND insn. */
1983 and64_operand (op, mode)
1985 enum machine_mode mode;
1987 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1988 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1990 return (logical_operand (op, mode) || mask64_operand (op, mode));
1993 /* Like the above, but also match constants that can be implemented
1994 with two rldicl or rldicr insns. */
1997 and64_2_operand (op, mode)
1999 enum machine_mode mode;
2001 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2002 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2004 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2007 /* Return 1 if the operand is either a non-special register or a
2008 constant that can be used as the operand of an RS/6000 logical AND insn. */
2011 and_operand (op, mode)
2013 enum machine_mode mode;
2015 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2016 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2018 return (logical_operand (op, mode) || mask_operand (op, mode));
2021 /* Return 1 if the operand is a general register or memory operand. */
2024 reg_or_mem_operand (op, mode)
2026 enum machine_mode mode;
2028 return (gpc_reg_operand (op, mode)
2029 || memory_operand (op, mode)
2030 || volatile_mem_operand (op, mode));
2033 /* Return 1 if the operand is a general register or memory operand without
2034 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2038 lwa_operand (op, mode)
2040 enum machine_mode mode;
2044 if (reload_completed && GET_CODE (inner) == SUBREG)
2045 inner = SUBREG_REG (inner);
2047 return gpc_reg_operand (inner, mode)
2048 || (memory_operand (inner, mode)
2049 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2050 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2051 && (GET_CODE (XEXP (inner, 0)) != PLUS
2052 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2053 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2056 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2059 symbol_ref_operand (op, mode)
2061 enum machine_mode mode;
2063 if (mode != VOIDmode && GET_MODE (op) != mode)
2066 return (GET_CODE (op) == SYMBOL_REF);
2069 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2070 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2073 call_operand (op, mode)
2075 enum machine_mode mode;
2077 if (mode != VOIDmode && GET_MODE (op) != mode)
2080 return (GET_CODE (op) == SYMBOL_REF
2081 || (GET_CODE (op) == REG
2082 && (REGNO (op) == LINK_REGISTER_REGNUM
2083 || REGNO (op) == COUNT_REGISTER_REGNUM
2084 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2087 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2088 this file and the function is not weakly defined. */
2091 current_file_function_operand (op, mode)
2093 enum machine_mode mode ATTRIBUTE_UNUSED;
2095 return (GET_CODE (op) == SYMBOL_REF
2096 && (SYMBOL_REF_FLAG (op)
2097 || (op == XEXP (DECL_RTL (current_function_decl), 0)
2098 && ! DECL_WEAK (current_function_decl))));
2101 /* Return 1 if this operand is a valid input for a move insn. */
2104 input_operand (op, mode)
2106 enum machine_mode mode;
2108 /* Memory is always valid. */
2109 if (memory_operand (op, mode))
2112 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2113 if (GET_CODE (op) == CONSTANT_P_RTX)
2116 /* For floating-point, easy constants are valid. */
2117 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2119 && easy_fp_constant (op, mode))
2122 /* Allow any integer constant. */
2123 if (GET_MODE_CLASS (mode) == MODE_INT
2124 && (GET_CODE (op) == CONST_INT
2125 || GET_CODE (op) == CONST_DOUBLE))
2128 /* Allow easy vector constants. */
2129 if (GET_CODE (op) == CONST_VECTOR
2130 && easy_vector_constant (op, mode))
2133 /* For floating-point or multi-word mode, the only remaining valid type
2135 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2136 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2137 return register_operand (op, mode);
2139 /* The only cases left are integral modes one word or smaller (we
2140 do not get called for MODE_CC values). These can be in any
2142 if (register_operand (op, mode))
2145 /* A SYMBOL_REF referring to the TOC is valid. */
2146 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
2149 /* A constant pool expression (relative to the TOC) is valid */
2150 if (TOC_RELATIVE_EXPR_P (op))
2153 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2155 if (DEFAULT_ABI == ABI_V4
2156 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2157 && small_data_operand (op, Pmode))
2163 /* Return 1 for an operand in small memory on V.4/eabi. */
2166 small_data_operand (op, mode)
2167 rtx op ATTRIBUTE_UNUSED;
2168 enum machine_mode mode ATTRIBUTE_UNUSED;
2173 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2176 if (DEFAULT_ABI != ABI_V4)
2179 if (GET_CODE (op) == SYMBOL_REF)
2182 else if (GET_CODE (op) != CONST
2183 || GET_CODE (XEXP (op, 0)) != PLUS
2184 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2185 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2190 rtx sum = XEXP (op, 0);
2191 HOST_WIDE_INT summand;
2193 /* We have to be careful here, because it is the referenced address
2194 that must be 32k from _SDA_BASE_, not just the symbol. */
2195 summand = INTVAL (XEXP (sum, 1));
2196 if (summand < 0 || summand > g_switch_value)
2199 sym_ref = XEXP (sum, 0);
2202 if (*XSTR (sym_ref, 0) != '@')
2213 constant_pool_expr_1 (op, have_sym, have_toc)
2218 switch (GET_CODE(op))
2221 if (CONSTANT_POOL_ADDRESS_P (op))
2223 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2231 else if (! strcmp (XSTR (op, 0), toc_label_name))
2240 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2241 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2243 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2252 constant_pool_expr_p (op)
2257 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2261 toc_relative_expr_p (op)
2266 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2269 /* Try machine-dependent ways of modifying an illegitimate address
2270 to be legitimate. If we find one, return the new, valid address.
2271 This is used from only one place: `memory_address' in explow.c.
2273 OLDX is the address as it was before break_out_memory_refs was
2274 called. In some cases it is useful to look at this to decide what
2277 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2279 It is always safe for this function to do nothing. It exists to
2280 recognize opportunities to optimize the output.
2282 On RS/6000, first check for the sum of a register with a constant
2283 integer that is out of range. If so, generate code to add the
2284 constant with the low-order 16 bits masked to the register and force
2285 this result into another register (this can be done with `cau').
2286 Then generate an address of REG+(CONST&0xffff), allowing for the
2287 possibility of bit 16 being a one.
2289 Then check for the sum of a register and something not constant, try to
2290 load the other things into a register and return the sum. */
2292 rs6000_legitimize_address (x, oldx, mode)
2294 rtx oldx ATTRIBUTE_UNUSED;
2295 enum machine_mode mode;
2297 if (GET_CODE (x) == PLUS
2298 && GET_CODE (XEXP (x, 0)) == REG
2299 && GET_CODE (XEXP (x, 1)) == CONST_INT
2300 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2302 HOST_WIDE_INT high_int, low_int;
2304 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2305 high_int = INTVAL (XEXP (x, 1)) - low_int;
2306 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2307 GEN_INT (high_int)), 0);
2308 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2310 else if (GET_CODE (x) == PLUS
2311 && GET_CODE (XEXP (x, 0)) == REG
2312 && GET_CODE (XEXP (x, 1)) != CONST_INT
2313 && GET_MODE_NUNITS (mode) == 1
2314 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2316 || (mode != DFmode && mode != TFmode))
2317 && (TARGET_POWERPC64 || mode != DImode)
2320 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2321 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2323 else if (ALTIVEC_VECTOR_MODE (mode))
2327 /* Make sure both operands are registers. */
2328 if (GET_CODE (x) == PLUS)
2329 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2330 force_reg (Pmode, XEXP (x, 1)));
2332 reg = force_reg (Pmode, x);
2335 else if (SPE_VECTOR_MODE (mode))
2337 /* We accept [reg + reg] and [reg + OFFSET]. */
2339 if (GET_CODE (x) == PLUS)
2341 rtx op1 = XEXP (x, 0);
2342 rtx op2 = XEXP (x, 1);
2344 op1 = force_reg (Pmode, op1);
2346 if (GET_CODE (op2) != REG
2347 && (GET_CODE (op2) != CONST_INT
2348 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2349 op2 = force_reg (Pmode, op2);
2351 return gen_rtx_PLUS (Pmode, op1, op2);
2354 return force_reg (Pmode, x);
2360 && GET_CODE (x) != CONST_INT
2361 && GET_CODE (x) != CONST_DOUBLE
2363 && GET_MODE_NUNITS (mode) == 1
2364 && (GET_MODE_BITSIZE (mode) <= 32
2365 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2367 rtx reg = gen_reg_rtx (Pmode);
2368 emit_insn (gen_elf_high (reg, (x)));
2369 return gen_rtx_LO_SUM (Pmode, reg, (x));
2371 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2374 && ! MACHO_DYNAMIC_NO_PIC_P
2376 && GET_CODE (x) != CONST_INT
2377 && GET_CODE (x) != CONST_DOUBLE
2379 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2383 rtx reg = gen_reg_rtx (Pmode);
2384 emit_insn (gen_macho_high (reg, (x)));
2385 return gen_rtx_LO_SUM (Pmode, reg, (x));
2388 && CONSTANT_POOL_EXPR_P (x)
2389 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2391 return create_TOC_reference (x);
2397 /* The convention appears to be to define this wherever it is used.
2398 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2399 is now used here. */
2400 #ifndef REG_MODE_OK_FOR_BASE_P
2401 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2404 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2405 replace the input X, or the original X if no replacement is called for.
2406 The output parameter *WIN is 1 if the calling macro should goto WIN,
2409 For RS/6000, we wish to handle large displacements off a base
2410 register by splitting the addend across an addiu/addis and the mem insn.
2411 This cuts number of extra insns needed from 3 to 1.
2413 On Darwin, we use this to generate code for floating point constants.
2414 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2415 The Darwin code is inside #if TARGET_MACHO because only then is
2416 machopic_function_base_name() defined. */
2418 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2420 enum machine_mode mode;
2423 int ind_levels ATTRIBUTE_UNUSED;
2426 /* We must recognize output that we have already generated ourselves. */
2427 if (GET_CODE (x) == PLUS
2428 && GET_CODE (XEXP (x, 0)) == PLUS
2429 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2430 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2431 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2433 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2434 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2435 opnum, (enum reload_type)type);
2441 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2442 && GET_CODE (x) == LO_SUM
2443 && GET_CODE (XEXP (x, 0)) == PLUS
2444 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2445 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2446 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2447 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2448 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2449 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2450 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2452 /* Result of previous invocation of this function on Darwin
2453 floating point constant. */
2454 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2455 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2456 opnum, (enum reload_type)type);
2461 if (GET_CODE (x) == PLUS
2462 && GET_CODE (XEXP (x, 0)) == REG
2463 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2464 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2465 && GET_CODE (XEXP (x, 1)) == CONST_INT
2466 && !SPE_VECTOR_MODE (mode)
2467 && !ALTIVEC_VECTOR_MODE (mode))
2469 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2470 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2472 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2474 /* Check for 32-bit overflow. */
2475 if (high + low != val)
2481 /* Reload the high part into a base reg; leave the low part
2482 in the mem directly. */
2484 x = gen_rtx_PLUS (GET_MODE (x),
2485 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2489 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2490 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2491 opnum, (enum reload_type)type);
2496 if (GET_CODE (x) == SYMBOL_REF
2497 && DEFAULT_ABI == ABI_DARWIN
2498 && !ALTIVEC_VECTOR_MODE (mode)
2501 /* Darwin load of floating point constant. */
2502 rtx offset = gen_rtx (CONST, Pmode,
2503 gen_rtx (MINUS, Pmode, x,
2504 gen_rtx (SYMBOL_REF, Pmode,
2505 machopic_function_base_name ())));
2506 x = gen_rtx (LO_SUM, GET_MODE (x),
2507 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2508 gen_rtx (HIGH, Pmode, offset)), offset);
2509 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2510 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2511 opnum, (enum reload_type)type);
2515 if (GET_CODE (x) == SYMBOL_REF
2516 && DEFAULT_ABI == ABI_DARWIN
2517 && !ALTIVEC_VECTOR_MODE (mode)
2518 && MACHO_DYNAMIC_NO_PIC_P)
2520 /* Darwin load of floating point constant. */
2521 x = gen_rtx (LO_SUM, GET_MODE (x),
2522 gen_rtx (HIGH, Pmode, x), x);
2523 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2524 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2525 opnum, (enum reload_type)type);
2531 && CONSTANT_POOL_EXPR_P (x)
2532 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2534 (x) = create_TOC_reference (x);
2542 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2543 that is a valid memory address for an instruction.
2544 The MODE argument is the machine mode for the MEM expression
2545 that wants to use this address.
2547 On the RS/6000, there are four valid address: a SYMBOL_REF that
2548 refers to a constant pool entry of an address (or the sum of it
2549 plus a constant), a short (16-bit signed) constant plus a register,
2550 the sum of two registers, or a register indirect, possibly with an
2551 auto-increment. For DFmode and DImode with a constant plus register,
2552 we must ensure that both words are addressable or PowerPC64 with offset
2555 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2556 32-bit DImode, TImode), indexed addressing cannot be used because
2557 adjacent memory cells are accessed by adding word-sized offsets
2558 during assembly output. */
2560 rs6000_legitimate_address (mode, x, reg_ok_strict)
2561 enum machine_mode mode;
2565 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2567 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2568 && !ALTIVEC_VECTOR_MODE (mode)
2569 && !SPE_VECTOR_MODE (mode)
2571 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2573 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2575 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2577 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2579 && GET_CODE (x) == PLUS
2580 && GET_CODE (XEXP (x, 0)) == REG
2581 && XEXP (x, 0) == virtual_stack_vars_rtx
2582 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2584 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2587 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2589 || (mode != DFmode && mode != TFmode))
2590 && (TARGET_POWERPC64 || mode != DImode)
2591 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2593 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2598 /* Try to output insns to set TARGET equal to the constant C if it can
2599 be done in less than N insns. Do all computations in MODE.
2600 Returns the place where the output has been placed if it can be
2601 done and the insns have been emitted. If it would take more than N
2602 insns, zero is returned and no insns and emitted. */
2605 rs6000_emit_set_const (dest, mode, source, n)
2607 enum machine_mode mode;
2608 int n ATTRIBUTE_UNUSED;
2610 rtx result, insn, set;
2611 HOST_WIDE_INT c0, c1;
2613 if (mode == QImode || mode == HImode)
2616 dest = gen_reg_rtx (mode);
2617 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2620 else if (mode == SImode)
2622 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2624 emit_insn (gen_rtx_SET (VOIDmode, result,
2625 GEN_INT (INTVAL (source)
2626 & (~ (HOST_WIDE_INT) 0xffff))));
2627 emit_insn (gen_rtx_SET (VOIDmode, dest,
2628 gen_rtx_IOR (SImode, result,
2629 GEN_INT (INTVAL (source) & 0xffff))));
2632 else if (mode == DImode)
2634 if (GET_CODE (source) == CONST_INT)
2636 c0 = INTVAL (source);
2639 else if (GET_CODE (source) == CONST_DOUBLE)
2641 #if HOST_BITS_PER_WIDE_INT >= 64
2642 c0 = CONST_DOUBLE_LOW (source);
2645 c0 = CONST_DOUBLE_LOW (source);
2646 c1 = CONST_DOUBLE_HIGH (source);
2652 result = rs6000_emit_set_long_const (dest, c0, c1);
2657 insn = get_last_insn ();
2658 set = single_set (insn);
2659 if (! CONSTANT_P (SET_SRC (set)))
2660 set_unique_reg_note (insn, REG_EQUAL, source);
2665 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2666 fall back to a straight forward decomposition. We do this to avoid
2667 exponential run times encountered when looking for longer sequences
2668 with rs6000_emit_set_const. */
2670 rs6000_emit_set_long_const (dest, c1, c2)
2672 HOST_WIDE_INT c1, c2;
2674 if (!TARGET_POWERPC64)
2676 rtx operand1, operand2;
2678 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2680 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2682 emit_move_insn (operand1, GEN_INT (c1));
2683 emit_move_insn (operand2, GEN_INT (c2));
2687 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2690 ud2 = (c1 & 0xffff0000) >> 16;
2691 #if HOST_BITS_PER_WIDE_INT >= 64
2695 ud4 = (c2 & 0xffff0000) >> 16;
2697 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2698 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2701 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2703 emit_move_insn (dest, GEN_INT (ud1));
2706 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2707 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2710 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2713 emit_move_insn (dest, GEN_INT (ud2 << 16));
2715 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2717 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2718 || (ud4 == 0 && ! (ud3 & 0x8000)))
2721 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2724 emit_move_insn (dest, GEN_INT (ud3 << 16));
2727 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2728 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2730 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2735 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2738 emit_move_insn (dest, GEN_INT (ud4 << 16));
2741 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2743 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2745 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2746 GEN_INT (ud2 << 16)));
2748 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2754 /* Emit a move from SOURCE to DEST in mode MODE. */
2756 rs6000_emit_move (dest, source, mode)
2759 enum machine_mode mode;
2763 operands[1] = source;
2765 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2766 if (GET_CODE (operands[1]) == CONST_DOUBLE
2767 && ! FLOAT_MODE_P (mode)
2768 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2770 /* FIXME. This should never happen. */
2771 /* Since it seems that it does, do the safe thing and convert
2773 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2775 if (GET_CODE (operands[1]) == CONST_DOUBLE
2776 && ! FLOAT_MODE_P (mode)
2777 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2778 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2779 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2780 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2783 /* Check if GCC is setting up a block move that will end up using FP
2784 registers as temporaries. We must make sure this is acceptable. */
2785 if (GET_CODE (operands[0]) == MEM
2786 && GET_CODE (operands[1]) == MEM
2788 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2789 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2790 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2791 ? 32 : MEM_ALIGN (operands[0])))
2792 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2794 : MEM_ALIGN (operands[1]))))
2795 && ! MEM_VOLATILE_P (operands [0])
2796 && ! MEM_VOLATILE_P (operands [1]))
2798 emit_move_insn (adjust_address (operands[0], SImode, 0),
2799 adjust_address (operands[1], SImode, 0));
2800 emit_move_insn (adjust_address (operands[0], SImode, 4),
2801 adjust_address (operands[1], SImode, 4));
2805 if (!no_new_pseudos)
2807 if (GET_CODE (operands[1]) == MEM && optimize > 0
2808 && (mode == QImode || mode == HImode || mode == SImode)
2809 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2811 rtx reg = gen_reg_rtx (word_mode);
2813 emit_insn (gen_rtx_SET (word_mode, reg,
2814 gen_rtx_ZERO_EXTEND (word_mode,
2816 operands[1] = gen_lowpart (mode, reg);
2818 if (GET_CODE (operands[0]) != REG)
2819 operands[1] = force_reg (mode, operands[1]);
2822 if (mode == SFmode && ! TARGET_POWERPC
2823 && TARGET_HARD_FLOAT && TARGET_FPRS
2824 && GET_CODE (operands[0]) == MEM)
2828 if (reload_in_progress || reload_completed)
2829 regnum = true_regnum (operands[1]);
2830 else if (GET_CODE (operands[1]) == REG)
2831 regnum = REGNO (operands[1]);
2835 /* If operands[1] is a register, on POWER it may have
2836 double-precision data in it, so truncate it to single
2838 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2841 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2842 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2843 operands[1] = newreg;
2847 /* Handle the case where reload calls us with an invalid address;
2848 and the case of CONSTANT_P_RTX. */
2849 if (!ALTIVEC_VECTOR_MODE (mode)
2850 && (! general_operand (operands[1], mode)
2851 || ! nonimmediate_operand (operands[0], mode)
2852 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2854 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2858 /* FIXME: In the long term, this switch statement should go away
2859 and be replaced by a sequence of tests based on things like
2865 if (CONSTANT_P (operands[1])
2866 && GET_CODE (operands[1]) != CONST_INT)
2867 operands[1] = force_const_mem (mode, operands[1]);
2873 if (CONSTANT_P (operands[1])
2874 && ! easy_fp_constant (operands[1], mode))
2875 operands[1] = force_const_mem (mode, operands[1]);
2886 if (CONSTANT_P (operands[1])
2887 && !easy_vector_constant (operands[1], mode))
2888 operands[1] = force_const_mem (mode, operands[1]);
2893 /* Use default pattern for address of ELF small data */
2896 && DEFAULT_ABI == ABI_V4
2897 && (GET_CODE (operands[1]) == SYMBOL_REF
2898 || GET_CODE (operands[1]) == CONST)
2899 && small_data_operand (operands[1], mode))
2901 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2905 if (DEFAULT_ABI == ABI_V4
2906 && mode == Pmode && mode == SImode
2907 && flag_pic == 1 && got_operand (operands[1], mode))
2909 emit_insn (gen_movsi_got (operands[0], operands[1]));
2913 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2917 && CONSTANT_P (operands[1])
2918 && GET_CODE (operands[1]) != HIGH
2919 && GET_CODE (operands[1]) != CONST_INT)
2921 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2923 /* If this is a function address on -mcall-aixdesc,
2924 convert it to the address of the descriptor. */
2925 if (DEFAULT_ABI == ABI_AIX
2926 && GET_CODE (operands[1]) == SYMBOL_REF
2927 && XSTR (operands[1], 0)[0] == '.')
2929 const char *name = XSTR (operands[1], 0);
2931 while (*name == '.')
2933 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2934 CONSTANT_POOL_ADDRESS_P (new_ref)
2935 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2936 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2937 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2938 operands[1] = new_ref;
2941 if (DEFAULT_ABI == ABI_DARWIN)
2944 if (MACHO_DYNAMIC_NO_PIC_P)
2946 /* Take care of any required data indirection. */
2947 operands[1] = rs6000_machopic_legitimize_pic_address (
2948 operands[1], mode, operands[0]);
2949 if (operands[0] != operands[1])
2950 emit_insn (gen_rtx_SET (VOIDmode,
2951 operands[0], operands[1]));
2955 emit_insn (gen_macho_high (target, operands[1]));
2956 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2960 emit_insn (gen_elf_high (target, operands[1]));
2961 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2965 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2966 and we have put it in the TOC, we just need to make a TOC-relative
2969 && GET_CODE (operands[1]) == SYMBOL_REF
2970 && CONSTANT_POOL_EXPR_P (operands[1])
2971 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2972 get_pool_mode (operands[1])))
2974 operands[1] = create_TOC_reference (operands[1]);
2976 else if (mode == Pmode
2977 && CONSTANT_P (operands[1])
2978 && ((GET_CODE (operands[1]) != CONST_INT
2979 && ! easy_fp_constant (operands[1], mode))
2980 || (GET_CODE (operands[1]) == CONST_INT
2981 && num_insns_constant (operands[1], mode) > 2)
2982 || (GET_CODE (operands[0]) == REG
2983 && FP_REGNO_P (REGNO (operands[0]))))
2984 && GET_CODE (operands[1]) != HIGH
2985 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2986 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2988 /* Emit a USE operation so that the constant isn't deleted if
2989 expensive optimizations are turned on because nobody
2990 references it. This should only be done for operands that
2991 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2992 This should not be done for operands that contain LABEL_REFs.
2993 For now, we just handle the obvious case. */
2994 if (GET_CODE (operands[1]) != LABEL_REF)
2995 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2998 /* Darwin uses a special PIC legitimizer. */
2999 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3002 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3004 if (operands[0] != operands[1])
3005 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3010 /* If we are to limit the number of things we put in the TOC and
3011 this is a symbol plus a constant we can add in one insn,
3012 just put the symbol in the TOC and add the constant. Don't do
3013 this if reload is in progress. */
3014 if (GET_CODE (operands[1]) == CONST
3015 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3016 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3017 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3018 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3019 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3020 && ! side_effects_p (operands[0]))
3023 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3024 rtx other = XEXP (XEXP (operands[1], 0), 1);
3026 sym = force_reg (mode, sym);
3028 emit_insn (gen_addsi3 (operands[0], sym, other));
3030 emit_insn (gen_adddi3 (operands[0], sym, other));
3034 operands[1] = force_const_mem (mode, operands[1]);
3037 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
3038 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3039 get_pool_constant (XEXP (operands[1], 0)),
3040 get_pool_mode (XEXP (operands[1], 0))))
3043 = gen_rtx_MEM (mode,
3044 create_TOC_reference (XEXP (operands[1], 0)));
3045 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3046 RTX_UNCHANGING_P (operands[1]) = 1;
3052 if (GET_CODE (operands[0]) == MEM
3053 && GET_CODE (XEXP (operands[0], 0)) != REG
3054 && ! reload_in_progress)
3056 = replace_equiv_address (operands[0],
3057 copy_addr_to_reg (XEXP (operands[0], 0)));
3059 if (GET_CODE (operands[1]) == MEM
3060 && GET_CODE (XEXP (operands[1], 0)) != REG
3061 && ! reload_in_progress)
3063 = replace_equiv_address (operands[1],
3064 copy_addr_to_reg (XEXP (operands[1], 0)));
3067 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3069 gen_rtx_SET (VOIDmode,
3070 operands[0], operands[1]),
3071 gen_rtx_CLOBBER (VOIDmode,
3072 gen_rtx_SCRATCH (SImode)))));
3081 /* Above, we may have called force_const_mem which may have returned
3082 an invalid address. If we can, fix this up; otherwise, reload will
3083 have to deal with it. */
3084 if (GET_CODE (operands[1]) == MEM
3085 && ! memory_address_p (mode, XEXP (operands[1], 0))
3086 && ! reload_in_progress)
3087 operands[1] = adjust_address (operands[1], mode, 0);
3089 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3093 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3094 for a call to a function whose data type is FNTYPE.
3095 For a library call, FNTYPE is 0.
3097 For incoming args we set the number of arguments in the prototype large
3098 so we never return a PARALLEL. */
3101 init_cumulative_args (cum, fntype, libname, incoming)
3102 CUMULATIVE_ARGS *cum;
3104 rtx libname ATTRIBUTE_UNUSED;
3107 static CUMULATIVE_ARGS zero_cumulative;
3109 *cum = zero_cumulative;
3111 cum->fregno = FP_ARG_MIN_REG;
3112 cum->vregno = ALTIVEC_ARG_MIN_REG;
3113 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3114 cum->call_cookie = CALL_NORMAL;
3115 cum->sysv_gregno = GP_ARG_MIN_REG;
3118 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3120 else if (cum->prototype)
3121 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3122 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3123 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3126 cum->nargs_prototype = 0;
3128 cum->orig_nargs = cum->nargs_prototype;
3130 /* Check for a longcall attribute. */
3132 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3133 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3134 cum->call_cookie = CALL_LONG;
3136 if (TARGET_DEBUG_ARG)
3138 fprintf (stderr, "\ninit_cumulative_args:");
3141 tree ret_type = TREE_TYPE (fntype);
3142 fprintf (stderr, " ret code = %s,",
3143 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3146 if (cum->call_cookie & CALL_LONG)
3147 fprintf (stderr, " longcall,");
3149 fprintf (stderr, " proto = %d, nargs = %d\n",
3150 cum->prototype, cum->nargs_prototype);
3154 /* If defined, a C expression which determines whether, and in which
3155 direction, to pad out an argument with extra space. The value
3156 should be of type `enum direction': either `upward' to pad above
3157 the argument, `downward' to pad below, or `none' to inhibit
3160 For the AIX ABI structs are always stored left shifted in their
3164 function_arg_padding (mode, type)
3165 enum machine_mode mode;
3168 if (type != 0 && AGGREGATE_TYPE_P (type))
3171 /* This is the default definition. */
3172 return (! BYTES_BIG_ENDIAN
3175 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3176 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3177 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3178 ? downward : upward));
3181 /* If defined, a C expression that gives the alignment boundary, in bits,
3182 of an argument with the specified mode and type. If it is not defined,
3183 PARM_BOUNDARY is used for all arguments.
3185 V.4 wants long longs to be double word aligned. */
3188 function_arg_boundary (mode, type)
3189 enum machine_mode mode;
3190 tree type ATTRIBUTE_UNUSED;
3192 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3194 else if (SPE_VECTOR_MODE (mode))
3196 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3199 return PARM_BOUNDARY;
3202 /* Update the data in CUM to advance over an argument
3203 of mode MODE and data type TYPE.
3204 (TYPE is null for libcalls where that information may not be available.) */
3207 function_arg_advance (cum, mode, type, named)
3208 CUMULATIVE_ARGS *cum;
3209 enum machine_mode mode;
3213 cum->nargs_prototype--;
3215 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3217 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3220 cum->words += RS6000_ARG_SIZE (mode, type);
3222 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3223 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3225 else if (DEFAULT_ABI == ABI_V4)
3227 if (TARGET_HARD_FLOAT && TARGET_FPRS
3228 && (mode == SFmode || mode == DFmode))
3230 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3235 cum->words += cum->words & 1;
3236 cum->words += RS6000_ARG_SIZE (mode, type);
3242 int gregno = cum->sysv_gregno;
3244 /* Aggregates and IEEE quad get passed by reference. */
3245 if ((type && AGGREGATE_TYPE_P (type))
3249 n_words = RS6000_ARG_SIZE (mode, type);
3251 /* Long long and SPE vectors are put in odd registers. */
3252 if (n_words == 2 && (gregno & 1) == 0)
3255 /* Long long and SPE vectors are not split between registers
3257 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3259 /* Long long is aligned on the stack. */
3261 cum->words += cum->words & 1;
3262 cum->words += n_words;
3265 /* Note: continuing to accumulate gregno past when we've started
3266 spilling to the stack indicates the fact that we've started
3267 spilling to the stack to expand_builtin_saveregs. */
3268 cum->sysv_gregno = gregno + n_words;
3271 if (TARGET_DEBUG_ARG)
3273 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3274 cum->words, cum->fregno);
3275 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3276 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3277 fprintf (stderr, "mode = %4s, named = %d\n",
3278 GET_MODE_NAME (mode), named);
3283 int align = (TARGET_32BIT && (cum->words & 1) != 0
3284 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3286 cum->words += align + RS6000_ARG_SIZE (mode, type);
3288 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3289 && TARGET_HARD_FLOAT && TARGET_FPRS)
3290 cum->fregno += (mode == TFmode ? 2 : 1);
3292 if (TARGET_DEBUG_ARG)
3294 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3295 cum->words, cum->fregno);
3296 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3297 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3298 fprintf (stderr, "named = %d, align = %d\n", named, align);
3303 /* Determine where to put an argument to a function.
3304 Value is zero to push the argument on the stack,
3305 or a hard register in which to store the argument.
3307 MODE is the argument's machine mode.
3308 TYPE is the data type of the argument (as a tree).
3309 This is null for libcalls where that information may
3311 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3312 the preceding args and about the function being called.
3313 NAMED is nonzero if this argument is a named parameter
3314 (otherwise it is an extra parameter matching an ellipsis).
3316 On RS/6000 the first eight words of non-FP are normally in registers
3317 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3318 Under V.4, the first 8 FP args are in registers.
3320 If this is floating-point and no prototype is specified, we use
3321 both an FP and integer register (or possibly FP reg and stack). Library
3322 functions (when TYPE is zero) always have the proper types for args,
3323 so we can pass the FP value just in one register. emit_library_function
3324 doesn't support PARALLEL anyway. */
3327 function_arg (cum, mode, type, named)
3328 CUMULATIVE_ARGS *cum;
3329 enum machine_mode mode;
3333 enum rs6000_abi abi = DEFAULT_ABI;
3335 /* Return a marker to indicate whether CR1 needs to set or clear the
3336 bit that V.4 uses to say fp args were passed in registers.
3337 Assume that we don't need the marker for software floating point,
3338 or compiler generated library calls. */
3339 if (mode == VOIDmode)
3342 && cum->nargs_prototype < 0
3343 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3345 /* For the SPE, we need to crxor CR6 always. */
3347 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3348 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3349 return GEN_INT (cum->call_cookie
3350 | ((cum->fregno == FP_ARG_MIN_REG)
3351 ? CALL_V4_SET_FP_ARGS
3352 : CALL_V4_CLEAR_FP_ARGS));
3355 return GEN_INT (cum->call_cookie);
3358 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3360 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3361 return gen_rtx_REG (mode, cum->vregno);
3365 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3367 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3368 return gen_rtx_REG (mode, cum->sysv_gregno);
3372 else if (abi == ABI_V4)
3374 if (TARGET_HARD_FLOAT && TARGET_FPRS
3375 && (mode == SFmode || mode == DFmode))
3377 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3378 return gen_rtx_REG (mode, cum->fregno);
3385 int gregno = cum->sysv_gregno;
3387 /* Aggregates and IEEE quad get passed by reference. */
3388 if ((type && AGGREGATE_TYPE_P (type))
3392 n_words = RS6000_ARG_SIZE (mode, type);
3394 /* Long long and SPE vectors are put in odd registers. */
3395 if (n_words == 2 && (gregno & 1) == 0)
3398 /* Long long and SPE vectors are not split between registers
3400 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3402 /* SPE vectors in ... get split into 2 registers. */
3403 if (TARGET_SPE && TARGET_SPE_ABI
3404 && SPE_VECTOR_MODE (mode) && !named)
3407 enum machine_mode m = SImode;
3409 r1 = gen_rtx_REG (m, gregno);
3410 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3411 r2 = gen_rtx_REG (m, gregno + 1);
3412 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3413 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3415 return gen_rtx_REG (mode, gregno);
3423 int align = (TARGET_32BIT && (cum->words & 1) != 0
3424 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3425 int align_words = cum->words + align;
3427 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3430 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3433 || ((cum->nargs_prototype > 0)
3434 /* IBM AIX extended its linkage convention definition always
3435 to require FP args after register save area hole on the
3437 && (DEFAULT_ABI != ABI_AIX
3439 || (align_words < GP_ARG_NUM_REG))))
3440 return gen_rtx_REG (mode, cum->fregno);
3442 return gen_rtx_PARALLEL (mode,
3444 gen_rtx_EXPR_LIST (VOIDmode,
3445 ((align_words >= GP_ARG_NUM_REG)
3448 + RS6000_ARG_SIZE (mode, type)
3450 /* If this is partially on the stack, then
3451 we only include the portion actually
3452 in registers here. */
3453 ? gen_rtx_REG (SImode,
3454 GP_ARG_MIN_REG + align_words)
3455 : gen_rtx_REG (mode,
3456 GP_ARG_MIN_REG + align_words))),
3458 gen_rtx_EXPR_LIST (VOIDmode,
3459 gen_rtx_REG (mode, cum->fregno),
3462 else if (align_words < GP_ARG_NUM_REG)
3463 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3469 /* For an arg passed partly in registers and partly in memory,
3470 this is the number of registers used.
3471 For args passed entirely in registers or entirely in memory, zero. */
3474 function_arg_partial_nregs (cum, mode, type, named)
3475 CUMULATIVE_ARGS *cum;
3476 enum machine_mode mode;
3478 int named ATTRIBUTE_UNUSED;
3480 if (DEFAULT_ABI == ABI_V4)
3483 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3484 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3486 if (cum->nargs_prototype >= 0)
3490 if (cum->words < GP_ARG_NUM_REG
3491 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3493 int ret = GP_ARG_NUM_REG - cum->words;
3494 if (ret && TARGET_DEBUG_ARG)
3495 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3503 /* A C expression that indicates when an argument must be passed by
3504 reference. If nonzero for an argument, a copy of that argument is
3505 made in memory and a pointer to the argument is passed instead of
3506 the argument itself. The pointer is passed in whatever way is
3507 appropriate for passing a pointer to that type.
3509 Under V.4, structures and unions are passed by reference.
3511 As an extension to all ABIs, variable sized types are passed by
3515 function_arg_pass_by_reference (cum, mode, type, named)
3516 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3517 enum machine_mode mode ATTRIBUTE_UNUSED;
3519 int named ATTRIBUTE_UNUSED;
3521 if (DEFAULT_ABI == ABI_V4
3522 && ((type && AGGREGATE_TYPE_P (type))
3525 if (TARGET_DEBUG_ARG)
3526 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3530 return type && int_size_in_bytes (type) <= 0;
3533 /* Perform any needed actions needed for a function that is receiving a
3534 variable number of arguments.
3538 MODE and TYPE are the mode and type of the current parameter.
3540 PRETEND_SIZE is a variable that should be set to the amount of stack
3541 that must be pushed by the prolog to pretend that our caller pushed
3544 Normally, this macro will push all remaining incoming registers on the
3545 stack and set PRETEND_SIZE to the length of the registers pushed. */
3548 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3549 CUMULATIVE_ARGS *cum;
3550 enum machine_mode mode;
3552 int *pretend_size ATTRIBUTE_UNUSED;
3556 CUMULATIVE_ARGS next_cum;
3557 int reg_size = TARGET_32BIT ? 4 : 8;
3558 rtx save_area = NULL_RTX, mem;
3559 int first_reg_offset, set;
3563 fntype = TREE_TYPE (current_function_decl);
3564 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3565 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3566 != void_type_node));
3568 /* For varargs, we do not want to skip the dummy va_dcl argument.
3569 For stdargs, we do want to skip the last named argument. */
3572 function_arg_advance (&next_cum, mode, type, 1);
3574 if (DEFAULT_ABI == ABI_V4)
3576 /* Indicate to allocate space on the stack for varargs save area. */
3577 cfun->machine->sysv_varargs_p = 1;
3579 save_area = plus_constant (virtual_stack_vars_rtx,
3580 - RS6000_VARARGS_SIZE);
3582 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3586 first_reg_offset = next_cum.words;
3587 save_area = virtual_incoming_args_rtx;
3588 cfun->machine->sysv_varargs_p = 0;
3590 if (MUST_PASS_IN_STACK (mode, type))
3591 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3594 set = get_varargs_alias_set ();
3595 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3597 mem = gen_rtx_MEM (BLKmode,
3598 plus_constant (save_area,
3599 first_reg_offset * reg_size)),
3600 set_mem_alias_set (mem, set);
3601 set_mem_align (mem, BITS_PER_WORD);
3604 (GP_ARG_MIN_REG + first_reg_offset, mem,
3605 GP_ARG_NUM_REG - first_reg_offset,
3606 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3609 /* Save FP registers if needed. */
3610 if (DEFAULT_ABI == ABI_V4
3611 && TARGET_HARD_FLOAT && TARGET_FPRS
3613 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3615 int fregno = next_cum.fregno;
3616 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3617 rtx lab = gen_label_rtx ();
3618 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3620 emit_jump_insn (gen_rtx_SET (VOIDmode,
3622 gen_rtx_IF_THEN_ELSE (VOIDmode,
3623 gen_rtx_NE (VOIDmode, cr1,
3625 gen_rtx_LABEL_REF (VOIDmode, lab),
3628 while (fregno <= FP_ARG_V4_MAX_REG)
3630 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3631 set_mem_alias_set (mem, set);
3632 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3641 /* Create the va_list data type. */
3644 rs6000_build_va_list ()
3646 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3648 /* For AIX, prefer 'char *' because that's what the system
3649 header files like. */
3650 if (DEFAULT_ABI != ABI_V4)
3651 return build_pointer_type (char_type_node);
3653 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3654 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3656 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3657 unsigned_char_type_node);
3658 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3659 unsigned_char_type_node);
3660 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3662 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3665 DECL_FIELD_CONTEXT (f_gpr) = record;
3666 DECL_FIELD_CONTEXT (f_fpr) = record;
3667 DECL_FIELD_CONTEXT (f_ovf) = record;
3668 DECL_FIELD_CONTEXT (f_sav) = record;
3670 TREE_CHAIN (record) = type_decl;
3671 TYPE_NAME (record) = type_decl;
3672 TYPE_FIELDS (record) = f_gpr;
3673 TREE_CHAIN (f_gpr) = f_fpr;
3674 TREE_CHAIN (f_fpr) = f_ovf;
3675 TREE_CHAIN (f_ovf) = f_sav;
3677 layout_type (record);
3679 /* The correct type is an array type of one element. */
3680 return build_array_type (record, build_index_type (size_zero_node));
3683 /* Implement va_start. */
3686 rs6000_va_start (valist, nextarg)
3690 HOST_WIDE_INT words, n_gpr, n_fpr;
3691 tree f_gpr, f_fpr, f_ovf, f_sav;
3692 tree gpr, fpr, ovf, sav, t;
3694 /* Only SVR4 needs something special. */
3695 if (DEFAULT_ABI != ABI_V4)
3697 std_expand_builtin_va_start (valist, nextarg);
3701 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3702 f_fpr = TREE_CHAIN (f_gpr);
3703 f_ovf = TREE_CHAIN (f_fpr);
3704 f_sav = TREE_CHAIN (f_ovf);
3706 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3707 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3708 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3709 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3710 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3712 /* Count number of gp and fp argument registers used. */
3713 words = current_function_args_info.words;
3714 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3715 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3717 if (TARGET_DEBUG_ARG)
3719 fputs ("va_start: words = ", stderr);
3720 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3721 fputs (", n_gpr = ", stderr);
3722 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3723 fputs (", n_fpr = ", stderr);
3724 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3725 putc ('\n', stderr);
3728 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3729 TREE_SIDE_EFFECTS (t) = 1;
3730 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3732 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3733 TREE_SIDE_EFFECTS (t) = 1;
3734 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3736 /* Find the overflow area. */
3737 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3739 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3740 build_int_2 (words * UNITS_PER_WORD, 0));
3741 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3742 TREE_SIDE_EFFECTS (t) = 1;
3743 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3745 /* Find the register save area. */
3746 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3747 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3748 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3749 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3750 TREE_SIDE_EFFECTS (t) = 1;
3751 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3754 /* Implement va_arg. */
3757 rs6000_va_arg (valist, type)
3760 tree f_gpr, f_fpr, f_ovf, f_sav;
3761 tree gpr, fpr, ovf, sav, reg, t, u;
3762 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3763 rtx lab_false, lab_over, addr_rtx, r;
3765 if (DEFAULT_ABI != ABI_V4)
3767 /* Variable sized types are passed by reference. */
3768 if (int_size_in_bytes (type) <= 0)
3770 u = build_pointer_type (type);
3772 /* Args grow upward. */
3773 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3774 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3775 TREE_SIDE_EFFECTS (t) = 1;
3777 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3778 TREE_SIDE_EFFECTS (t) = 1;
3780 t = build1 (INDIRECT_REF, u, t);
3781 TREE_SIDE_EFFECTS (t) = 1;
3783 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3786 return std_expand_builtin_va_arg (valist, type);
3789 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3790 f_fpr = TREE_CHAIN (f_gpr);
3791 f_ovf = TREE_CHAIN (f_fpr);
3792 f_sav = TREE_CHAIN (f_ovf);
3794 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3795 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3796 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3797 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3798 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3800 size = int_size_in_bytes (type);
3801 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3803 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3805 /* Aggregates and long doubles are passed by reference. */
3811 size = UNITS_PER_WORD;
3814 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3816 /* FP args go in FP registers, if present. */
3825 /* Otherwise into GP registers. */
3833 /* Pull the value out of the saved registers ... */
3835 lab_false = gen_label_rtx ();
3836 lab_over = gen_label_rtx ();
3837 addr_rtx = gen_reg_rtx (Pmode);
3839 /* AltiVec vectors never go in registers. */
3840 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3842 TREE_THIS_VOLATILE (reg) = 1;
3843 emit_cmp_and_jump_insns
3844 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3845 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3848 /* Long long is aligned in the registers. */
3851 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3852 build_int_2 (n_reg - 1, 0));
3853 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3854 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3855 TREE_SIDE_EFFECTS (u) = 1;
3856 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3860 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3864 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3865 build_int_2 (n_reg, 0));
3866 TREE_SIDE_EFFECTS (u) = 1;
3868 u = build1 (CONVERT_EXPR, integer_type_node, u);
3869 TREE_SIDE_EFFECTS (u) = 1;
3871 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3872 TREE_SIDE_EFFECTS (u) = 1;
3874 t = build (PLUS_EXPR, ptr_type_node, t, u);
3875 TREE_SIDE_EFFECTS (t) = 1;
3877 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3879 emit_move_insn (addr_rtx, r);
3881 emit_jump_insn (gen_jump (lab_over));
3885 emit_label (lab_false);
3887 /* ... otherwise out of the overflow area. */
3889 /* Make sure we don't find reg 7 for the next int arg.
3891 All AltiVec vectors go in the overflow area. So in the AltiVec
3892 case we need to get the vectors from the overflow area, but
3893 remember where the GPRs and FPRs are. */
3894 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3895 || !TARGET_ALTIVEC))
3897 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3898 TREE_SIDE_EFFECTS (t) = 1;
3899 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3902 /* Care for on-stack alignment if needed. */
3909 /* AltiVec vectors are 16 byte aligned. */
3910 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3915 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3916 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3920 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3922 emit_move_insn (addr_rtx, r);
3924 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3925 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3926 TREE_SIDE_EFFECTS (t) = 1;
3927 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3929 emit_label (lab_over);
3933 r = gen_rtx_MEM (Pmode, addr_rtx);
3934 set_mem_alias_set (r, get_varargs_alias_set ());
3935 emit_move_insn (addr_rtx, r);
3943 #define def_builtin(MASK, NAME, TYPE, CODE) \
3945 if ((MASK) & target_flags) \
3946 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3950 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3952 static const struct builtin_description bdesc_3arg[] =
3954 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3955 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3956 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3957 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3958 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3959 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3960 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3961 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3962 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3963 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3964 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3965 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3966 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3967 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3968 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3969 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3970 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3971 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3972 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3973 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3974 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3975 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3976 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3979 /* DST operations: void foo (void *, const int, const char). */
3981 static const struct builtin_description bdesc_dst[] =
3983 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3984 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3985 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3986 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3989 /* Simple binary operations: VECc = foo (VECa, VECb). */
3991 static struct builtin_description bdesc_2arg[] =
3993 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3994 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3995 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3996 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3997 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3998 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3999 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4000 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4001 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4002 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4003 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4004 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4005 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4006 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4007 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4008 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4009 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4010 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4011 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4012 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4013 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4014 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4015 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4016 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4017 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4018 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4019 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4020 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4021 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4022 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4023 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4024 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4025 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4026 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4027 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4028 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4029 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4030 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4031 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4032 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4033 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4034 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4035 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4036 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4037 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4038 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4039 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4040 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4041 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4042 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4043 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4044 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4045 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4046 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4047 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4048 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4049 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4050 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4051 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4052 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4053 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4054 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4055 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4056 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4057 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4058 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4059 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4060 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4061 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4062 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4063 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4064 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4065 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4066 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4067 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4068 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4069 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4070 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4071 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4072 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4073 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4074 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4075 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4076 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4077 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4078 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4079 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4080 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4081 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4082 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4083 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4084 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4085 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4086 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4087 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4088 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4089 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4090 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4091 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4092 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4093 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4094 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4095 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4096 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4097 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4098 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4099 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4100 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4101 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4102 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4103 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4104 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4105 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4107 /* Place holder, leave as first spe builtin. */
4108 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4109 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4110 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4111 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4112 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4113 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4114 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4115 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4116 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4117 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4118 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4119 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4120 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4121 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4122 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4123 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4124 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4125 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4126 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4127 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4128 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4129 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4130 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4131 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4132 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4133 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4134 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4135 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4136 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4137 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4138 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4139 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4140 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4141 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4142 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4143 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4144 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4145 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4146 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4147 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4148 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4149 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4150 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4151 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4152 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4153 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4154 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4155 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4156 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4157 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4158 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4159 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4160 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4161 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4162 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4163 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4164 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4165 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4166 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4167 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4168 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4169 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4170 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4171 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4172 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4173 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4174 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4175 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4176 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4177 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4178 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4179 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4180 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4181 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4182 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4183 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4184 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4185 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4186 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4187 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4188 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4189 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4190 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4191 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4192 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4193 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4194 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4195 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4196 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4197 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4198 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4199 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4200 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4201 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4202 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4203 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4204 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4205 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4206 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4207 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4208 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4209 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4210 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4211 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4212 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4213 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4214 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4215 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4216 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4218 /* SPE binary operations expecting a 5-bit unsigned literal. */
4219 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4221 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4222 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4223 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4224 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4225 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4226 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4227 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4228 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4229 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4230 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4231 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4232 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4233 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4234 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4235 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4236 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4237 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4238 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4239 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4240 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4241 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4242 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4243 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4244 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4245 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4246 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4248 /* Place-holder. Leave as last binary SPE builtin. */
4249 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4252 /* AltiVec predicates. */
4254 struct builtin_description_predicates
4256 const unsigned int mask;
4257 const enum insn_code icode;
4259 const char *const name;
4260 const enum rs6000_builtins code;
4263 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4265 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4266 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4267 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4268 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4269 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4270 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4271 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4272 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4273 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4274 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4275 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4276 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4277 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4280 /* SPE predicates. */
4281 static struct builtin_description bdesc_spe_predicates[] =
4283 /* Place-holder. Leave as first. */
4284 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4285 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4286 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4287 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4288 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4289 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4290 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4291 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4292 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4293 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4294 /* Place-holder. Leave as last. */
4295 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4298 /* SPE evsel predicates. */
4299 static struct builtin_description bdesc_spe_evsel[] =
4301 /* Place-holder. Leave as first. */
4302 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4303 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4304 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4305 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4306 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4307 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4308 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4309 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4310 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4311 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4312 /* Place-holder. Leave as last. */
4313 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4316 /* ABS* operations. */
4318 static const struct builtin_description bdesc_abs[] =
4320 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4321 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4322 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4323 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4324 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4325 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4326 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4329 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4332 static struct builtin_description bdesc_1arg[] =
4334 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4335 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4336 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4337 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4338 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4339 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4340 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4341 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4342 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4343 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4344 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4345 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4346 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4347 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4348 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4349 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4350 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4352 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4353 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4354 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4355 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4356 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4357 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4358 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4359 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4360 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4361 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4362 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4363 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4364 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4365 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4366 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4367 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4368 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4369 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4370 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4371 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4372 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4373 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4374 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4375 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4376 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4377 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4378 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4379 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4380 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4381 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4382 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4383 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4385 /* Place-holder. Leave as last unary SPE builtin. */
4386 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4390 rs6000_expand_unop_builtin (icode, arglist, target)
4391 enum insn_code icode;
4396 tree arg0 = TREE_VALUE (arglist);
4397 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4398 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4399 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4401 if (icode == CODE_FOR_nothing)
4402 /* Builtin not supported on this processor. */
4405 /* If we got invalid arguments bail out before generating bad rtl. */
4406 if (arg0 == error_mark_node)
4409 if (icode == CODE_FOR_altivec_vspltisb
4410 || icode == CODE_FOR_altivec_vspltish
4411 || icode == CODE_FOR_altivec_vspltisw
4412 || icode == CODE_FOR_spe_evsplatfi
4413 || icode == CODE_FOR_spe_evsplati)
4415 /* Only allow 5-bit *signed* literals. */
4416 if (GET_CODE (op0) != CONST_INT
4417 || INTVAL (op0) > 0x1f
4418 || INTVAL (op0) < -0x1f)
4420 error ("argument 1 must be a 5-bit signed literal");
4426 || GET_MODE (target) != tmode
4427 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4428 target = gen_reg_rtx (tmode);
4430 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4431 op0 = copy_to_mode_reg (mode0, op0);
4433 pat = GEN_FCN (icode) (target, op0);
4442 altivec_expand_abs_builtin (icode, arglist, target)
4443 enum insn_code icode;
4447 rtx pat, scratch1, scratch2;
4448 tree arg0 = TREE_VALUE (arglist);
4449 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4450 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4451 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4453 /* If we have invalid arguments, bail out before generating bad rtl. */
4454 if (arg0 == error_mark_node)
4458 || GET_MODE (target) != tmode
4459 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4460 target = gen_reg_rtx (tmode);
4462 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4463 op0 = copy_to_mode_reg (mode0, op0);
4465 scratch1 = gen_reg_rtx (mode0);
4466 scratch2 = gen_reg_rtx (mode0);
4468 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4477 rs6000_expand_binop_builtin (icode, arglist, target)
4478 enum insn_code icode;
4483 tree arg0 = TREE_VALUE (arglist);
4484 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4485 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4486 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4487 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4488 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4489 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4491 if (icode == CODE_FOR_nothing)
4492 /* Builtin not supported on this processor. */
4495 /* If we got invalid arguments bail out before generating bad rtl. */
4496 if (arg0 == error_mark_node || arg1 == error_mark_node)
4499 if (icode == CODE_FOR_altivec_vcfux
4500 || icode == CODE_FOR_altivec_vcfsx
4501 || icode == CODE_FOR_altivec_vctsxs
4502 || icode == CODE_FOR_altivec_vctuxs
4503 || icode == CODE_FOR_altivec_vspltb
4504 || icode == CODE_FOR_altivec_vsplth
4505 || icode == CODE_FOR_altivec_vspltw
4506 || icode == CODE_FOR_spe_evaddiw
4507 || icode == CODE_FOR_spe_evldd
4508 || icode == CODE_FOR_spe_evldh
4509 || icode == CODE_FOR_spe_evldw
4510 || icode == CODE_FOR_spe_evlhhesplat
4511 || icode == CODE_FOR_spe_evlhhossplat
4512 || icode == CODE_FOR_spe_evlhhousplat
4513 || icode == CODE_FOR_spe_evlwhe
4514 || icode == CODE_FOR_spe_evlwhos
4515 || icode == CODE_FOR_spe_evlwhou
4516 || icode == CODE_FOR_spe_evlwhsplat
4517 || icode == CODE_FOR_spe_evlwwsplat
4518 || icode == CODE_FOR_spe_evrlwi
4519 || icode == CODE_FOR_spe_evslwi
4520 || icode == CODE_FOR_spe_evsrwis
4521 || icode == CODE_FOR_spe_evsrwiu)
4523 /* Only allow 5-bit unsigned literals. */
4524 if (TREE_CODE (arg1) != INTEGER_CST
4525 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4527 error ("argument 2 must be a 5-bit unsigned literal");
4533 || GET_MODE (target) != tmode
4534 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4535 target = gen_reg_rtx (tmode);
4537 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4538 op0 = copy_to_mode_reg (mode0, op0);
4539 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4540 op1 = copy_to_mode_reg (mode1, op1);
4542 pat = GEN_FCN (icode) (target, op0, op1);
4551 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4552 enum insn_code icode;
4558 tree cr6_form = TREE_VALUE (arglist);
4559 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4560 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4561 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4562 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4563 enum machine_mode tmode = SImode;
4564 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4565 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4568 if (TREE_CODE (cr6_form) != INTEGER_CST)
4570 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4574 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4579 /* If we have invalid arguments, bail out before generating bad rtl. */
4580 if (arg0 == error_mark_node || arg1 == error_mark_node)
4584 || GET_MODE (target) != tmode
4585 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4586 target = gen_reg_rtx (tmode);
4588 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4589 op0 = copy_to_mode_reg (mode0, op0);
4590 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4591 op1 = copy_to_mode_reg (mode1, op1);
4593 scratch = gen_reg_rtx (mode0);
4595 pat = GEN_FCN (icode) (scratch, op0, op1,
4596 gen_rtx (SYMBOL_REF, Pmode, opcode));
4601 /* The vec_any* and vec_all* predicates use the same opcodes for two
4602 different operations, but the bits in CR6 will be different
4603 depending on what information we want. So we have to play tricks
4604 with CR6 to get the right bits out.
4606 If you think this is disgusting, look at the specs for the
4607 AltiVec predicates. */
4609 switch (cr6_form_int)
4612 emit_insn (gen_cr6_test_for_zero (target));
4615 emit_insn (gen_cr6_test_for_zero_reverse (target));
4618 emit_insn (gen_cr6_test_for_lt (target));
4621 emit_insn (gen_cr6_test_for_lt_reverse (target));
4624 error ("argument 1 of __builtin_altivec_predicate is out of range");
4632 altivec_expand_stv_builtin (icode, arglist)
4633 enum insn_code icode;
4636 tree arg0 = TREE_VALUE (arglist);
4637 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4638 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4639 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4640 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4641 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4643 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4644 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4645 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4647 /* Invalid arguments. Bail before doing anything stoopid! */
4648 if (arg0 == error_mark_node
4649 || arg1 == error_mark_node
4650 || arg2 == error_mark_node)
4653 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4654 op0 = copy_to_mode_reg (mode2, op0);
4655 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4656 op1 = copy_to_mode_reg (mode0, op1);
4657 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4658 op2 = copy_to_mode_reg (mode1, op2);
4660 pat = GEN_FCN (icode) (op1, op2, op0);
4667 rs6000_expand_ternop_builtin (icode, arglist, target)
4668 enum insn_code icode;
4673 tree arg0 = TREE_VALUE (arglist);
4674 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4675 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4676 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4677 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4678 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4679 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4680 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4681 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4682 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4684 if (icode == CODE_FOR_nothing)
4685 /* Builtin not supported on this processor. */
4688 /* If we got invalid arguments bail out before generating bad rtl. */
4689 if (arg0 == error_mark_node
4690 || arg1 == error_mark_node
4691 || arg2 == error_mark_node)
4694 if (icode == CODE_FOR_altivec_vsldoi_4sf
4695 || icode == CODE_FOR_altivec_vsldoi_4si
4696 || icode == CODE_FOR_altivec_vsldoi_8hi
4697 || icode == CODE_FOR_altivec_vsldoi_16qi)
4699 /* Only allow 4-bit unsigned literals. */
4700 if (TREE_CODE (arg2) != INTEGER_CST
4701 || TREE_INT_CST_LOW (arg2) & ~0xf)
4703 error ("argument 3 must be a 4-bit unsigned literal");
4709 || GET_MODE (target) != tmode
4710 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4711 target = gen_reg_rtx (tmode);
4713 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4714 op0 = copy_to_mode_reg (mode0, op0);
4715 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4716 op1 = copy_to_mode_reg (mode1, op1);
4717 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4718 op2 = copy_to_mode_reg (mode2, op2);
4720 pat = GEN_FCN (icode) (target, op0, op1, op2);
4728 /* Expand the lvx builtins. */
4730 altivec_expand_ld_builtin (exp, target, expandedp)
4735 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4736 tree arglist = TREE_OPERAND (exp, 1);
4737 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4739 enum machine_mode tmode, mode0;
4741 enum insn_code icode;
4745 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4746 icode = CODE_FOR_altivec_lvx_16qi;
4748 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4749 icode = CODE_FOR_altivec_lvx_8hi;
4751 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4752 icode = CODE_FOR_altivec_lvx_4si;
4754 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4755 icode = CODE_FOR_altivec_lvx_4sf;
4764 arg0 = TREE_VALUE (arglist);
4765 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4766 tmode = insn_data[icode].operand[0].mode;
4767 mode0 = insn_data[icode].operand[1].mode;
4770 || GET_MODE (target) != tmode
4771 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4772 target = gen_reg_rtx (tmode);
4774 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4775 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4777 pat = GEN_FCN (icode) (target, op0);
4784 /* Expand the stvx builtins. */
4786 altivec_expand_st_builtin (exp, target, expandedp)
4788 rtx target ATTRIBUTE_UNUSED;
4791 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4792 tree arglist = TREE_OPERAND (exp, 1);
4793 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4795 enum machine_mode mode0, mode1;
4797 enum insn_code icode;
4801 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4802 icode = CODE_FOR_altivec_stvx_16qi;
4804 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4805 icode = CODE_FOR_altivec_stvx_8hi;
4807 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4808 icode = CODE_FOR_altivec_stvx_4si;
4810 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4811 icode = CODE_FOR_altivec_stvx_4sf;
4818 arg0 = TREE_VALUE (arglist);
4819 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4820 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4821 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4822 mode0 = insn_data[icode].operand[0].mode;
4823 mode1 = insn_data[icode].operand[1].mode;
4825 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4826 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4827 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4828 op1 = copy_to_mode_reg (mode1, op1);
4830 pat = GEN_FCN (icode) (op0, op1);
4838 /* Expand the dst builtins. */
4840 altivec_expand_dst_builtin (exp, target, expandedp)
4842 rtx target ATTRIBUTE_UNUSED;
4845 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4846 tree arglist = TREE_OPERAND (exp, 1);
4847 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4848 tree arg0, arg1, arg2;
4849 enum machine_mode mode0, mode1, mode2;
4850 rtx pat, op0, op1, op2;
4851 struct builtin_description *d;
4856 /* Handle DST variants. */
4857 d = (struct builtin_description *) bdesc_dst;
4858 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4859 if (d->code == fcode)
4861 arg0 = TREE_VALUE (arglist);
4862 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4863 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4864 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4865 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4866 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4867 mode0 = insn_data[d->icode].operand[0].mode;
4868 mode1 = insn_data[d->icode].operand[1].mode;
4869 mode2 = insn_data[d->icode].operand[2].mode;
4871 /* Invalid arguments, bail out before generating bad rtl. */
4872 if (arg0 == error_mark_node
4873 || arg1 == error_mark_node
4874 || arg2 == error_mark_node)
4877 if (TREE_CODE (arg2) != INTEGER_CST
4878 || TREE_INT_CST_LOW (arg2) & ~0x3)
4880 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4884 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4885 op0 = copy_to_mode_reg (mode0, op0);
4886 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4887 op1 = copy_to_mode_reg (mode1, op1);
4889 pat = GEN_FCN (d->icode) (op0, op1, op2);
4900 /* Expand the builtin in EXP and store the result in TARGET. Store
4901 true in *EXPANDEDP if we found a builtin to expand. */
4903 altivec_expand_builtin (exp, target, expandedp)
4908 struct builtin_description *d;
4909 struct builtin_description_predicates *dp;
4911 enum insn_code icode;
4912 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4913 tree arglist = TREE_OPERAND (exp, 1);
4916 enum machine_mode tmode, mode0;
4917 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4919 target = altivec_expand_ld_builtin (exp, target, expandedp);
4923 target = altivec_expand_st_builtin (exp, target, expandedp);
4927 target = altivec_expand_dst_builtin (exp, target, expandedp);
4935 case ALTIVEC_BUILTIN_STVX:
4936 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4937 case ALTIVEC_BUILTIN_STVEBX:
4938 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4939 case ALTIVEC_BUILTIN_STVEHX:
4940 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4941 case ALTIVEC_BUILTIN_STVEWX:
4942 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4943 case ALTIVEC_BUILTIN_STVXL:
4944 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4946 case ALTIVEC_BUILTIN_MFVSCR:
4947 icode = CODE_FOR_altivec_mfvscr;
4948 tmode = insn_data[icode].operand[0].mode;
4951 || GET_MODE (target) != tmode
4952 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4953 target = gen_reg_rtx (tmode);
4955 pat = GEN_FCN (icode) (target);
4961 case ALTIVEC_BUILTIN_MTVSCR:
4962 icode = CODE_FOR_altivec_mtvscr;
4963 arg0 = TREE_VALUE (arglist);
4964 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4965 mode0 = insn_data[icode].operand[0].mode;
4967 /* If we got invalid arguments bail out before generating bad rtl. */
4968 if (arg0 == error_mark_node)
4971 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4972 op0 = copy_to_mode_reg (mode0, op0);
4974 pat = GEN_FCN (icode) (op0);
4979 case ALTIVEC_BUILTIN_DSSALL:
4980 emit_insn (gen_altivec_dssall ());
4983 case ALTIVEC_BUILTIN_DSS:
4984 icode = CODE_FOR_altivec_dss;
4985 arg0 = TREE_VALUE (arglist);
4986 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4987 mode0 = insn_data[icode].operand[0].mode;
4989 /* If we got invalid arguments bail out before generating bad rtl. */
4990 if (arg0 == error_mark_node)
4993 if (TREE_CODE (arg0) != INTEGER_CST
4994 || TREE_INT_CST_LOW (arg0) & ~0x3)
4996 error ("argument to dss must be a 2-bit unsigned literal");
5000 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5001 op0 = copy_to_mode_reg (mode0, op0);
5003 emit_insn (gen_altivec_dss (op0));
5007 /* Expand abs* operations. */
5008 d = (struct builtin_description *) bdesc_abs;
5009 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5010 if (d->code == fcode)
5011 return altivec_expand_abs_builtin (d->icode, arglist, target);
5013 /* Expand the AltiVec predicates. */
5014 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5015 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5016 if (dp->code == fcode)
5017 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5019 /* LV* are funky. We initialized them differently. */
5022 case ALTIVEC_BUILTIN_LVSL:
5023 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5025 case ALTIVEC_BUILTIN_LVSR:
5026 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5028 case ALTIVEC_BUILTIN_LVEBX:
5029 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5031 case ALTIVEC_BUILTIN_LVEHX:
5032 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5034 case ALTIVEC_BUILTIN_LVEWX:
5035 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5037 case ALTIVEC_BUILTIN_LVXL:
5038 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5040 case ALTIVEC_BUILTIN_LVX:
5041 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5052 /* Binops that need to be initialized manually, but can be expanded
5053 automagically by rs6000_expand_binop_builtin. */
5054 static struct builtin_description bdesc_2arg_spe[] =
5056 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5057 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5058 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5059 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5060 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5061 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5062 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5063 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5064 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5065 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5066 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5067 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5068 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5069 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5070 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5071 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5072 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5073 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5074 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5075 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5076 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5077 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5080 /* Expand the builtin in EXP and store the result in TARGET. Store
5081 true in *EXPANDEDP if we found a builtin to expand.
5083 This expands the SPE builtins that are not simple unary and binary
5086 spe_expand_builtin (exp, target, expandedp)
5091 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5092 tree arglist = TREE_OPERAND (exp, 1);
5094 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5095 enum insn_code icode;
5096 enum machine_mode tmode, mode0;
5098 struct builtin_description *d;
5103 /* Syntax check for a 5-bit unsigned immediate. */
5106 case SPE_BUILTIN_EVSTDD:
5107 case SPE_BUILTIN_EVSTDH:
5108 case SPE_BUILTIN_EVSTDW:
5109 case SPE_BUILTIN_EVSTWHE:
5110 case SPE_BUILTIN_EVSTWHO:
5111 case SPE_BUILTIN_EVSTWWE:
5112 case SPE_BUILTIN_EVSTWWO:
5113 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5114 if (TREE_CODE (arg1) != INTEGER_CST
5115 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5117 error ("argument 2 must be a 5-bit unsigned literal");
5125 d = (struct builtin_description *) bdesc_2arg_spe;
5126 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5127 if (d->code == fcode)
5128 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5130 d = (struct builtin_description *) bdesc_spe_predicates;
5131 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5132 if (d->code == fcode)
5133 return spe_expand_predicate_builtin (d->icode, arglist, target);
5135 d = (struct builtin_description *) bdesc_spe_evsel;
5136 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5137 if (d->code == fcode)
5138 return spe_expand_evsel_builtin (d->icode, arglist, target);
5142 case SPE_BUILTIN_EVSTDDX:
5143 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5144 case SPE_BUILTIN_EVSTDHX:
5145 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5146 case SPE_BUILTIN_EVSTDWX:
5147 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5148 case SPE_BUILTIN_EVSTWHEX:
5149 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5150 case SPE_BUILTIN_EVSTWHOX:
5151 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5152 case SPE_BUILTIN_EVSTWWEX:
5153 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5154 case SPE_BUILTIN_EVSTWWOX:
5155 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5156 case SPE_BUILTIN_EVSTDD:
5157 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5158 case SPE_BUILTIN_EVSTDH:
5159 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5160 case SPE_BUILTIN_EVSTDW:
5161 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5162 case SPE_BUILTIN_EVSTWHE:
5163 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5164 case SPE_BUILTIN_EVSTWHO:
5165 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5166 case SPE_BUILTIN_EVSTWWE:
5167 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5168 case SPE_BUILTIN_EVSTWWO:
5169 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5170 case SPE_BUILTIN_MFSPEFSCR:
5171 icode = CODE_FOR_spe_mfspefscr;
5172 tmode = insn_data[icode].operand[0].mode;
5175 || GET_MODE (target) != tmode
5176 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5177 target = gen_reg_rtx (tmode);
5179 pat = GEN_FCN (icode) (target);
5184 case SPE_BUILTIN_MTSPEFSCR:
5185 icode = CODE_FOR_spe_mtspefscr;
5186 arg0 = TREE_VALUE (arglist);
5187 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5188 mode0 = insn_data[icode].operand[0].mode;
5190 if (arg0 == error_mark_node)
5193 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5194 op0 = copy_to_mode_reg (mode0, op0);
5196 pat = GEN_FCN (icode) (op0);
5209 spe_expand_predicate_builtin (icode, arglist, target)
5210 enum insn_code icode;
5214 rtx pat, scratch, tmp;
5215 tree form = TREE_VALUE (arglist);
5216 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5217 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5218 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5219 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5220 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5221 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5225 if (TREE_CODE (form) != INTEGER_CST)
5227 error ("argument 1 of __builtin_spe_predicate must be a constant");
5231 form_int = TREE_INT_CST_LOW (form);
5236 if (arg0 == error_mark_node || arg1 == error_mark_node)
5240 || GET_MODE (target) != SImode
5241 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5242 target = gen_reg_rtx (SImode);
5244 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5245 op0 = copy_to_mode_reg (mode0, op0);
5246 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5247 op1 = copy_to_mode_reg (mode1, op1);
5249 scratch = gen_reg_rtx (CCmode);
5251 pat = GEN_FCN (icode) (scratch, op0, op1);
5256 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5257 _lower_. We use one compare, but look in different bits of the
5258 CR for each variant.
5260 There are 2 elements in each SPE simd type (upper/lower). The CR
5261 bits are set as follows:
5263 BIT0 | BIT 1 | BIT 2 | BIT 3
5264 U | L | (U | L) | (U & L)
5266 So, for an "all" relationship, BIT 3 would be set.
5267 For an "any" relationship, BIT 2 would be set. Etc.
5269 Following traditional nomenclature, these bits map to:
5271 BIT0 | BIT 1 | BIT 2 | BIT 3
5274 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5279 /* All variant. OV bit. */
5281 /* We need to get to the OV bit, which is the ORDERED bit. We
5282 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5283 that's ugly and will trigger a validate_condition_mode abort.
5284 So let's just use another pattern. */
5285 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5287 /* Any variant. EQ bit. */
5291 /* Upper variant. LT bit. */
5295 /* Lower variant. GT bit. */
5300 error ("argument 1 of __builtin_spe_predicate is out of range");
5304 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5305 emit_move_insn (target, tmp);
5310 /* The evsel builtins look like this:
5312 e = __builtin_spe_evsel_OP (a, b, c, d);
5316 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5317 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5321 spe_expand_evsel_builtin (icode, arglist, target)
5322 enum insn_code icode;
5327 tree arg0 = TREE_VALUE (arglist);
5328 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5329 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5330 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5331 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5332 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5333 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5334 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5335 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5336 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5341 if (arg0 == error_mark_node || arg1 == error_mark_node
5342 || arg2 == error_mark_node || arg3 == error_mark_node)
5346 || GET_MODE (target) != mode0
5347 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5348 target = gen_reg_rtx (mode0);
5350 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5351 op0 = copy_to_mode_reg (mode0, op0);
5352 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5353 op1 = copy_to_mode_reg (mode0, op1);
5354 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5355 op2 = copy_to_mode_reg (mode0, op2);
5356 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5357 op3 = copy_to_mode_reg (mode0, op3);
5359 /* Generate the compare. */
5360 scratch = gen_reg_rtx (CCmode);
5361 pat = GEN_FCN (icode) (scratch, op0, op1);
5366 if (mode0 == V2SImode)
5367 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5369 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5374 /* Expand an expression EXP that calls a built-in function,
5375 with result going to TARGET if that's convenient
5376 (and in mode MODE if that's convenient).
5377 SUBTARGET may be used as the target for computing one of EXP's operands.
5378 IGNORE is nonzero if the value is to be ignored. */
5381 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5384 rtx subtarget ATTRIBUTE_UNUSED;
5385 enum machine_mode mode ATTRIBUTE_UNUSED;
5386 int ignore ATTRIBUTE_UNUSED;
5388 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5389 tree arglist = TREE_OPERAND (exp, 1);
5390 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5391 struct builtin_description *d;
5398 ret = altivec_expand_builtin (exp, target, &success);
5405 ret = spe_expand_builtin (exp, target, &success);
5411 if (TARGET_ALTIVEC || TARGET_SPE)
5413 /* Handle simple unary operations. */
5414 d = (struct builtin_description *) bdesc_1arg;
5415 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5416 if (d->code == fcode)
5417 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5419 /* Handle simple binary operations. */
5420 d = (struct builtin_description *) bdesc_2arg;
5421 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5422 if (d->code == fcode)
5423 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5425 /* Handle simple ternary operations. */
5426 d = (struct builtin_description *) bdesc_3arg;
5427 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5428 if (d->code == fcode)
5429 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5437 rs6000_init_builtins ()
5439 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5440 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5443 spe_init_builtins ();
5445 altivec_init_builtins ();
5446 if (TARGET_ALTIVEC || TARGET_SPE)
5447 rs6000_common_init_builtins ();
5450 /* Search through a set of builtins and enable the mask bits.
5451 DESC is an array of builtins.
5452 SIZE is the total number of builtins.
5453 START is the builtin enum at which to start.
5454 END is the builtin enum at which to end. */
5456 enable_mask_for_builtins (desc, size, start, end)
5457 struct builtin_description *desc;
5459 enum rs6000_builtins start, end;
5463 for (i = 0; i < size; ++i)
5464 if (desc[i].code == start)
5470 for (; i < size; ++i)
5472 /* Flip all the bits on. */
5473 desc[i].mask = target_flags;
5474 if (desc[i].code == end)
5480 spe_init_builtins ()
5482 tree endlink = void_list_node;
5483 tree puint_type_node = build_pointer_type (unsigned_type_node);
5484 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5485 tree pv2si_type_node = build_pointer_type (opaque_V2SI_type_node);
5486 struct builtin_description *d;
5489 tree v2si_ftype_4_v2si
5490 = build_function_type
5491 (opaque_V2SI_type_node,
5492 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5493 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5494 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5495 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5498 tree v2sf_ftype_4_v2sf
5499 = build_function_type
5500 (opaque_V2SF_type_node,
5501 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5502 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5503 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5504 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5507 tree int_ftype_int_v2si_v2si
5508 = build_function_type
5510 tree_cons (NULL_TREE, integer_type_node,
5511 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5512 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5515 tree int_ftype_int_v2sf_v2sf
5516 = build_function_type
5518 tree_cons (NULL_TREE, integer_type_node,
5519 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5520 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5523 tree void_ftype_v2si_puint_int
5524 = build_function_type (void_type_node,
5525 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5526 tree_cons (NULL_TREE, puint_type_node,
5527 tree_cons (NULL_TREE,
5531 tree void_ftype_v2si_puint_char
5532 = build_function_type (void_type_node,
5533 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5534 tree_cons (NULL_TREE, puint_type_node,
5535 tree_cons (NULL_TREE,
5539 tree void_ftype_v2si_pv2si_int
5540 = build_function_type (void_type_node,
5541 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5542 tree_cons (NULL_TREE, pv2si_type_node,
5543 tree_cons (NULL_TREE,
5547 tree void_ftype_v2si_pv2si_char
5548 = build_function_type (void_type_node,
5549 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5550 tree_cons (NULL_TREE, pv2si_type_node,
5551 tree_cons (NULL_TREE,
5556 = build_function_type (void_type_node,
5557 tree_cons (NULL_TREE, integer_type_node, endlink));
5560 = build_function_type (integer_type_node,
5561 tree_cons (NULL_TREE, void_type_node, endlink));
5563 tree v2si_ftype_pv2si_int
5564 = build_function_type (opaque_V2SI_type_node,
5565 tree_cons (NULL_TREE, pv2si_type_node,
5566 tree_cons (NULL_TREE, integer_type_node,
5569 tree v2si_ftype_puint_int
5570 = build_function_type (opaque_V2SI_type_node,
5571 tree_cons (NULL_TREE, puint_type_node,
5572 tree_cons (NULL_TREE, integer_type_node,
5575 tree v2si_ftype_pushort_int
5576 = build_function_type (opaque_V2SI_type_node,
5577 tree_cons (NULL_TREE, pushort_type_node,
5578 tree_cons (NULL_TREE, integer_type_node,
5581 /* The initialization of the simple binary and unary builtins is
5582 done in rs6000_common_init_builtins, but we have to enable the
5583 mask bits here manually because we have run out of `target_flags'
5584 bits. We really need to redesign this mask business. */
5586 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5587 ARRAY_SIZE (bdesc_2arg),
5590 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5591 ARRAY_SIZE (bdesc_1arg),
5593 SPE_BUILTIN_EVSUBFUSIAAW);
5594 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5595 ARRAY_SIZE (bdesc_spe_predicates),
5596 SPE_BUILTIN_EVCMPEQ,
5597 SPE_BUILTIN_EVFSTSTLT);
5598 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5599 ARRAY_SIZE (bdesc_spe_evsel),
5600 SPE_BUILTIN_EVSEL_CMPGTS,
5601 SPE_BUILTIN_EVSEL_FSTSTEQ);
5603 /* Initialize irregular SPE builtins. */
5605 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5606 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5607 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5608 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5609 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5610 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5611 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5612 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5613 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5614 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5615 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5616 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5617 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5618 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5619 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5620 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5623 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5624 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5625 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5626 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5627 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5628 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5629 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5630 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5631 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5632 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5633 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5634 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5635 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5636 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5637 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5638 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5639 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5640 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5641 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5642 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5643 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5644 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5647 d = (struct builtin_description *) bdesc_spe_predicates;
5648 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5652 switch (insn_data[d->icode].operand[1].mode)
5655 type = int_ftype_int_v2si_v2si;
5658 type = int_ftype_int_v2sf_v2sf;
5664 def_builtin (d->mask, d->name, type, d->code);
5667 /* Evsel predicates. */
5668 d = (struct builtin_description *) bdesc_spe_evsel;
5669 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5673 switch (insn_data[d->icode].operand[1].mode)
5676 type = v2si_ftype_4_v2si;
5679 type = v2sf_ftype_4_v2sf;
5685 def_builtin (d->mask, d->name, type, d->code);
5690 altivec_init_builtins ()
5692 struct builtin_description *d;
5693 struct builtin_description_predicates *dp;
5695 tree pfloat_type_node = build_pointer_type (float_type_node);
5696 tree pint_type_node = build_pointer_type (integer_type_node);
5697 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5698 tree pchar_type_node = build_pointer_type (char_type_node);
5700 tree pvoid_type_node = build_pointer_type (void_type_node);
5702 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5703 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5704 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5705 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5707 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5709 tree int_ftype_int_v4si_v4si
5710 = build_function_type_list (integer_type_node,
5711 integer_type_node, V4SI_type_node,
5712 V4SI_type_node, NULL_TREE);
5713 tree v4sf_ftype_pcfloat
5714 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5715 tree void_ftype_pfloat_v4sf
5716 = build_function_type_list (void_type_node,
5717 pfloat_type_node, V4SF_type_node, NULL_TREE);
5718 tree v4si_ftype_pcint
5719 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5720 tree void_ftype_pint_v4si
5721 = build_function_type_list (void_type_node,
5722 pint_type_node, V4SI_type_node, NULL_TREE);
5723 tree v8hi_ftype_pcshort
5724 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5725 tree void_ftype_pshort_v8hi
5726 = build_function_type_list (void_type_node,
5727 pshort_type_node, V8HI_type_node, NULL_TREE);
5728 tree v16qi_ftype_pcchar
5729 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5730 tree void_ftype_pchar_v16qi
5731 = build_function_type_list (void_type_node,
5732 pchar_type_node, V16QI_type_node, NULL_TREE);
5733 tree void_ftype_v4si
5734 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5735 tree v8hi_ftype_void
5736 = build_function_type (V8HI_type_node, void_list_node);
5737 tree void_ftype_void
5738 = build_function_type (void_type_node, void_list_node);
5740 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5742 tree v16qi_ftype_int_pcvoid
5743 = build_function_type_list (V16QI_type_node,
5744 integer_type_node, pcvoid_type_node, NULL_TREE);
5745 tree v8hi_ftype_int_pcvoid
5746 = build_function_type_list (V8HI_type_node,
5747 integer_type_node, pcvoid_type_node, NULL_TREE);
5748 tree v4si_ftype_int_pcvoid
5749 = build_function_type_list (V4SI_type_node,
5750 integer_type_node, pcvoid_type_node, NULL_TREE);
5752 tree void_ftype_v4si_int_pvoid
5753 = build_function_type_list (void_type_node,
5754 V4SI_type_node, integer_type_node,
5755 pvoid_type_node, NULL_TREE);
5756 tree void_ftype_v16qi_int_pvoid
5757 = build_function_type_list (void_type_node,
5758 V16QI_type_node, integer_type_node,
5759 pvoid_type_node, NULL_TREE);
5760 tree void_ftype_v8hi_int_pvoid
5761 = build_function_type_list (void_type_node,
5762 V8HI_type_node, integer_type_node,
5763 pvoid_type_node, NULL_TREE);
5764 tree int_ftype_int_v8hi_v8hi
5765 = build_function_type_list (integer_type_node,
5766 integer_type_node, V8HI_type_node,
5767 V8HI_type_node, NULL_TREE);
5768 tree int_ftype_int_v16qi_v16qi
5769 = build_function_type_list (integer_type_node,
5770 integer_type_node, V16QI_type_node,
5771 V16QI_type_node, NULL_TREE);
5772 tree int_ftype_int_v4sf_v4sf
5773 = build_function_type_list (integer_type_node,
5774 integer_type_node, V4SF_type_node,
5775 V4SF_type_node, NULL_TREE);
5776 tree v4si_ftype_v4si
5777 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5778 tree v8hi_ftype_v8hi
5779 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5780 tree v16qi_ftype_v16qi
5781 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5782 tree v4sf_ftype_v4sf
5783 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5784 tree void_ftype_pcvoid_int_char
5785 = build_function_type_list (void_type_node,
5786 pcvoid_type_node, integer_type_node,
5787 char_type_node, NULL_TREE);
5789 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5790 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5791 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5792 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5793 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5794 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5795 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
5796 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5797 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
5798 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5799 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
5800 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5801 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
5802 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5803 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
5804 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5805 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5806 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5807 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5808 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5809 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
5810 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
5811 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
5812 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
5813 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
5814 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
5815 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
5816 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5818 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5820 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5822 /* Add the DST variants. */
5823 d = (struct builtin_description *) bdesc_dst;
5824 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5825 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
5827 /* Initialize the predicates. */
5828 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5829 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5831 enum machine_mode mode1;
5834 mode1 = insn_data[dp->icode].operand[1].mode;
5839 type = int_ftype_int_v4si_v4si;
5842 type = int_ftype_int_v8hi_v8hi;
5845 type = int_ftype_int_v16qi_v16qi;
5848 type = int_ftype_int_v4sf_v4sf;
5854 def_builtin (dp->mask, dp->name, type, dp->code);
5857 /* Initialize the abs* operators. */
5858 d = (struct builtin_description *) bdesc_abs;
5859 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5861 enum machine_mode mode0;
5864 mode0 = insn_data[d->icode].operand[0].mode;
5869 type = v4si_ftype_v4si;
5872 type = v8hi_ftype_v8hi;
5875 type = v16qi_ftype_v16qi;
5878 type = v4sf_ftype_v4sf;
5884 def_builtin (d->mask, d->name, type, d->code);
5889 rs6000_common_init_builtins ()
5891 struct builtin_description *d;
5894 tree v4sf_ftype_v4sf_v4sf_v16qi
5895 = build_function_type_list (V4SF_type_node,
5896 V4SF_type_node, V4SF_type_node,
5897 V16QI_type_node, NULL_TREE);
5898 tree v4si_ftype_v4si_v4si_v16qi
5899 = build_function_type_list (V4SI_type_node,
5900 V4SI_type_node, V4SI_type_node,
5901 V16QI_type_node, NULL_TREE);
5902 tree v8hi_ftype_v8hi_v8hi_v16qi
5903 = build_function_type_list (V8HI_type_node,
5904 V8HI_type_node, V8HI_type_node,
5905 V16QI_type_node, NULL_TREE);
5906 tree v16qi_ftype_v16qi_v16qi_v16qi
5907 = build_function_type_list (V16QI_type_node,
5908 V16QI_type_node, V16QI_type_node,
5909 V16QI_type_node, NULL_TREE);
5910 tree v4si_ftype_char
5911 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5912 tree v8hi_ftype_char
5913 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5914 tree v16qi_ftype_char
5915 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5916 tree v8hi_ftype_v16qi
5917 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5918 tree v4sf_ftype_v4sf
5919 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5921 tree v2si_ftype_v2si_v2si
5922 = build_function_type_list (opaque_V2SI_type_node,
5923 opaque_V2SI_type_node,
5924 opaque_V2SI_type_node, NULL_TREE);
5926 tree v2sf_ftype_v2sf_v2sf
5927 = build_function_type_list (opaque_V2SF_type_node,
5928 opaque_V2SF_type_node,
5929 opaque_V2SF_type_node, NULL_TREE);
5931 tree v2si_ftype_int_int
5932 = build_function_type_list (opaque_V2SI_type_node,
5933 integer_type_node, integer_type_node,
5936 tree v2si_ftype_v2si
5937 = build_function_type_list (opaque_V2SI_type_node,
5938 opaque_V2SI_type_node, NULL_TREE);
5940 tree v2sf_ftype_v2sf
5941 = build_function_type_list (opaque_V2SF_type_node,
5942 opaque_V2SF_type_node, NULL_TREE);
5944 tree v2sf_ftype_v2si
5945 = build_function_type_list (opaque_V2SF_type_node,
5946 opaque_V2SI_type_node, NULL_TREE);
5948 tree v2si_ftype_v2sf
5949 = build_function_type_list (opaque_V2SI_type_node,
5950 opaque_V2SF_type_node, NULL_TREE);
5952 tree v2si_ftype_v2si_char
5953 = build_function_type_list (opaque_V2SI_type_node,
5954 opaque_V2SI_type_node,
5955 char_type_node, NULL_TREE);
5957 tree v2si_ftype_int_char
5958 = build_function_type_list (opaque_V2SI_type_node,
5959 integer_type_node, char_type_node, NULL_TREE);
5961 tree v2si_ftype_char
5962 = build_function_type_list (opaque_V2SI_type_node,
5963 char_type_node, NULL_TREE);
5965 tree int_ftype_int_int
5966 = build_function_type_list (integer_type_node,
5967 integer_type_node, integer_type_node,
5970 tree v4si_ftype_v4si_v4si
5971 = build_function_type_list (V4SI_type_node,
5972 V4SI_type_node, V4SI_type_node, NULL_TREE);
5973 tree v4sf_ftype_v4si_char
5974 = build_function_type_list (V4SF_type_node,
5975 V4SI_type_node, char_type_node, NULL_TREE);
5976 tree v4si_ftype_v4sf_char
5977 = build_function_type_list (V4SI_type_node,
5978 V4SF_type_node, char_type_node, NULL_TREE);
5979 tree v4si_ftype_v4si_char
5980 = build_function_type_list (V4SI_type_node,
5981 V4SI_type_node, char_type_node, NULL_TREE);
5982 tree v8hi_ftype_v8hi_char
5983 = build_function_type_list (V8HI_type_node,
5984 V8HI_type_node, char_type_node, NULL_TREE);
5985 tree v16qi_ftype_v16qi_char
5986 = build_function_type_list (V16QI_type_node,
5987 V16QI_type_node, char_type_node, NULL_TREE);
5988 tree v16qi_ftype_v16qi_v16qi_char
5989 = build_function_type_list (V16QI_type_node,
5990 V16QI_type_node, V16QI_type_node,
5991 char_type_node, NULL_TREE);
5992 tree v8hi_ftype_v8hi_v8hi_char
5993 = build_function_type_list (V8HI_type_node,
5994 V8HI_type_node, V8HI_type_node,
5995 char_type_node, NULL_TREE);
5996 tree v4si_ftype_v4si_v4si_char
5997 = build_function_type_list (V4SI_type_node,
5998 V4SI_type_node, V4SI_type_node,
5999 char_type_node, NULL_TREE);
6000 tree v4sf_ftype_v4sf_v4sf_char
6001 = build_function_type_list (V4SF_type_node,
6002 V4SF_type_node, V4SF_type_node,
6003 char_type_node, NULL_TREE);
6004 tree v4sf_ftype_v4sf_v4sf
6005 = build_function_type_list (V4SF_type_node,
6006 V4SF_type_node, V4SF_type_node, NULL_TREE);
6007 tree v4sf_ftype_v4sf_v4sf_v4si
6008 = build_function_type_list (V4SF_type_node,
6009 V4SF_type_node, V4SF_type_node,
6010 V4SI_type_node, NULL_TREE);
6011 tree v4sf_ftype_v4sf_v4sf_v4sf
6012 = build_function_type_list (V4SF_type_node,
6013 V4SF_type_node, V4SF_type_node,
6014 V4SF_type_node, NULL_TREE);
6015 tree v4si_ftype_v4si_v4si_v4si
6016 = build_function_type_list (V4SI_type_node,
6017 V4SI_type_node, V4SI_type_node,
6018 V4SI_type_node, NULL_TREE);
6019 tree v8hi_ftype_v8hi_v8hi
6020 = build_function_type_list (V8HI_type_node,
6021 V8HI_type_node, V8HI_type_node, NULL_TREE);
6022 tree v8hi_ftype_v8hi_v8hi_v8hi
6023 = build_function_type_list (V8HI_type_node,
6024 V8HI_type_node, V8HI_type_node,
6025 V8HI_type_node, NULL_TREE);
6026 tree v4si_ftype_v8hi_v8hi_v4si
6027 = build_function_type_list (V4SI_type_node,
6028 V8HI_type_node, V8HI_type_node,
6029 V4SI_type_node, NULL_TREE);
6030 tree v4si_ftype_v16qi_v16qi_v4si
6031 = build_function_type_list (V4SI_type_node,
6032 V16QI_type_node, V16QI_type_node,
6033 V4SI_type_node, NULL_TREE);
6034 tree v16qi_ftype_v16qi_v16qi
6035 = build_function_type_list (V16QI_type_node,
6036 V16QI_type_node, V16QI_type_node, NULL_TREE);
6037 tree v4si_ftype_v4sf_v4sf
6038 = build_function_type_list (V4SI_type_node,
6039 V4SF_type_node, V4SF_type_node, NULL_TREE);
6040 tree v8hi_ftype_v16qi_v16qi
6041 = build_function_type_list (V8HI_type_node,
6042 V16QI_type_node, V16QI_type_node, NULL_TREE);
6043 tree v4si_ftype_v8hi_v8hi
6044 = build_function_type_list (V4SI_type_node,
6045 V8HI_type_node, V8HI_type_node, NULL_TREE);
6046 tree v8hi_ftype_v4si_v4si
6047 = build_function_type_list (V8HI_type_node,
6048 V4SI_type_node, V4SI_type_node, NULL_TREE);
6049 tree v16qi_ftype_v8hi_v8hi
6050 = build_function_type_list (V16QI_type_node,
6051 V8HI_type_node, V8HI_type_node, NULL_TREE);
6052 tree v4si_ftype_v16qi_v4si
6053 = build_function_type_list (V4SI_type_node,
6054 V16QI_type_node, V4SI_type_node, NULL_TREE);
6055 tree v4si_ftype_v16qi_v16qi
6056 = build_function_type_list (V4SI_type_node,
6057 V16QI_type_node, V16QI_type_node, NULL_TREE);
6058 tree v4si_ftype_v8hi_v4si
6059 = build_function_type_list (V4SI_type_node,
6060 V8HI_type_node, V4SI_type_node, NULL_TREE);
6061 tree v4si_ftype_v8hi
6062 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6063 tree int_ftype_v4si_v4si
6064 = build_function_type_list (integer_type_node,
6065 V4SI_type_node, V4SI_type_node, NULL_TREE);
6066 tree int_ftype_v4sf_v4sf
6067 = build_function_type_list (integer_type_node,
6068 V4SF_type_node, V4SF_type_node, NULL_TREE);
6069 tree int_ftype_v16qi_v16qi
6070 = build_function_type_list (integer_type_node,
6071 V16QI_type_node, V16QI_type_node, NULL_TREE);
6072 tree int_ftype_v8hi_v8hi
6073 = build_function_type_list (integer_type_node,
6074 V8HI_type_node, V8HI_type_node, NULL_TREE);
6076 /* Add the simple ternary operators. */
6077 d = (struct builtin_description *) bdesc_3arg;
6078 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6081 enum machine_mode mode0, mode1, mode2, mode3;
6084 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6087 mode0 = insn_data[d->icode].operand[0].mode;
6088 mode1 = insn_data[d->icode].operand[1].mode;
6089 mode2 = insn_data[d->icode].operand[2].mode;
6090 mode3 = insn_data[d->icode].operand[3].mode;
6092 /* When all four are of the same mode. */
6093 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6098 type = v4si_ftype_v4si_v4si_v4si;
6101 type = v4sf_ftype_v4sf_v4sf_v4sf;
6104 type = v8hi_ftype_v8hi_v8hi_v8hi;
6107 type = v16qi_ftype_v16qi_v16qi_v16qi;
6113 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6118 type = v4si_ftype_v4si_v4si_v16qi;
6121 type = v4sf_ftype_v4sf_v4sf_v16qi;
6124 type = v8hi_ftype_v8hi_v8hi_v16qi;
6127 type = v16qi_ftype_v16qi_v16qi_v16qi;
6133 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6134 && mode3 == V4SImode)
6135 type = v4si_ftype_v16qi_v16qi_v4si;
6136 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6137 && mode3 == V4SImode)
6138 type = v4si_ftype_v8hi_v8hi_v4si;
6139 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6140 && mode3 == V4SImode)
6141 type = v4sf_ftype_v4sf_v4sf_v4si;
6143 /* vchar, vchar, vchar, 4 bit literal. */
6144 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6146 type = v16qi_ftype_v16qi_v16qi_char;
6148 /* vshort, vshort, vshort, 4 bit literal. */
6149 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6151 type = v8hi_ftype_v8hi_v8hi_char;
6153 /* vint, vint, vint, 4 bit literal. */
6154 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6156 type = v4si_ftype_v4si_v4si_char;
6158 /* vfloat, vfloat, vfloat, 4 bit literal. */
6159 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6161 type = v4sf_ftype_v4sf_v4sf_char;
6166 def_builtin (d->mask, d->name, type, d->code);
6169 /* Add the simple binary operators. */
6170 d = (struct builtin_description *) bdesc_2arg;
6171 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6173 enum machine_mode mode0, mode1, mode2;
6176 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6179 mode0 = insn_data[d->icode].operand[0].mode;
6180 mode1 = insn_data[d->icode].operand[1].mode;
6181 mode2 = insn_data[d->icode].operand[2].mode;
6183 /* When all three operands are of the same mode. */
6184 if (mode0 == mode1 && mode1 == mode2)
6189 type = v4sf_ftype_v4sf_v4sf;
6192 type = v4si_ftype_v4si_v4si;
6195 type = v16qi_ftype_v16qi_v16qi;
6198 type = v8hi_ftype_v8hi_v8hi;
6201 type = v2si_ftype_v2si_v2si;
6204 type = v2sf_ftype_v2sf_v2sf;
6207 type = int_ftype_int_int;
6214 /* A few other combos we really don't want to do manually. */
6216 /* vint, vfloat, vfloat. */
6217 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6218 type = v4si_ftype_v4sf_v4sf;
6220 /* vshort, vchar, vchar. */
6221 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6222 type = v8hi_ftype_v16qi_v16qi;
6224 /* vint, vshort, vshort. */
6225 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6226 type = v4si_ftype_v8hi_v8hi;
6228 /* vshort, vint, vint. */
6229 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6230 type = v8hi_ftype_v4si_v4si;
6232 /* vchar, vshort, vshort. */
6233 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6234 type = v16qi_ftype_v8hi_v8hi;
6236 /* vint, vchar, vint. */
6237 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6238 type = v4si_ftype_v16qi_v4si;
6240 /* vint, vchar, vchar. */
6241 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6242 type = v4si_ftype_v16qi_v16qi;
6244 /* vint, vshort, vint. */
6245 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6246 type = v4si_ftype_v8hi_v4si;
6248 /* vint, vint, 5 bit literal. */
6249 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6250 type = v4si_ftype_v4si_char;
6252 /* vshort, vshort, 5 bit literal. */
6253 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6254 type = v8hi_ftype_v8hi_char;
6256 /* vchar, vchar, 5 bit literal. */
6257 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6258 type = v16qi_ftype_v16qi_char;
6260 /* vfloat, vint, 5 bit literal. */
6261 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6262 type = v4sf_ftype_v4si_char;
6264 /* vint, vfloat, 5 bit literal. */
6265 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6266 type = v4si_ftype_v4sf_char;
6268 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6269 type = v2si_ftype_int_int;
6271 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6272 type = v2si_ftype_v2si_char;
6274 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6275 type = v2si_ftype_int_char;
6278 else if (mode0 == SImode)
6283 type = int_ftype_v4si_v4si;
6286 type = int_ftype_v4sf_v4sf;
6289 type = int_ftype_v16qi_v16qi;
6292 type = int_ftype_v8hi_v8hi;
6302 def_builtin (d->mask, d->name, type, d->code);
6305 /* Add the simple unary operators. */
6306 d = (struct builtin_description *) bdesc_1arg;
6307 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6309 enum machine_mode mode0, mode1;
6312 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6315 mode0 = insn_data[d->icode].operand[0].mode;
6316 mode1 = insn_data[d->icode].operand[1].mode;
6318 if (mode0 == V4SImode && mode1 == QImode)
6319 type = v4si_ftype_char;
6320 else if (mode0 == V8HImode && mode1 == QImode)
6321 type = v8hi_ftype_char;
6322 else if (mode0 == V16QImode && mode1 == QImode)
6323 type = v16qi_ftype_char;
6324 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6325 type = v4sf_ftype_v4sf;
6326 else if (mode0 == V8HImode && mode1 == V16QImode)
6327 type = v8hi_ftype_v16qi;
6328 else if (mode0 == V4SImode && mode1 == V8HImode)
6329 type = v4si_ftype_v8hi;
6330 else if (mode0 == V2SImode && mode1 == V2SImode)
6331 type = v2si_ftype_v2si;
6332 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6333 type = v2sf_ftype_v2sf;
6334 else if (mode0 == V2SFmode && mode1 == V2SImode)
6335 type = v2sf_ftype_v2si;
6336 else if (mode0 == V2SImode && mode1 == V2SFmode)
6337 type = v2si_ftype_v2sf;
6338 else if (mode0 == V2SImode && mode1 == QImode)
6339 type = v2si_ftype_char;
6343 def_builtin (d->mask, d->name, type, d->code);
6348 /* Expand a block move operation, and return 1 if successful. Return 0
6349 if we should let the compiler generate normal code.
6351 operands[0] is the destination
6352 operands[1] is the source
6353 operands[2] is the length
6354 operands[3] is the alignment */
6356 #define MAX_MOVE_REG 4
6359 expand_block_move (operands)
6362 rtx orig_dest = operands[0];
6363 rtx orig_src = operands[1];
6364 rtx bytes_rtx = operands[2];
6365 rtx align_rtx = operands[3];
6366 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6372 /* If this is not a fixed size move, just call memcpy */
6376 /* If this is not a fixed size alignment, abort */
6377 if (GET_CODE (align_rtx) != CONST_INT)
6379 align = INTVAL (align_rtx);
6381 /* Anything to move? */
6382 bytes = INTVAL (bytes_rtx);
6386 /* store_one_arg depends on expand_block_move to handle at least the size of
6387 reg_parm_stack_space. */
6388 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6391 if (TARGET_STRING) /* string instructions are available */
6393 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6396 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6397 rtx (*mov) PARAMS ((rtx, rtx));
6399 enum machine_mode mode = BLKmode;
6402 if (bytes > 24 /* move up to 32 bytes at a time */
6410 && ! fixed_regs[12])
6412 move_bytes = (bytes > 32) ? 32 : bytes;
6413 gen_func.movstrsi = gen_movstrsi_8reg;
6415 else if (bytes > 16 /* move up to 24 bytes at a time */
6421 && ! fixed_regs[10])
6423 move_bytes = (bytes > 24) ? 24 : bytes;
6424 gen_func.movstrsi = gen_movstrsi_6reg;
6426 else if (bytes > 8 /* move up to 16 bytes at a time */
6432 move_bytes = (bytes > 16) ? 16 : bytes;
6433 gen_func.movstrsi = gen_movstrsi_4reg;
6435 else if (bytes >= 8 && TARGET_POWERPC64
6436 /* 64-bit loads and stores require word-aligned
6438 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6442 gen_func.mov = gen_movdi;
6444 else if (bytes > 4 && !TARGET_POWERPC64)
6445 { /* move up to 8 bytes at a time */
6446 move_bytes = (bytes > 8) ? 8 : bytes;
6447 gen_func.movstrsi = gen_movstrsi_2reg;
6449 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6450 { /* move 4 bytes */
6453 gen_func.mov = gen_movsi;
6455 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6456 { /* move 2 bytes */
6459 gen_func.mov = gen_movhi;
6461 else if (bytes == 1) /* move 1 byte */
6465 gen_func.mov = gen_movqi;
6468 { /* move up to 4 bytes at a time */
6469 move_bytes = (bytes > 4) ? 4 : bytes;
6470 gen_func.movstrsi = gen_movstrsi_1reg;
6473 src = adjust_address (orig_src, mode, offset);
6474 dest = adjust_address (orig_dest, mode, offset);
6476 if (mode == BLKmode)
6478 /* Move the address into scratch registers. The movstrsi
6479 patterns require zero offset. */
6480 if (!REG_P (XEXP (src, 0)))
6482 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6483 src = replace_equiv_address (src, src_reg);
6485 set_mem_size (src, GEN_INT (move_bytes));
6487 if (!REG_P (XEXP (dest, 0)))
6489 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6490 dest = replace_equiv_address (dest, dest_reg);
6492 set_mem_size (dest, GEN_INT (move_bytes));
6494 emit_insn ((*gen_func.movstrsi) (dest, src,
6495 GEN_INT (move_bytes & 31),
6500 rtx tmp_reg = gen_reg_rtx (mode);
6502 emit_insn ((*gen_func.mov) (tmp_reg, src));
6503 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6508 else /* string instructions not available */
6510 rtx stores[MAX_MOVE_REG];
6514 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6516 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6517 enum machine_mode mode;
6518 rtx src, dest, tmp_reg;
6520 /* Generate the appropriate load and store, saving the stores
6522 if (bytes >= 8 && TARGET_POWERPC64
6523 /* 64-bit loads and stores require word-aligned
6525 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6529 gen_mov_func = gen_movdi;
6531 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6535 gen_mov_func = gen_movsi;
6537 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6541 gen_mov_func = gen_movhi;
6547 gen_mov_func = gen_movqi;
6550 src = adjust_address (orig_src, mode, offset);
6551 dest = adjust_address (orig_dest, mode, offset);
6552 tmp_reg = gen_reg_rtx (mode);
6554 emit_insn ((*gen_mov_func) (tmp_reg, src));
6555 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6557 if (num_reg >= MAX_MOVE_REG)
6559 for (i = 0; i < num_reg; i++)
6560 emit_insn (stores[i]);
6565 for (i = 0; i < num_reg; i++)
6566 emit_insn (stores[i]);
6573 /* Return 1 if OP is a load multiple operation. It is known to be a
6574 PARALLEL and the first section will be tested. */
6577 load_multiple_operation (op, mode)
6579 enum machine_mode mode ATTRIBUTE_UNUSED;
6581 int count = XVECLEN (op, 0);
6582 unsigned int dest_regno;
6586 /* Perform a quick check so we don't blow up below. */
6588 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6589 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6590 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6593 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6594 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6596 for (i = 1; i < count; i++)
6598 rtx elt = XVECEXP (op, 0, i);
6600 if (GET_CODE (elt) != SET
6601 || GET_CODE (SET_DEST (elt)) != REG
6602 || GET_MODE (SET_DEST (elt)) != SImode
6603 || REGNO (SET_DEST (elt)) != dest_regno + i
6604 || GET_CODE (SET_SRC (elt)) != MEM
6605 || GET_MODE (SET_SRC (elt)) != SImode
6606 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6607 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6608 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6609 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6616 /* Similar, but tests for store multiple. Here, the second vector element
6617 is a CLOBBER. It will be tested later. */
6620 store_multiple_operation (op, mode)
6622 enum machine_mode mode ATTRIBUTE_UNUSED;
6624 int count = XVECLEN (op, 0) - 1;
6625 unsigned int src_regno;
6629 /* Perform a quick check so we don't blow up below. */
6631 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6632 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6633 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6636 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6637 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6639 for (i = 1; i < count; i++)
6641 rtx elt = XVECEXP (op, 0, i + 1);
6643 if (GET_CODE (elt) != SET
6644 || GET_CODE (SET_SRC (elt)) != REG
6645 || GET_MODE (SET_SRC (elt)) != SImode
6646 || REGNO (SET_SRC (elt)) != src_regno + i
6647 || GET_CODE (SET_DEST (elt)) != MEM
6648 || GET_MODE (SET_DEST (elt)) != SImode
6649 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6650 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6651 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6652 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6659 /* Return a string to perform a load_multiple operation.
6660 operands[0] is the vector.
6661 operands[1] is the source address.
6662 operands[2] is the first destination register. */
6665 rs6000_output_load_multiple (operands)
6668 /* We have to handle the case where the pseudo used to contain the address
6669 is assigned to one of the output registers. */
6671 int words = XVECLEN (operands[0], 0);
6674 if (XVECLEN (operands[0], 0) == 1)
6675 return "{l|lwz} %2,0(%1)";
6677 for (i = 0; i < words; i++)
6678 if (refers_to_regno_p (REGNO (operands[2]) + i,
6679 REGNO (operands[2]) + i + 1, operands[1], 0))
6683 xop[0] = GEN_INT (4 * (words-1));
6684 xop[1] = operands[1];
6685 xop[2] = operands[2];
6686 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6691 xop[0] = GEN_INT (4 * (words-1));
6692 xop[1] = operands[1];
6693 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6694 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6699 for (j = 0; j < words; j++)
6702 xop[0] = GEN_INT (j * 4);
6703 xop[1] = operands[1];
6704 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6705 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6707 xop[0] = GEN_INT (i * 4);
6708 xop[1] = operands[1];
6709 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6714 return "{lsi|lswi} %2,%1,%N0";
6717 /* Return 1 for a parallel vrsave operation. */
6720 vrsave_operation (op, mode)
6722 enum machine_mode mode ATTRIBUTE_UNUSED;
6724 int count = XVECLEN (op, 0);
6725 unsigned int dest_regno, src_regno;
6729 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6730 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6731 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6734 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6735 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6737 if (dest_regno != VRSAVE_REGNO
6738 && src_regno != VRSAVE_REGNO)
6741 for (i = 1; i < count; i++)
6743 rtx elt = XVECEXP (op, 0, i);
6745 if (GET_CODE (elt) != CLOBBER
6746 && GET_CODE (elt) != SET)
6753 /* Return 1 for an PARALLEL suitable for mtcrf. */
6756 mtcrf_operation (op, mode)
6758 enum machine_mode mode ATTRIBUTE_UNUSED;
6760 int count = XVECLEN (op, 0);
6764 /* Perform a quick check so we don't blow up below. */
6766 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6767 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6768 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6770 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6772 if (GET_CODE (src_reg) != REG
6773 || GET_MODE (src_reg) != SImode
6774 || ! INT_REGNO_P (REGNO (src_reg)))
6777 for (i = 0; i < count; i++)
6779 rtx exp = XVECEXP (op, 0, i);
6783 if (GET_CODE (exp) != SET
6784 || GET_CODE (SET_DEST (exp)) != REG
6785 || GET_MODE (SET_DEST (exp)) != CCmode
6786 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6788 unspec = SET_SRC (exp);
6789 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6791 if (GET_CODE (unspec) != UNSPEC
6792 || XINT (unspec, 1) != 20
6793 || XVECLEN (unspec, 0) != 2
6794 || XVECEXP (unspec, 0, 0) != src_reg
6795 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6796 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6802 /* Return 1 for an PARALLEL suitable for lmw. */
6805 lmw_operation (op, mode)
6807 enum machine_mode mode ATTRIBUTE_UNUSED;
6809 int count = XVECLEN (op, 0);
6810 unsigned int dest_regno;
6812 unsigned int base_regno;
6813 HOST_WIDE_INT offset;
6816 /* Perform a quick check so we don't blow up below. */
6818 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6819 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6820 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6823 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6824 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6827 || count != 32 - (int) dest_regno)
6830 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6833 base_regno = REGNO (src_addr);
6834 if (base_regno == 0)
6837 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6839 offset = INTVAL (XEXP (src_addr, 1));
6840 base_regno = REGNO (XEXP (src_addr, 0));
6845 for (i = 0; i < count; i++)
6847 rtx elt = XVECEXP (op, 0, i);
6850 HOST_WIDE_INT newoffset;
6852 if (GET_CODE (elt) != SET
6853 || GET_CODE (SET_DEST (elt)) != REG
6854 || GET_MODE (SET_DEST (elt)) != SImode
6855 || REGNO (SET_DEST (elt)) != dest_regno + i
6856 || GET_CODE (SET_SRC (elt)) != MEM
6857 || GET_MODE (SET_SRC (elt)) != SImode)
6859 newaddr = XEXP (SET_SRC (elt), 0);
6860 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6865 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6867 addr_reg = XEXP (newaddr, 0);
6868 newoffset = INTVAL (XEXP (newaddr, 1));
6872 if (REGNO (addr_reg) != base_regno
6873 || newoffset != offset + 4 * i)
6880 /* Return 1 for an PARALLEL suitable for stmw. */
6883 stmw_operation (op, mode)
6885 enum machine_mode mode ATTRIBUTE_UNUSED;
6887 int count = XVECLEN (op, 0);
6888 unsigned int src_regno;
6890 unsigned int base_regno;
6891 HOST_WIDE_INT offset;
6894 /* Perform a quick check so we don't blow up below. */
6896 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6897 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6898 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6901 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6902 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6905 || count != 32 - (int) src_regno)
6908 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6911 base_regno = REGNO (dest_addr);
6912 if (base_regno == 0)
6915 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6917 offset = INTVAL (XEXP (dest_addr, 1));
6918 base_regno = REGNO (XEXP (dest_addr, 0));
6923 for (i = 0; i < count; i++)
6925 rtx elt = XVECEXP (op, 0, i);
6928 HOST_WIDE_INT newoffset;
6930 if (GET_CODE (elt) != SET
6931 || GET_CODE (SET_SRC (elt)) != REG
6932 || GET_MODE (SET_SRC (elt)) != SImode
6933 || REGNO (SET_SRC (elt)) != src_regno + i
6934 || GET_CODE (SET_DEST (elt)) != MEM
6935 || GET_MODE (SET_DEST (elt)) != SImode)
6937 newaddr = XEXP (SET_DEST (elt), 0);
6938 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6943 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6945 addr_reg = XEXP (newaddr, 0);
6946 newoffset = INTVAL (XEXP (newaddr, 1));
6950 if (REGNO (addr_reg) != base_regno
6951 || newoffset != offset + 4 * i)
6958 /* A validation routine: say whether CODE, a condition code, and MODE
6959 match. The other alternatives either don't make sense or should
6960 never be generated. */
6963 validate_condition_mode (code, mode)
6965 enum machine_mode mode;
6967 if (GET_RTX_CLASS (code) != '<'
6968 || GET_MODE_CLASS (mode) != MODE_CC)
6971 /* These don't make sense. */
6972 if ((code == GT || code == LT || code == GE || code == LE)
6973 && mode == CCUNSmode)
6976 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6977 && mode != CCUNSmode)
6980 if (mode != CCFPmode
6981 && (code == ORDERED || code == UNORDERED
6982 || code == UNEQ || code == LTGT
6983 || code == UNGT || code == UNLT
6984 || code == UNGE || code == UNLE))
6987 /* These should never be generated except for
6988 flag_unsafe_math_optimizations and flag_finite_math_only. */
6989 if (mode == CCFPmode
6990 && ! flag_unsafe_math_optimizations
6991 && ! flag_finite_math_only
6992 && (code == LE || code == GE
6993 || code == UNEQ || code == LTGT
6994 || code == UNGT || code == UNLT))
6997 /* These are invalid; the information is not there. */
6998 if (mode == CCEQmode
6999 && code != EQ && code != NE)
7003 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7004 We only check the opcode against the mode of the CC value here. */
7007 branch_comparison_operator (op, mode)
7009 enum machine_mode mode ATTRIBUTE_UNUSED;
7011 enum rtx_code code = GET_CODE (op);
7012 enum machine_mode cc_mode;
7014 if (GET_RTX_CLASS (code) != '<')
7017 cc_mode = GET_MODE (XEXP (op, 0));
7018 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7021 validate_condition_mode (code, cc_mode);
7026 /* Return 1 if OP is a comparison operation that is valid for a branch
7027 insn and which is true if the corresponding bit in the CC register
7031 branch_positive_comparison_operator (op, mode)
7033 enum machine_mode mode;
7037 if (! branch_comparison_operator (op, mode))
7040 code = GET_CODE (op);
7041 return (code == EQ || code == LT || code == GT
7042 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7043 || code == LTU || code == GTU
7044 || code == UNORDERED);
7047 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
7048 We check the opcode against the mode of the CC value and disallow EQ or
7049 NE comparisons for integers. */
7052 scc_comparison_operator (op, mode)
7054 enum machine_mode mode;
7056 enum rtx_code code = GET_CODE (op);
7057 enum machine_mode cc_mode;
7059 if (GET_MODE (op) != mode && mode != VOIDmode)
7062 if (GET_RTX_CLASS (code) != '<')
7065 cc_mode = GET_MODE (XEXP (op, 0));
7066 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7069 validate_condition_mode (code, cc_mode);
7071 if (code == NE && cc_mode != CCFPmode)
7078 trap_comparison_operator (op, mode)
7080 enum machine_mode mode;
7082 if (mode != VOIDmode && mode != GET_MODE (op))
7084 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7088 boolean_operator (op, mode)
7090 enum machine_mode mode ATTRIBUTE_UNUSED;
7092 enum rtx_code code = GET_CODE (op);
7093 return (code == AND || code == IOR || code == XOR);
7097 boolean_or_operator (op, mode)
7099 enum machine_mode mode ATTRIBUTE_UNUSED;
7101 enum rtx_code code = GET_CODE (op);
7102 return (code == IOR || code == XOR);
7106 min_max_operator (op, mode)
7108 enum machine_mode mode ATTRIBUTE_UNUSED;
7110 enum rtx_code code = GET_CODE (op);
7111 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7114 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7115 mask required to convert the result of a rotate insn into a shift
7116 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7119 includes_lshift_p (shiftop, andop)
7123 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7125 shift_mask <<= INTVAL (shiftop);
7127 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7130 /* Similar, but for right shift. */
7133 includes_rshift_p (shiftop, andop)
7137 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7139 shift_mask >>= INTVAL (shiftop);
7141 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7144 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7145 to perform a left shift. It must have exactly SHIFTOP least
7146 significant 0's, then one or more 1's, then zero or more 0's. */
7149 includes_rldic_lshift_p (shiftop, andop)
7153 if (GET_CODE (andop) == CONST_INT)
7155 HOST_WIDE_INT c, lsb, shift_mask;
7158 if (c == 0 || c == ~0)
7162 shift_mask <<= INTVAL (shiftop);
7164 /* Find the least significant one bit. */
7167 /* It must coincide with the LSB of the shift mask. */
7168 if (-lsb != shift_mask)
7171 /* Invert to look for the next transition (if any). */
7174 /* Remove the low group of ones (originally low group of zeros). */
7177 /* Again find the lsb, and check we have all 1's above. */
7181 else if (GET_CODE (andop) == CONST_DOUBLE
7182 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7184 HOST_WIDE_INT low, high, lsb;
7185 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7187 low = CONST_DOUBLE_LOW (andop);
7188 if (HOST_BITS_PER_WIDE_INT < 64)
7189 high = CONST_DOUBLE_HIGH (andop);
7191 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7192 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7195 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7197 shift_mask_high = ~0;
7198 if (INTVAL (shiftop) > 32)
7199 shift_mask_high <<= INTVAL (shiftop) - 32;
7203 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7210 return high == -lsb;
7213 shift_mask_low = ~0;
7214 shift_mask_low <<= INTVAL (shiftop);
7218 if (-lsb != shift_mask_low)
7221 if (HOST_BITS_PER_WIDE_INT < 64)
7226 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7229 return high == -lsb;
7233 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7239 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7240 to perform a left shift. It must have SHIFTOP or more least
7241 signifigant 0's, with the remainder of the word 1's. */
7244 includes_rldicr_lshift_p (shiftop, andop)
7248 if (GET_CODE (andop) == CONST_INT)
7250 HOST_WIDE_INT c, lsb, shift_mask;
7253 shift_mask <<= INTVAL (shiftop);
7256 /* Find the least signifigant one bit. */
7259 /* It must be covered by the shift mask.
7260 This test also rejects c == 0. */
7261 if ((lsb & shift_mask) == 0)
7264 /* Check we have all 1's above the transition, and reject all 1's. */
7265 return c == -lsb && lsb != 1;
7267 else if (GET_CODE (andop) == CONST_DOUBLE
7268 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7270 HOST_WIDE_INT low, lsb, shift_mask_low;
7272 low = CONST_DOUBLE_LOW (andop);
7274 if (HOST_BITS_PER_WIDE_INT < 64)
7276 HOST_WIDE_INT high, shift_mask_high;
7278 high = CONST_DOUBLE_HIGH (andop);
7282 shift_mask_high = ~0;
7283 if (INTVAL (shiftop) > 32)
7284 shift_mask_high <<= INTVAL (shiftop) - 32;
7288 if ((lsb & shift_mask_high) == 0)
7291 return high == -lsb;
7297 shift_mask_low = ~0;
7298 shift_mask_low <<= INTVAL (shiftop);
7302 if ((lsb & shift_mask_low) == 0)
7305 return low == -lsb && lsb != 1;
7311 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7312 for lfq and stfq insns.
7314 Note reg1 and reg2 *must* be hard registers. To be sure we will
7315 abort if we are passed pseudo registers. */
7318 registers_ok_for_quad_peep (reg1, reg2)
7321 /* We might have been passed a SUBREG. */
7322 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7325 return (REGNO (reg1) == REGNO (reg2) - 1);
7328 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7329 addr1 and addr2 must be in consecutive memory locations
7330 (addr2 == addr1 + 8). */
7333 addrs_ok_for_quad_peep (addr1, addr2)
7340 /* Extract an offset (if used) from the first addr. */
7341 if (GET_CODE (addr1) == PLUS)
7343 /* If not a REG, return zero. */
7344 if (GET_CODE (XEXP (addr1, 0)) != REG)
7348 reg1 = REGNO (XEXP (addr1, 0));
7349 /* The offset must be constant! */
7350 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7352 offset1 = INTVAL (XEXP (addr1, 1));
7355 else if (GET_CODE (addr1) != REG)
7359 reg1 = REGNO (addr1);
7360 /* This was a simple (mem (reg)) expression. Offset is 0. */
7364 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7365 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7366 register as addr1. */
7367 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7369 if (GET_CODE (addr2) != PLUS)
7372 if (GET_CODE (XEXP (addr2, 0)) != REG
7373 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7376 if (reg1 != REGNO (XEXP (addr2, 0)))
7379 /* The offset for the second addr must be 8 more than the first addr. */
7380 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7383 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7388 /* Return the register class of a scratch register needed to copy IN into
7389 or out of a register in CLASS in MODE. If it can be done directly,
7390 NO_REGS is returned. */
7393 secondary_reload_class (class, mode, in)
7394 enum reg_class class;
7395 enum machine_mode mode ATTRIBUTE_UNUSED;
7400 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7402 && MACHOPIC_INDIRECT
7406 /* We cannot copy a symbolic operand directly into anything
7407 other than BASE_REGS for TARGET_ELF. So indicate that a
7408 register from BASE_REGS is needed as an intermediate
7411 On Darwin, pic addresses require a load from memory, which
7412 needs a base register. */
7413 if (class != BASE_REGS
7414 && (GET_CODE (in) == SYMBOL_REF
7415 || GET_CODE (in) == HIGH
7416 || GET_CODE (in) == LABEL_REF
7417 || GET_CODE (in) == CONST))
7421 if (GET_CODE (in) == REG)
7424 if (regno >= FIRST_PSEUDO_REGISTER)
7426 regno = true_regnum (in);
7427 if (regno >= FIRST_PSEUDO_REGISTER)
7431 else if (GET_CODE (in) == SUBREG)
7433 regno = true_regnum (in);
7434 if (regno >= FIRST_PSEUDO_REGISTER)
7440 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7442 if (class == GENERAL_REGS || class == BASE_REGS
7443 || (regno >= 0 && INT_REGNO_P (regno)))
7446 /* Constants, memory, and FP registers can go into FP registers. */
7447 if ((regno == -1 || FP_REGNO_P (regno))
7448 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7451 /* Memory, and AltiVec registers can go into AltiVec registers. */
7452 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7453 && class == ALTIVEC_REGS)
7456 /* We can copy among the CR registers. */
7457 if ((class == CR_REGS || class == CR0_REGS)
7458 && regno >= 0 && CR_REGNO_P (regno))
7461 /* Otherwise, we need GENERAL_REGS. */
7462 return GENERAL_REGS;
7465 /* Given a comparison operation, return the bit number in CCR to test. We
7466 know this is a valid comparison.
7468 SCC_P is 1 if this is for an scc. That means that %D will have been
7469 used instead of %C, so the bits will be in different places.
7471 Return -1 if OP isn't a valid comparison for some reason. */
7478 enum rtx_code code = GET_CODE (op);
7479 enum machine_mode cc_mode;
7484 if (GET_RTX_CLASS (code) != '<')
7489 if (GET_CODE (reg) != REG
7490 || ! CR_REGNO_P (REGNO (reg)))
7493 cc_mode = GET_MODE (reg);
7494 cc_regnum = REGNO (reg);
7495 base_bit = 4 * (cc_regnum - CR0_REGNO);
7497 validate_condition_mode (code, cc_mode);
7502 if (TARGET_E500 && !TARGET_FPRS
7503 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7504 return base_bit + 1;
7505 return scc_p ? base_bit + 3 : base_bit + 2;
7507 if (TARGET_E500 && !TARGET_FPRS
7508 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7509 return base_bit + 1;
7510 return base_bit + 2;
7511 case GT: case GTU: case UNLE:
7512 return base_bit + 1;
7513 case LT: case LTU: case UNGE:
7515 case ORDERED: case UNORDERED:
7516 return base_bit + 3;
7519 /* If scc, we will have done a cror to put the bit in the
7520 unordered position. So test that bit. For integer, this is ! LT
7521 unless this is an scc insn. */
7522 return scc_p ? base_bit + 3 : base_bit;
7525 return scc_p ? base_bit + 3 : base_bit + 1;
7532 /* Return the GOT register. */
7535 rs6000_got_register (value)
7536 rtx value ATTRIBUTE_UNUSED;
7538 /* The second flow pass currently (June 1999) can't update
7539 regs_ever_live without disturbing other parts of the compiler, so
7540 update it here to make the prolog/epilogue code happy. */
7541 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7542 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7544 current_function_uses_pic_offset_table = 1;
7546 return pic_offset_table_rtx;
7549 /* Function to init struct machine_function.
7550 This will be called, via a pointer variable,
7551 from push_function_context. */
7553 static struct machine_function *
7554 rs6000_init_machine_status ()
7556 return ggc_alloc_cleared (sizeof (machine_function));
7559 /* These macros test for integers and extract the low-order bits. */
7561 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7562 && GET_MODE (X) == VOIDmode)
7564 #define INT_LOWPART(X) \
7565 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7572 unsigned long val = INT_LOWPART (op);
7574 /* If the high bit is zero, the value is the first 1 bit we find
7576 if ((val & 0x80000000) == 0)
7578 if ((val & 0xffffffff) == 0)
7582 while (((val <<= 1) & 0x80000000) == 0)
7587 /* If the high bit is set and the low bit is not, or the mask is all
7588 1's, the value is zero. */
7589 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7592 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7595 while (((val >>= 1) & 1) != 0)
7606 unsigned long val = INT_LOWPART (op);
7608 /* If the low bit is zero, the value is the first 1 bit we find from
7612 if ((val & 0xffffffff) == 0)
7616 while (((val >>= 1) & 1) == 0)
7622 /* If the low bit is set and the high bit is not, or the mask is all
7623 1's, the value is 31. */
7624 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7627 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7630 while (((val <<= 1) & 0x80000000) != 0)
7636 /* Print an operand. Recognize special options, documented below. */
7639 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7640 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7642 #define SMALL_DATA_RELOC "sda21"
7643 #define SMALL_DATA_REG 0
7647 print_operand (file, x, code)
7654 unsigned HOST_WIDE_INT uval;
7659 /* Write out an instruction after the call which may be replaced
7660 with glue code by the loader. This depends on the AIX version. */
7661 asm_fprintf (file, RS6000_CALL_GLUE);
7664 /* %a is output_address. */
7667 /* If X is a constant integer whose low-order 5 bits are zero,
7668 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7669 in the AIX assembler where "sri" with a zero shift count
7670 writes a trash instruction. */
7671 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7678 /* If constant, low-order 16 bits of constant, unsigned.
7679 Otherwise, write normally. */
7681 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7683 print_operand (file, x, 0);
7687 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7688 for 64-bit mask direction. */
7689 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7692 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7696 /* There used to be a comment for 'C' reading "This is an
7697 optional cror needed for certain floating-point
7698 comparisons. Otherwise write nothing." */
7700 /* Similar, except that this is for an scc, so we must be able to
7701 encode the test in a single bit that is one. We do the above
7702 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7703 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7704 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7706 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7708 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7710 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7713 else if (GET_CODE (x) == NE)
7715 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7717 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7718 base_bit + 2, base_bit + 2);
7720 else if (TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT
7721 && GET_CODE (x) == EQ
7722 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7724 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7726 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7727 base_bit + 1, base_bit + 1);
7732 /* X is a CR register. Print the number of the EQ bit of the CR */
7733 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7734 output_operand_lossage ("invalid %%E value");
7736 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7740 /* X is a CR register. Print the shift count needed to move it
7741 to the high-order four bits. */
7742 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7743 output_operand_lossage ("invalid %%f value");
7745 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7749 /* Similar, but print the count for the rotate in the opposite
7751 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7752 output_operand_lossage ("invalid %%F value");
7754 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7758 /* X is a constant integer. If it is negative, print "m",
7759 otherwise print "z". This is to make an aze or ame insn. */
7760 if (GET_CODE (x) != CONST_INT)
7761 output_operand_lossage ("invalid %%G value");
7762 else if (INTVAL (x) >= 0)
7769 /* If constant, output low-order five bits. Otherwise, write
7772 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7774 print_operand (file, x, 0);
7778 /* If constant, output low-order six bits. Otherwise, write
7781 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7783 print_operand (file, x, 0);
7787 /* Print `i' if this is a constant, else nothing. */
7793 /* Write the bit number in CCR for jump. */
7796 output_operand_lossage ("invalid %%j code");
7798 fprintf (file, "%d", i);
7802 /* Similar, but add one for shift count in rlinm for scc and pass
7803 scc flag to `ccr_bit'. */
7806 output_operand_lossage ("invalid %%J code");
7808 /* If we want bit 31, write a shift count of zero, not 32. */
7809 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7813 /* X must be a constant. Write the 1's complement of the
7816 output_operand_lossage ("invalid %%k value");
7818 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7822 /* X must be a symbolic constant on ELF. Write an
7823 expression suitable for an 'addi' that adds in the low 16
7825 if (GET_CODE (x) != CONST)
7827 print_operand_address (file, x);
7832 if (GET_CODE (XEXP (x, 0)) != PLUS
7833 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7834 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7835 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7836 output_operand_lossage ("invalid %%K value");
7837 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7839 /* For GNU as, there must be a non-alphanumeric character
7840 between 'l' and the number. The '-' is added by
7841 print_operand() already. */
7842 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7844 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7848 /* %l is output_asm_label. */
7851 /* Write second word of DImode or DFmode reference. Works on register
7852 or non-indexed memory only. */
7853 if (GET_CODE (x) == REG)
7854 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7855 else if (GET_CODE (x) == MEM)
7857 /* Handle possible auto-increment. Since it is pre-increment and
7858 we have already done it, we can just use an offset of word. */
7859 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7860 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7861 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7864 output_address (XEXP (adjust_address_nv (x, SImode,
7868 if (small_data_operand (x, GET_MODE (x)))
7869 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7870 reg_names[SMALL_DATA_REG]);
7875 /* MB value for a mask operand. */
7876 if (! mask_operand (x, SImode))
7877 output_operand_lossage ("invalid %%m value");
7879 fprintf (file, "%d", extract_MB (x));
7883 /* ME value for a mask operand. */
7884 if (! mask_operand (x, SImode))
7885 output_operand_lossage ("invalid %%M value");
7887 fprintf (file, "%d", extract_ME (x));
7890 /* %n outputs the negative of its operand. */
7893 /* Write the number of elements in the vector times 4. */
7894 if (GET_CODE (x) != PARALLEL)
7895 output_operand_lossage ("invalid %%N value");
7897 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7901 /* Similar, but subtract 1 first. */
7902 if (GET_CODE (x) != PARALLEL)
7903 output_operand_lossage ("invalid %%O value");
7905 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7909 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7911 || INT_LOWPART (x) < 0
7912 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7913 output_operand_lossage ("invalid %%p value");
7915 fprintf (file, "%d", i);
7919 /* The operand must be an indirect memory reference. The result
7920 is the register number. */
7921 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7922 || REGNO (XEXP (x, 0)) >= 32)
7923 output_operand_lossage ("invalid %%P value");
7925 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7929 /* This outputs the logical code corresponding to a boolean
7930 expression. The expression may have one or both operands
7931 negated (if one, only the first one). For condition register
7932 logical operations, it will also treat the negated
7933 CR codes as NOTs, but not handle NOTs of them. */
7935 const char *const *t = 0;
7937 enum rtx_code code = GET_CODE (x);
7938 static const char * const tbl[3][3] = {
7939 { "and", "andc", "nor" },
7940 { "or", "orc", "nand" },
7941 { "xor", "eqv", "xor" } };
7945 else if (code == IOR)
7947 else if (code == XOR)
7950 output_operand_lossage ("invalid %%q value");
7952 if (GET_CODE (XEXP (x, 0)) != NOT)
7956 if (GET_CODE (XEXP (x, 1)) == NOT)
7967 /* X is a CR register. Print the mask for `mtcrf'. */
7968 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7969 output_operand_lossage ("invalid %%R value");
7971 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7975 /* Low 5 bits of 32 - value */
7977 output_operand_lossage ("invalid %%s value");
7979 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7983 /* PowerPC64 mask position. All 0's is excluded.
7984 CONST_INT 32-bit mask is considered sign-extended so any
7985 transition must occur within the CONST_INT, not on the boundary. */
7986 if (! mask64_operand (x, DImode))
7987 output_operand_lossage ("invalid %%S value");
7989 uval = INT_LOWPART (x);
7991 if (uval & 1) /* Clear Left */
7993 #if HOST_BITS_PER_WIDE_INT > 64
7994 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7998 else /* Clear Right */
8001 #if HOST_BITS_PER_WIDE_INT > 64
8002 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8010 fprintf (file, "%d", i);
8014 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8015 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8018 /* Bit 3 is OV bit. */
8019 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8021 /* If we want bit 31, write a shift count of zero, not 32. */
8022 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8026 /* Print the symbolic name of a branch target register. */
8027 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8028 && REGNO (x) != COUNT_REGISTER_REGNUM))
8029 output_operand_lossage ("invalid %%T value");
8030 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8031 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8033 fputs ("ctr", file);
8037 /* High-order 16 bits of constant for use in unsigned operand. */
8039 output_operand_lossage ("invalid %%u value");
8041 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8042 (INT_LOWPART (x) >> 16) & 0xffff);
8046 /* High-order 16 bits of constant for use in signed operand. */
8048 output_operand_lossage ("invalid %%v value");
8050 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8051 (INT_LOWPART (x) >> 16) & 0xffff);
8055 /* Print `u' if this has an auto-increment or auto-decrement. */
8056 if (GET_CODE (x) == MEM
8057 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8058 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8063 /* Print the trap code for this operand. */
8064 switch (GET_CODE (x))
8067 fputs ("eq", file); /* 4 */
8070 fputs ("ne", file); /* 24 */
8073 fputs ("lt", file); /* 16 */
8076 fputs ("le", file); /* 20 */
8079 fputs ("gt", file); /* 8 */
8082 fputs ("ge", file); /* 12 */
8085 fputs ("llt", file); /* 2 */
8088 fputs ("lle", file); /* 6 */
8091 fputs ("lgt", file); /* 1 */
8094 fputs ("lge", file); /* 5 */
8102 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8105 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8106 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8108 print_operand (file, x, 0);
8112 /* MB value for a PowerPC64 rldic operand. */
8113 val = (GET_CODE (x) == CONST_INT
8114 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8119 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8120 if ((val <<= 1) < 0)
8123 #if HOST_BITS_PER_WIDE_INT == 32
8124 if (GET_CODE (x) == CONST_INT && i >= 0)
8125 i += 32; /* zero-extend high-part was all 0's */
8126 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8128 val = CONST_DOUBLE_LOW (x);
8135 for ( ; i < 64; i++)
8136 if ((val <<= 1) < 0)
8141 fprintf (file, "%d", i + 1);
8145 if (GET_CODE (x) == MEM
8146 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
8151 /* Like 'L', for third word of TImode */
8152 if (GET_CODE (x) == REG)
8153 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8154 else if (GET_CODE (x) == MEM)
8156 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8157 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8158 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8160 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8161 if (small_data_operand (x, GET_MODE (x)))
8162 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8163 reg_names[SMALL_DATA_REG]);
8168 /* X is a SYMBOL_REF. Write out the name preceded by a
8169 period and without any trailing data in brackets. Used for function
8170 names. If we are configured for System V (or the embedded ABI) on
8171 the PowerPC, do not emit the period, since those systems do not use
8172 TOCs and the like. */
8173 if (GET_CODE (x) != SYMBOL_REF)
8176 if (XSTR (x, 0)[0] != '.')
8178 switch (DEFAULT_ABI)
8188 case ABI_AIX_NODESC:
8194 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8196 assemble_name (file, XSTR (x, 0));
8201 /* Like 'L', for last word of TImode. */
8202 if (GET_CODE (x) == REG)
8203 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8204 else if (GET_CODE (x) == MEM)
8206 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8207 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8208 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8210 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8211 if (small_data_operand (x, GET_MODE (x)))
8212 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8213 reg_names[SMALL_DATA_REG]);
8217 /* Print AltiVec or SPE memory operand. */
8222 if (GET_CODE (x) != MEM)
8230 if (GET_CODE (tmp) == REG)
8232 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8235 /* Handle [reg+UIMM]. */
8236 else if (GET_CODE (tmp) == PLUS &&
8237 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8241 if (GET_CODE (XEXP (tmp, 0)) != REG)
8244 x = INTVAL (XEXP (tmp, 1));
8245 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8249 /* Fall through. Must be [reg+reg]. */
8251 if (GET_CODE (tmp) == REG)
8252 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8253 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8255 if (REGNO (XEXP (tmp, 0)) == 0)
8256 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8257 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8259 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8260 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8268 if (GET_CODE (x) == REG)
8269 fprintf (file, "%s", reg_names[REGNO (x)]);
8270 else if (GET_CODE (x) == MEM)
8272 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8273 know the width from the mode. */
8274 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8275 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8276 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8277 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8278 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8279 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8281 output_address (XEXP (x, 0));
8284 output_addr_const (file, x);
8288 output_operand_lossage ("invalid %%xn code");
8292 /* Print the address of an operand. */
8295 print_operand_address (file, x)
8299 if (GET_CODE (x) == REG)
8300 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8301 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8302 || GET_CODE (x) == LABEL_REF)
8304 output_addr_const (file, x);
8305 if (small_data_operand (x, GET_MODE (x)))
8306 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8307 reg_names[SMALL_DATA_REG]);
8308 else if (TARGET_TOC)
8311 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8313 if (REGNO (XEXP (x, 0)) == 0)
8314 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8315 reg_names[ REGNO (XEXP (x, 0)) ]);
8317 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8318 reg_names[ REGNO (XEXP (x, 1)) ]);
8320 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8322 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8323 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8326 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8327 && CONSTANT_P (XEXP (x, 1)))
8329 output_addr_const (file, XEXP (x, 1));
8330 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8334 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8335 && CONSTANT_P (XEXP (x, 1)))
8337 fprintf (file, "lo16(");
8338 output_addr_const (file, XEXP (x, 1));
8339 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8342 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8344 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8346 rtx contains_minus = XEXP (x, 1);
8350 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8351 turn it into (sym) for output_addr_const. */
8352 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8353 contains_minus = XEXP (contains_minus, 0);
8355 minus = XEXP (contains_minus, 0);
8356 symref = XEXP (minus, 0);
8357 XEXP (contains_minus, 0) = symref;
8362 name = XSTR (symref, 0);
8363 newname = alloca (strlen (name) + sizeof ("@toc"));
8364 strcpy (newname, name);
8365 strcat (newname, "@toc");
8366 XSTR (symref, 0) = newname;
8368 output_addr_const (file, XEXP (x, 1));
8370 XSTR (symref, 0) = name;
8371 XEXP (contains_minus, 0) = minus;
8374 output_addr_const (file, XEXP (x, 1));
8376 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8382 /* Target hook for assembling integer objects. The PowerPC version has
8383 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8384 is defined. It also needs to handle DI-mode objects on 64-bit
8388 rs6000_assemble_integer (x, size, aligned_p)
8393 #ifdef RELOCATABLE_NEEDS_FIXUP
8394 /* Special handling for SI values. */
8395 if (size == 4 && aligned_p)
8397 extern int in_toc_section PARAMS ((void));
8398 static int recurse = 0;
8400 /* For -mrelocatable, we mark all addresses that need to be fixed up
8401 in the .fixup section. */
8402 if (TARGET_RELOCATABLE
8403 && !in_toc_section ()
8404 && !in_text_section ()
8406 && GET_CODE (x) != CONST_INT
8407 && GET_CODE (x) != CONST_DOUBLE
8413 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8415 ASM_OUTPUT_LABEL (asm_out_file, buf);
8416 fprintf (asm_out_file, "\t.long\t(");
8417 output_addr_const (asm_out_file, x);
8418 fprintf (asm_out_file, ")@fixup\n");
8419 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8420 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8421 fprintf (asm_out_file, "\t.long\t");
8422 assemble_name (asm_out_file, buf);
8423 fprintf (asm_out_file, "\n\t.previous\n");
8427 /* Remove initial .'s to turn a -mcall-aixdesc function
8428 address into the address of the descriptor, not the function
8430 else if (GET_CODE (x) == SYMBOL_REF
8431 && XSTR (x, 0)[0] == '.'
8432 && DEFAULT_ABI == ABI_AIX)
8434 const char *name = XSTR (x, 0);
8435 while (*name == '.')
8438 fprintf (asm_out_file, "\t.long\t%s\n", name);
8442 #endif /* RELOCATABLE_NEEDS_FIXUP */
8443 return default_assemble_integer (x, size, aligned_p);
8446 #ifdef HAVE_GAS_HIDDEN
8447 /* Emit an assembler directive to set symbol visibility for DECL to
8451 rs6000_assemble_visibility (decl, vis)
8455 /* Functions need to have their entry point symbol visibility set as
8456 well as their descriptor symbol visibility. */
8457 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8459 static const char * const visibility_types[] = {
8460 NULL, "internal", "hidden", "protected"
8463 const char *name, *type;
8465 name = ((* targetm.strip_name_encoding)
8466 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8467 type = visibility_types[vis];
8469 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8470 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8473 default_assemble_visibility (decl, vis);
8478 rs6000_reverse_condition (mode, code)
8479 enum machine_mode mode;
8482 /* Reversal of FP compares takes care -- an ordered compare
8483 becomes an unordered compare and vice versa. */
8484 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8485 return reverse_condition_maybe_unordered (code);
8487 return reverse_condition (code);
8490 /* Generate a compare for CODE. Return a brand-new rtx that
8491 represents the result of the compare. */
8494 rs6000_generate_compare (code)
8497 enum machine_mode comp_mode;
8500 if (rs6000_compare_fp_p)
8501 comp_mode = CCFPmode;
8502 else if (code == GTU || code == LTU
8503 || code == GEU || code == LEU)
8504 comp_mode = CCUNSmode;
8508 /* First, the compare. */
8509 compare_result = gen_reg_rtx (comp_mode);
8511 /* SPE FP compare instructions on the GPRs. Yuck! */
8512 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8513 && rs6000_compare_fp_p)
8515 rtx cmp, or1, or2, or_result, compare_result2;
8523 cmp = flag_unsafe_math_optimizations
8524 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8526 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8527 rs6000_compare_op1);
8535 cmp = flag_unsafe_math_optimizations
8536 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8538 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8539 rs6000_compare_op1);
8547 cmp = flag_unsafe_math_optimizations
8548 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8550 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8551 rs6000_compare_op1);
8557 /* Synthesize LE and GE from LT/GT || EQ. */
8558 if (code == LE || code == GE || code == LEU || code == GEU)
8560 /* Synthesize GE/LE frome GT/LT || EQ. */
8566 case LE: code = LT; break;
8567 case GE: code = GT; break;
8568 case LEU: code = LT; break;
8569 case GEU: code = GT; break;
8573 or1 = gen_reg_rtx (SImode);
8574 or2 = gen_reg_rtx (SImode);
8575 or_result = gen_reg_rtx (CCEQmode);
8576 compare_result2 = gen_reg_rtx (CCFPmode);
8579 cmp = flag_unsafe_math_optimizations
8580 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8582 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8583 rs6000_compare_op1);
8586 /* The MC8540 FP compare instructions set the CR bits
8587 differently than other PPC compare instructions. For
8588 that matter, there is no generic test instruction, but a
8589 testgt, testlt, and testeq. For a true condition, bit 2
8590 is set (x1xx) in the CR. Following the traditional CR
8596 ... bit 2 would be a GT CR alias, so later on we
8597 look in the GT bits for the branch instructions.
8598 However, we must be careful to emit correct RTL in
8599 the meantime, so optimizations don't get confused. */
8601 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8602 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8604 /* OR them together. */
8605 cmp = gen_rtx_SET (VOIDmode, or_result,
8606 gen_rtx_COMPARE (CCEQmode,
8607 gen_rtx_IOR (SImode, or1, or2),
8609 compare_result = or_result;
8614 /* We only care about 1 bit (x1xx), so map everything to NE to
8615 maintain rtl sanity. We'll get to the right bit (x1xx) at
8616 code output time. */
8617 if (code == NE || code == LTGT)
8618 /* Do the inverse here because we have no cmpne
8619 instruction. We use the cmpeq instruction and expect
8620 to get a 0 instead. */
8629 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8630 gen_rtx_COMPARE (comp_mode,
8632 rs6000_compare_op1)));
8634 /* Some kinds of FP comparisons need an OR operation;
8635 except for flag_unsafe_math_optimizations we don't bother. */
8636 if (rs6000_compare_fp_p
8637 && ! flag_unsafe_math_optimizations
8638 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
8639 && (code == LE || code == GE
8640 || code == UNEQ || code == LTGT
8641 || code == UNGT || code == UNLT))
8643 enum rtx_code or1, or2;
8644 rtx or1_rtx, or2_rtx, compare2_rtx;
8645 rtx or_result = gen_reg_rtx (CCEQmode);
8649 case LE: or1 = LT; or2 = EQ; break;
8650 case GE: or1 = GT; or2 = EQ; break;
8651 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8652 case LTGT: or1 = LT; or2 = GT; break;
8653 case UNGT: or1 = UNORDERED; or2 = GT; break;
8654 case UNLT: or1 = UNORDERED; or2 = LT; break;
8657 validate_condition_mode (or1, comp_mode);
8658 validate_condition_mode (or2, comp_mode);
8659 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8660 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8661 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8662 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8664 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8666 compare_result = or_result;
8670 validate_condition_mode (code, GET_MODE (compare_result));
8672 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8676 /* Emit the RTL for an sCOND pattern. */
8679 rs6000_emit_sCOND (code, result)
8684 enum machine_mode op_mode;
8686 condition_rtx = rs6000_generate_compare (code);
8688 op_mode = GET_MODE (rs6000_compare_op0);
8689 if (op_mode == VOIDmode)
8690 op_mode = GET_MODE (rs6000_compare_op1);
8692 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8694 PUT_MODE (condition_rtx, DImode);
8695 convert_move (result, condition_rtx, 0);
8699 PUT_MODE (condition_rtx, SImode);
8700 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8704 /* Emit a branch of kind CODE to location LOC. */
8707 rs6000_emit_cbranch (code, loc)
8711 rtx condition_rtx, loc_ref;
8713 condition_rtx = rs6000_generate_compare (code);
8714 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8715 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8716 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8720 /* Return the string to output a conditional branch to LABEL, which is
8721 the operand number of the label, or -1 if the branch is really a
8724 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8725 condition code register and its mode specifies what kind of
8728 REVERSED is nonzero if we should reverse the sense of the comparison.
8730 INSN is the insn. */
8733 output_cbranch (op, label, reversed, insn)
8739 static char string[64];
8740 enum rtx_code code = GET_CODE (op);
8741 rtx cc_reg = XEXP (op, 0);
8742 enum machine_mode mode = GET_MODE (cc_reg);
8743 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8744 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8745 int really_reversed = reversed ^ need_longbranch;
8751 validate_condition_mode (code, mode);
8753 /* Work out which way this really branches. We could use
8754 reverse_condition_maybe_unordered here always but this
8755 makes the resulting assembler clearer. */
8756 if (really_reversed)
8758 /* Reversal of FP compares takes care -- an ordered compare
8759 becomes an unordered compare and vice versa. */
8760 if (mode == CCFPmode)
8761 code = reverse_condition_maybe_unordered (code);
8763 code = reverse_condition (code);
8766 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
8768 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8771 /* Opposite of GT. */
8773 else if (code == NE)
8781 /* Not all of these are actually distinct opcodes, but
8782 we distinguish them for clarity of the resulting assembler. */
8784 ccode = "ne"; break;
8786 ccode = "eq"; break;
8788 ccode = "ge"; break;
8789 case GT: case GTU: case UNGT:
8790 ccode = "gt"; break;
8792 ccode = "le"; break;
8793 case LT: case LTU: case UNLT:
8794 ccode = "lt"; break;
8795 case UNORDERED: ccode = "un"; break;
8796 case ORDERED: ccode = "nu"; break;
8797 case UNGE: ccode = "nl"; break;
8798 case UNLE: ccode = "ng"; break;
8803 /* Maybe we have a guess as to how likely the branch is.
8804 The old mnemonics don't have a way to specify this information. */
8806 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8807 if (note != NULL_RTX)
8809 /* PROB is the difference from 50%. */
8810 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8811 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8813 /* Only hint for highly probable/improbable branches on newer
8814 cpus as static prediction overrides processor dynamic
8815 prediction. For older cpus we may as well always hint, but
8816 assume not taken for branches that are very close to 50% as a
8817 mispredicted taken branch is more expensive than a
8818 mispredicted not-taken branch. */
8820 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8822 if (abs (prob) > REG_BR_PROB_BASE / 20
8823 && ((prob > 0) ^ need_longbranch))
8831 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8833 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8835 /* We need to escape any '%' characters in the reg_names string.
8836 Assume they'd only be the first character... */
8837 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8839 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8843 /* If the branch distance was too far, we may have to use an
8844 unconditional branch to go the distance. */
8845 if (need_longbranch)
8846 s += sprintf (s, ",$+8\n\tb %s", label);
8848 s += sprintf (s, ",%s", label);
8854 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8855 operands of the last comparison is nonzero/true, FALSE_COND if it
8856 is zero/false. Return 0 if the hardware has no such operation. */
8859 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8865 enum rtx_code code = GET_CODE (op);
8866 rtx op0 = rs6000_compare_op0;
8867 rtx op1 = rs6000_compare_op1;
8869 enum machine_mode compare_mode = GET_MODE (op0);
8870 enum machine_mode result_mode = GET_MODE (dest);
8873 /* These modes should always match. */
8874 if (GET_MODE (op1) != compare_mode
8875 /* In the isel case however, we can use a compare immediate, so
8876 op1 may be a small constant. */
8877 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8879 if (GET_MODE (true_cond) != result_mode)
8881 if (GET_MODE (false_cond) != result_mode)
8884 /* First, work out if the hardware can do this at all, or
8885 if it's too slow... */
8886 if (! rs6000_compare_fp_p)
8889 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8893 /* Eliminate half of the comparisons by switching operands, this
8894 makes the remaining code simpler. */
8895 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8896 || code == LTGT || code == LT)
8898 code = reverse_condition_maybe_unordered (code);
8900 true_cond = false_cond;
8904 /* UNEQ and LTGT take four instructions for a comparison with zero,
8905 it'll probably be faster to use a branch here too. */
8909 if (GET_CODE (op1) == CONST_DOUBLE)
8910 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8912 /* We're going to try to implement comparisons by performing
8913 a subtract, then comparing against zero. Unfortunately,
8914 Inf - Inf is NaN which is not zero, and so if we don't
8915 know that the operand is finite and the comparison
8916 would treat EQ different to UNORDERED, we can't do it. */
8917 if (! flag_unsafe_math_optimizations
8918 && code != GT && code != UNGE
8919 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8920 /* Constructs of the form (a OP b ? a : b) are safe. */
8921 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8922 || (! rtx_equal_p (op0, true_cond)
8923 && ! rtx_equal_p (op1, true_cond))))
8925 /* At this point we know we can use fsel. */
8927 /* Reduce the comparison to a comparison against zero. */
8928 temp = gen_reg_rtx (compare_mode);
8929 emit_insn (gen_rtx_SET (VOIDmode, temp,
8930 gen_rtx_MINUS (compare_mode, op0, op1)));
8932 op1 = CONST0_RTX (compare_mode);
8934 /* If we don't care about NaNs we can reduce some of the comparisons
8935 down to faster ones. */
8936 if (flag_unsafe_math_optimizations)
8942 true_cond = false_cond;
8955 /* Now, reduce everything down to a GE. */
8962 temp = gen_reg_rtx (compare_mode);
8963 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8968 temp = gen_reg_rtx (compare_mode);
8969 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8974 temp = gen_reg_rtx (compare_mode);
8975 emit_insn (gen_rtx_SET (VOIDmode, temp,
8976 gen_rtx_NEG (compare_mode,
8977 gen_rtx_ABS (compare_mode, op0))));
8982 temp = gen_reg_rtx (result_mode);
8983 emit_insn (gen_rtx_SET (VOIDmode, temp,
8984 gen_rtx_IF_THEN_ELSE (result_mode,
8985 gen_rtx_GE (VOIDmode,
8987 true_cond, false_cond)));
8989 true_cond = false_cond;
8991 temp = gen_reg_rtx (compare_mode);
8992 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8997 temp = gen_reg_rtx (result_mode);
8998 emit_insn (gen_rtx_SET (VOIDmode, temp,
8999 gen_rtx_IF_THEN_ELSE (result_mode,
9000 gen_rtx_GE (VOIDmode,
9002 true_cond, false_cond)));
9004 false_cond = true_cond;
9006 temp = gen_reg_rtx (compare_mode);
9007 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9015 emit_insn (gen_rtx_SET (VOIDmode, dest,
9016 gen_rtx_IF_THEN_ELSE (result_mode,
9017 gen_rtx_GE (VOIDmode,
9019 true_cond, false_cond)));
9023 /* Same as above, but for ints (isel). */
9026 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9032 rtx condition_rtx, cr;
9034 /* All isel implementations thus far are 32-bits. */
9035 if (GET_MODE (rs6000_compare_op0) != SImode)
9038 /* We still have to do the compare, because isel doesn't do a
9039 compare, it just looks at the CRx bits set by a previous compare
9041 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9042 cr = XEXP (condition_rtx, 0);
9044 if (GET_MODE (cr) == CCmode)
9045 emit_insn (gen_isel_signed (dest, condition_rtx,
9046 true_cond, false_cond, cr));
9048 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9049 true_cond, false_cond, cr));
9055 output_isel (operands)
9060 code = GET_CODE (operands[1]);
9061 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9063 PUT_CODE (operands[1], reverse_condition (code));
9064 return "isel %0,%3,%2,%j1";
9067 return "isel %0,%2,%3,%j1";
9071 rs6000_emit_minmax (dest, code, op0, op1)
9077 enum machine_mode mode = GET_MODE (op0);
9081 if (code == SMAX || code == SMIN)
9086 if (code == SMAX || code == UMAX)
9087 target = emit_conditional_move (dest, c, op0, op1, mode,
9090 target = emit_conditional_move (dest, c, op0, op1, mode,
9092 if (target == NULL_RTX)
9095 emit_move_insn (dest, target);
9098 /* This page contains routines that are used to determine what the
9099 function prologue and epilogue code will do and write them out. */
9101 /* Return the first fixed-point register that is required to be
9102 saved. 32 if none. */
9105 first_reg_to_save ()
9109 /* Find lowest numbered live register. */
9110 for (first_reg = 13; first_reg <= 31; first_reg++)
9111 if (regs_ever_live[first_reg]
9112 && (! call_used_regs[first_reg]
9113 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9114 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9115 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9120 && current_function_uses_pic_offset_table
9121 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9122 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9128 /* Similar, for FP regs. */
9131 first_fp_reg_to_save ()
9135 /* Find lowest numbered live register. */
9136 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9137 if (regs_ever_live[first_reg])
9143 /* Similar, for AltiVec regs. */
9146 first_altivec_reg_to_save ()
9150 /* Stack frame remains as is unless we are in AltiVec ABI. */
9151 if (! TARGET_ALTIVEC_ABI)
9152 return LAST_ALTIVEC_REGNO + 1;
9154 /* Find lowest numbered live register. */
9155 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9156 if (regs_ever_live[i])
9162 /* Return a 32-bit mask of the AltiVec registers we need to set in
9163 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9164 the 32-bit word is 0. */
9167 compute_vrsave_mask ()
9169 unsigned int i, mask = 0;
9171 /* First, find out if we use _any_ altivec registers. */
9172 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9173 if (regs_ever_live[i])
9174 mask |= ALTIVEC_REG_BIT (i);
9179 /* Next, remove the argument registers from the set. These must
9180 be in the VRSAVE mask set by the caller, so we don't need to add
9181 them in again. More importantly, the mask we compute here is
9182 used to generate CLOBBERs in the set_vrsave insn, and we do not
9183 wish the argument registers to die. */
9184 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9185 mask &= ~ALTIVEC_REG_BIT (i);
9187 /* Similarly, remove the return value from the set. */
9190 diddle_return_value (is_altivec_return_reg, &yes);
9192 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9199 is_altivec_return_reg (reg, xyes)
9203 bool *yes = (bool *) xyes;
9204 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9209 /* Calculate the stack information for the current function. This is
9210 complicated by having two separate calling sequences, the AIX calling
9211 sequence and the V.4 calling sequence.
9213 AIX (and Darwin/Mac OS X) stack frames look like:
9215 SP----> +---------------------------------------+
9216 | back chain to caller | 0 0
9217 +---------------------------------------+
9218 | saved CR | 4 8 (8-11)
9219 +---------------------------------------+
9221 +---------------------------------------+
9222 | reserved for compilers | 12 24
9223 +---------------------------------------+
9224 | reserved for binders | 16 32
9225 +---------------------------------------+
9226 | saved TOC pointer | 20 40
9227 +---------------------------------------+
9228 | Parameter save area (P) | 24 48
9229 +---------------------------------------+
9230 | Alloca space (A) | 24+P etc.
9231 +---------------------------------------+
9232 | Local variable space (L) | 24+P+A
9233 +---------------------------------------+
9234 | Float/int conversion temporary (X) | 24+P+A+L
9235 +---------------------------------------+
9236 | Save area for AltiVec registers (W) | 24+P+A+L+X
9237 +---------------------------------------+
9238 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9239 +---------------------------------------+
9240 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9241 +---------------------------------------+
9242 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9243 +---------------------------------------+
9244 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9245 +---------------------------------------+
9246 old SP->| back chain to caller's caller |
9247 +---------------------------------------+
9249 The required alignment for AIX configurations is two words (i.e., 8
9253 V.4 stack frames look like:
9255 SP----> +---------------------------------------+
9256 | back chain to caller | 0
9257 +---------------------------------------+
9258 | caller's saved LR | 4
9259 +---------------------------------------+
9260 | Parameter save area (P) | 8
9261 +---------------------------------------+
9262 | Alloca space (A) | 8+P
9263 +---------------------------------------+
9264 | Varargs save area (V) | 8+P+A
9265 +---------------------------------------+
9266 | Local variable space (L) | 8+P+A+V
9267 +---------------------------------------+
9268 | Float/int conversion temporary (X) | 8+P+A+V+L
9269 +---------------------------------------+
9270 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9271 +---------------------------------------+
9272 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9273 +---------------------------------------+
9274 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9275 +---------------------------------------+
9276 | SPE: area for 64-bit GP registers |
9277 +---------------------------------------+
9278 | SPE alignment padding |
9279 +---------------------------------------+
9280 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9281 +---------------------------------------+
9282 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9283 +---------------------------------------+
9284 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9285 +---------------------------------------+
9286 old SP->| back chain to caller's caller |
9287 +---------------------------------------+
9289 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9290 given. (But note below and in sysv4.h that we require only 8 and
9291 may round up the size of our stack frame anyways. The historical
9292 reason is early versions of powerpc-linux which didn't properly
9293 align the stack at program startup. A happy side-effect is that
9294 -mno-eabi libraries can be used with -meabi programs.)
9296 The EABI configuration defaults to the V.4 layout, unless
9297 -mcall-aix is used, in which case the AIX layout is used. However,
9298 the stack alignment requirements may differ. If -mno-eabi is not
9299 given, the required stack alignment is 8 bytes; if -mno-eabi is
9300 given, the required alignment is 16 bytes. (But see V.4 comment
9303 #ifndef ABI_STACK_BOUNDARY
9304 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9308 rs6000_stack_info ()
9310 static rs6000_stack_t info, zero_info;
9311 rs6000_stack_t *info_ptr = &info;
9312 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9316 /* Zero all fields portably. */
9321 /* Cache value so we don't rescan instruction chain over and over. */
9322 if (cfun->machine->insn_chain_scanned_p == 0)
9324 cfun->machine->insn_chain_scanned_p = 1;
9325 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9329 /* Select which calling sequence. */
9330 info_ptr->abi = DEFAULT_ABI;
9332 /* Calculate which registers need to be saved & save area size. */
9333 info_ptr->first_gp_reg_save = first_reg_to_save ();
9334 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9335 even if it currently looks like we won't. */
9336 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9337 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9338 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9339 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9340 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9342 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9344 /* For the SPE, we have an additional upper 32-bits on each GPR.
9345 Ideally we should save the entire 64-bits only when the upper
9346 half is used in SIMD instructions. Since we only record
9347 registers live (not the size they are used in), this proves
9348 difficult because we'd have to traverse the instruction chain at
9349 the right time, taking reload into account. This is a real pain,
9350 so we opt to save the GPRs in 64-bits always if but one register
9351 gets used in 64-bits. Otherwise, all the registers in the frame
9352 get saved in 32-bits.
9354 So... since when we save all GPRs (except the SP) in 64-bits, the
9355 traditional GP save area will be empty. */
9356 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9357 info_ptr->gp_size = 0;
9359 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9360 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9362 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9363 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9364 - info_ptr->first_altivec_reg_save);
9366 /* Does this function call anything? */
9367 info_ptr->calls_p = (! current_function_is_leaf
9368 || cfun->machine->ra_needs_full_frame);
9370 /* Determine if we need to save the link register. */
9371 if (rs6000_ra_ever_killed ()
9372 || (DEFAULT_ABI == ABI_AIX
9373 && current_function_profile
9374 && !TARGET_PROFILE_KERNEL)
9375 #ifdef TARGET_RELOCATABLE
9376 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9378 || (info_ptr->first_fp_reg_save != 64
9379 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9380 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9381 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9382 || (DEFAULT_ABI == ABI_DARWIN
9384 && current_function_uses_pic_offset_table)
9385 || info_ptr->calls_p)
9387 info_ptr->lr_save_p = 1;
9388 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9391 /* Determine if we need to save the condition code registers. */
9392 if (regs_ever_live[CR2_REGNO]
9393 || regs_ever_live[CR3_REGNO]
9394 || regs_ever_live[CR4_REGNO])
9396 info_ptr->cr_save_p = 1;
9397 if (DEFAULT_ABI == ABI_V4)
9398 info_ptr->cr_size = reg_size;
9401 /* If the current function calls __builtin_eh_return, then we need
9402 to allocate stack space for registers that will hold data for
9403 the exception handler. */
9404 if (current_function_calls_eh_return)
9407 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9410 /* SPE saves EH registers in 64-bits. */
9411 ehrd_size = i * (TARGET_SPE_ABI
9412 && info_ptr->spe_64bit_regs_used != 0
9413 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9418 /* Determine various sizes. */
9419 info_ptr->reg_size = reg_size;
9420 info_ptr->fixed_size = RS6000_SAVE_AREA;
9421 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9422 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9423 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9426 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9427 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9429 info_ptr->spe_gp_size = 0;
9431 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9433 info_ptr->vrsave_mask = compute_vrsave_mask ();
9434 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9438 info_ptr->vrsave_mask = 0;
9439 info_ptr->vrsave_size = 0;
9442 /* Calculate the offsets. */
9443 switch (DEFAULT_ABI)
9450 case ABI_AIX_NODESC:
9452 info_ptr->fp_save_offset = - info_ptr->fp_size;
9453 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9455 if (TARGET_ALTIVEC_ABI)
9457 info_ptr->vrsave_save_offset
9458 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9460 /* Align stack so vector save area is on a quadword boundary. */
9461 if (info_ptr->altivec_size != 0)
9462 info_ptr->altivec_padding_size
9463 = 16 - (-info_ptr->vrsave_save_offset % 16);
9465 info_ptr->altivec_padding_size = 0;
9467 info_ptr->altivec_save_offset
9468 = info_ptr->vrsave_save_offset
9469 - info_ptr->altivec_padding_size
9470 - info_ptr->altivec_size;
9472 /* Adjust for AltiVec case. */
9473 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9476 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9477 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9478 info_ptr->lr_save_offset = 2*reg_size;
9482 info_ptr->fp_save_offset = - info_ptr->fp_size;
9483 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9484 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9486 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9488 /* Align stack so SPE GPR save area is aligned on a
9489 double-word boundary. */
9490 if (info_ptr->spe_gp_size != 0)
9491 info_ptr->spe_padding_size
9492 = 8 - (-info_ptr->cr_save_offset % 8);
9494 info_ptr->spe_padding_size = 0;
9496 info_ptr->spe_gp_save_offset
9497 = info_ptr->cr_save_offset
9498 - info_ptr->spe_padding_size
9499 - info_ptr->spe_gp_size;
9501 /* Adjust for SPE case. */
9502 info_ptr->toc_save_offset
9503 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9505 else if (TARGET_ALTIVEC_ABI)
9507 info_ptr->vrsave_save_offset
9508 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9510 /* Align stack so vector save area is on a quadword boundary. */
9511 if (info_ptr->altivec_size != 0)
9512 info_ptr->altivec_padding_size
9513 = 16 - (-info_ptr->vrsave_save_offset % 16);
9515 info_ptr->altivec_padding_size = 0;
9517 info_ptr->altivec_save_offset
9518 = info_ptr->vrsave_save_offset
9519 - info_ptr->altivec_padding_size
9520 - info_ptr->altivec_size;
9522 /* Adjust for AltiVec case. */
9523 info_ptr->toc_save_offset
9524 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9527 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9528 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9529 info_ptr->lr_save_offset = reg_size;
9533 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9535 + info_ptr->altivec_size
9536 + info_ptr->altivec_padding_size
9537 + info_ptr->vrsave_size
9538 + info_ptr->spe_gp_size
9539 + info_ptr->spe_padding_size
9543 + info_ptr->vrsave_size
9544 + info_ptr->toc_size,
9545 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9548 total_raw_size = (info_ptr->vars_size
9549 + info_ptr->parm_size
9550 + info_ptr->save_size
9551 + info_ptr->varargs_size
9552 + info_ptr->fixed_size);
9554 info_ptr->total_size =
9555 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9557 /* Determine if we need to allocate any stack frame:
9559 For AIX we need to push the stack if a frame pointer is needed
9560 (because the stack might be dynamically adjusted), if we are
9561 debugging, if we make calls, or if the sum of fp_save, gp_save,
9562 and local variables are more than the space needed to save all
9563 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9564 + 18*8 = 288 (GPR13 reserved).
9566 For V.4 we don't have the stack cushion that AIX uses, but assume
9567 that the debugger can handle stackless frames. */
9569 if (info_ptr->calls_p)
9570 info_ptr->push_p = 1;
9572 else if (DEFAULT_ABI == ABI_V4)
9573 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9575 else if (frame_pointer_needed)
9576 info_ptr->push_p = 1;
9578 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9579 info_ptr->push_p = 1;
9583 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9585 /* Zero offsets if we're not saving those registers. */
9586 if (info_ptr->fp_size == 0)
9587 info_ptr->fp_save_offset = 0;
9589 if (info_ptr->gp_size == 0)
9590 info_ptr->gp_save_offset = 0;
9592 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9593 info_ptr->altivec_save_offset = 0;
9595 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9596 info_ptr->vrsave_save_offset = 0;
9598 if (! TARGET_SPE_ABI
9599 || info_ptr->spe_64bit_regs_used == 0
9600 || info_ptr->spe_gp_size == 0)
9601 info_ptr->spe_gp_save_offset = 0;
9603 if (! info_ptr->lr_save_p)
9604 info_ptr->lr_save_offset = 0;
9606 if (! info_ptr->cr_save_p)
9607 info_ptr->cr_save_offset = 0;
9609 if (! info_ptr->toc_save_p)
9610 info_ptr->toc_save_offset = 0;
9615 /* Return true if the current function uses any GPRs in 64-bit SIMD
9619 spe_func_has_64bit_regs_p ()
9623 /* Functions that save and restore all the call-saved registers will
9624 need to save/restore the registers in 64-bits. */
9625 if (current_function_calls_eh_return
9626 || current_function_calls_setjmp
9627 || current_function_has_nonlocal_goto)
9630 insns = get_insns ();
9632 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9639 if (GET_CODE (i) == SET
9640 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9649 debug_stack_info (info)
9650 rs6000_stack_t *info;
9652 const char *abi_string;
9655 info = rs6000_stack_info ();
9657 fprintf (stderr, "\nStack information for function %s:\n",
9658 ((current_function_decl && DECL_NAME (current_function_decl))
9659 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9664 default: abi_string = "Unknown"; break;
9665 case ABI_NONE: abi_string = "NONE"; break;
9667 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9668 case ABI_DARWIN: abi_string = "Darwin"; break;
9669 case ABI_V4: abi_string = "V.4"; break;
9672 fprintf (stderr, "\tABI = %5s\n", abi_string);
9674 if (TARGET_ALTIVEC_ABI)
9675 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9678 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9680 if (info->first_gp_reg_save != 32)
9681 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9683 if (info->first_fp_reg_save != 64)
9684 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9686 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9687 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9688 info->first_altivec_reg_save);
9690 if (info->lr_save_p)
9691 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9693 if (info->cr_save_p)
9694 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9696 if (info->toc_save_p)
9697 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9699 if (info->vrsave_mask)
9700 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9703 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9706 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9708 if (info->gp_save_offset)
9709 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9711 if (info->fp_save_offset)
9712 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9714 if (info->altivec_save_offset)
9715 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9716 info->altivec_save_offset);
9718 if (info->spe_gp_save_offset)
9719 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9720 info->spe_gp_save_offset);
9722 if (info->vrsave_save_offset)
9723 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9724 info->vrsave_save_offset);
9726 if (info->lr_save_offset)
9727 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9729 if (info->cr_save_offset)
9730 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9732 if (info->toc_save_offset)
9733 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9735 if (info->varargs_save_offset)
9736 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9738 if (info->total_size)
9739 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9741 if (info->varargs_size)
9742 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9744 if (info->vars_size)
9745 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9747 if (info->parm_size)
9748 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9750 if (info->fixed_size)
9751 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9754 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9756 if (info->spe_gp_size)
9757 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9760 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9762 if (info->altivec_size)
9763 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9765 if (info->vrsave_size)
9766 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9768 if (info->altivec_padding_size)
9769 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9770 info->altivec_padding_size);
9772 if (info->spe_padding_size)
9773 fprintf (stderr, "\tspe_padding_size = %5d\n",
9774 info->spe_padding_size);
9777 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9780 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9783 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9785 if (info->save_size)
9786 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9788 if (info->reg_size != 4)
9789 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9791 fprintf (stderr, "\n");
9795 rs6000_return_addr (count, frame)
9799 /* Currently we don't optimize very well between prolog and body
9800 code and for PIC code the code can be actually quite bad, so
9801 don't try to be too clever here. */
9802 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9804 cfun->machine->ra_needs_full_frame = 1;
9811 plus_constant (copy_to_reg
9812 (gen_rtx_MEM (Pmode,
9813 memory_address (Pmode, frame))),
9814 RETURN_ADDRESS_OFFSET)));
9817 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9820 /* Say whether a function is a candidate for sibcall handling or not.
9821 We do not allow indirect calls to be optimized into sibling calls.
9822 Also, we can't do it if there are any vector parameters; there's
9823 nowhere to put the VRsave code so it works; note that functions with
9824 vector parameters are required to have a prototype, so the argument
9825 type info must be available here. (The tail recursion case can work
9826 with vector parameters, but there's no way to distinguish here.) */
9828 rs6000_function_ok_for_sibcall (decl, exp)
9830 tree exp ATTRIBUTE_UNUSED;
9835 if (TARGET_ALTIVEC_VRSAVE)
9837 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9838 type; type = TREE_CHAIN (type))
9840 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9844 if (DEFAULT_ABI == ABI_DARWIN
9845 || (*targetm.binds_local_p) (decl))
9847 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9849 if (!lookup_attribute ("longcall", attr_list)
9850 || lookup_attribute ("shortcall", attr_list))
9858 rs6000_ra_ever_killed ()
9864 /* Irritatingly, there are two kinds of thunks -- those created with
9865 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9866 through the regular part of the compiler. This is a very hacky
9867 way to tell them apart. */
9868 if (current_function_is_thunk && !no_new_pseudos)
9871 /* regs_ever_live has LR marked as used if any sibcalls are present,
9872 but this should not force saving and restoring in the
9873 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9874 clobbers LR, so that is inappropriate. */
9876 /* Also, the prologue can generate a store into LR that
9877 doesn't really count, like this:
9880 bcl to set PIC register
9884 When we're called from the epilogue, we need to avoid counting
9887 push_topmost_sequence ();
9889 pop_topmost_sequence ();
9890 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9892 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9896 if (FIND_REG_INC_NOTE (insn, reg))
9898 else if (GET_CODE (insn) == CALL_INSN
9899 && !SIBLING_CALL_P (insn))
9901 else if (set_of (reg, insn) != NULL_RTX
9902 && !prologue_epilogue_contains (insn))
9909 /* Add a REG_MAYBE_DEAD note to the insn. */
9911 rs6000_maybe_dead (insn)
9914 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9919 /* Emit instructions needed to load the TOC register.
9920 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9921 a constant pool; or for SVR4 -fpic. */
9924 rs6000_emit_load_toc_table (fromprolog)
9928 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9930 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9932 rtx temp = (fromprolog
9933 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9934 : gen_reg_rtx (Pmode));
9935 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
9937 rs6000_maybe_dead (insn);
9938 insn = emit_move_insn (dest, temp);
9940 rs6000_maybe_dead (insn);
9942 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
9945 rtx tempLR = (fromprolog
9946 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9947 : gen_reg_rtx (Pmode));
9948 rtx temp0 = (fromprolog
9949 ? gen_rtx_REG (Pmode, 0)
9950 : gen_reg_rtx (Pmode));
9953 /* possibly create the toc section */
9954 if (! toc_initialized)
9957 function_section (current_function_decl);
9964 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9965 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9967 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9968 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9970 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9972 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9973 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9980 static int reload_toc_labelno = 0;
9982 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9984 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9985 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9987 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
9988 emit_move_insn (dest, tempLR);
9989 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
9991 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
9993 rs6000_maybe_dead (insn);
9995 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
9997 /* This is for AIX code running in non-PIC ELF32. */
10000 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10001 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10003 insn = emit_insn (gen_elf_high (dest, realsym));
10005 rs6000_maybe_dead (insn);
10006 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10008 rs6000_maybe_dead (insn);
10010 else if (DEFAULT_ABI == ABI_AIX)
10013 insn = emit_insn (gen_load_toc_aix_si (dest));
10015 insn = emit_insn (gen_load_toc_aix_di (dest));
10017 rs6000_maybe_dead (insn);
10024 get_TOC_alias_set ()
10026 static int set = -1;
10028 set = new_alias_set ();
10032 /* This retuns nonzero if the current function uses the TOC. This is
10033 determined by the presence of (unspec ... 7), which is generated by
10034 the various load_toc_* patterns. */
10041 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10044 rtx pat = PATTERN (insn);
10047 if (GET_CODE (pat) == PARALLEL)
10048 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10049 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
10050 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
10057 create_TOC_reference (symbol)
10060 return gen_rtx_PLUS (Pmode,
10061 gen_rtx_REG (Pmode, TOC_REGISTER),
10062 gen_rtx_CONST (Pmode,
10063 gen_rtx_MINUS (Pmode, symbol,
10064 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10068 /* __throw will restore its own return address to be the same as the
10069 return address of the function that the throw is being made to.
10070 This is unfortunate, because we want to check the original
10071 return address to see if we need to restore the TOC.
10072 So we have to squirrel it away here.
10073 This is used only in compiling __throw and __rethrow.
10075 Most of this code should be removed by CSE. */
10076 static rtx insn_after_throw;
10078 /* This does the saving... */
10080 rs6000_aix_emit_builtin_unwind_init ()
10083 rtx stack_top = gen_reg_rtx (Pmode);
10084 rtx opcode_addr = gen_reg_rtx (Pmode);
10086 insn_after_throw = gen_reg_rtx (SImode);
10088 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10089 emit_move_insn (stack_top, mem);
10091 mem = gen_rtx_MEM (Pmode,
10092 gen_rtx_PLUS (Pmode, stack_top,
10093 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10094 emit_move_insn (opcode_addr, mem);
10095 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10098 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10099 in _eh.o). Only used on AIX.
10101 The idea is that on AIX, function calls look like this:
10102 bl somefunction-trampoline
10106 somefunction-trampoline:
10108 ... load function address in the count register ...
10110 or like this, if the linker determines that this is not a cross-module call
10111 and so the TOC need not be restored:
10114 or like this, if the compiler could determine that this is not a
10117 now, the tricky bit here is that register 2 is saved and restored
10118 by the _linker_, so we can't readily generate debugging information
10119 for it. So we need to go back up the call chain looking at the
10120 insns at return addresses to see which calls saved the TOC register
10121 and so see where it gets restored from.
10123 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10124 just before the actual epilogue.
10126 On the bright side, this incurs no space or time overhead unless an
10127 exception is thrown, except for the extra code in libgcc.a.
10129 The parameter STACKSIZE is a register containing (at runtime)
10130 the amount to be popped off the stack in addition to the stack frame
10131 of this routine (which will be __throw or __rethrow, and so is
10132 guaranteed to have a stack frame). */
10135 rs6000_emit_eh_toc_restore (stacksize)
10139 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10140 rtx tocompare = gen_reg_rtx (SImode);
10141 rtx opcode = gen_reg_rtx (SImode);
10142 rtx opcode_addr = gen_reg_rtx (Pmode);
10144 rtx loop_start = gen_label_rtx ();
10145 rtx no_toc_restore_needed = gen_label_rtx ();
10146 rtx loop_exit = gen_label_rtx ();
10148 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10149 set_mem_alias_set (mem, rs6000_sr_alias_set);
10150 emit_move_insn (bottom_of_stack, mem);
10152 top_of_stack = expand_binop (Pmode, add_optab,
10153 bottom_of_stack, stacksize,
10154 NULL_RTX, 1, OPTAB_WIDEN);
10156 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10157 : 0xE8410028, SImode));
10159 if (insn_after_throw == NULL_RTX)
10161 emit_move_insn (opcode, insn_after_throw);
10163 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10164 emit_label (loop_start);
10166 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10167 SImode, NULL_RTX, NULL_RTX,
10168 no_toc_restore_needed);
10170 mem = gen_rtx_MEM (Pmode,
10171 gen_rtx_PLUS (Pmode, bottom_of_stack,
10172 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10173 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10175 emit_label (no_toc_restore_needed);
10176 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10177 Pmode, NULL_RTX, NULL_RTX,
10180 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10181 set_mem_alias_set (mem, rs6000_sr_alias_set);
10182 emit_move_insn (bottom_of_stack, mem);
10184 mem = gen_rtx_MEM (Pmode,
10185 gen_rtx_PLUS (Pmode, bottom_of_stack,
10186 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10187 emit_move_insn (opcode_addr, mem);
10188 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10190 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10191 emit_jump (loop_start);
10192 emit_note (NULL, NOTE_INSN_LOOP_END);
10193 emit_label (loop_exit);
10195 #endif /* TARGET_AIX */
10197 /* This ties together stack memory (MEM with an alias set of
10198 rs6000_sr_alias_set) and the change to the stack pointer. */
10201 rs6000_emit_stack_tie ()
10203 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10205 set_mem_alias_set (mem, rs6000_sr_alias_set);
10206 emit_insn (gen_stack_tie (mem));
10209 /* Emit the correct code for allocating stack space, as insns.
10210 If COPY_R12, make sure a copy of the old frame is left in r12.
10211 The generated code may use hard register 0 as a temporary. */
10214 rs6000_emit_allocate_stack (size, copy_r12)
10215 HOST_WIDE_INT size;
10219 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10220 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10221 rtx todec = GEN_INT (-size);
10223 if (current_function_limit_stack)
10225 if (REG_P (stack_limit_rtx)
10226 && REGNO (stack_limit_rtx) > 1
10227 && REGNO (stack_limit_rtx) <= 31)
10229 emit_insn (TARGET_32BIT
10230 ? gen_addsi3 (tmp_reg,
10233 : gen_adddi3 (tmp_reg,
10237 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10240 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10242 && DEFAULT_ABI == ABI_V4)
10244 rtx toload = gen_rtx_CONST (VOIDmode,
10245 gen_rtx_PLUS (Pmode,
10249 emit_insn (gen_elf_high (tmp_reg, toload));
10250 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10251 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10255 warning ("stack limit expression is not supported");
10258 if (copy_r12 || ! TARGET_UPDATE)
10259 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10265 /* Need a note here so that try_split doesn't get confused. */
10266 if (get_last_insn() == NULL_RTX)
10267 emit_note (0, NOTE_INSN_DELETED);
10268 insn = emit_move_insn (tmp_reg, todec);
10269 try_split (PATTERN (insn), insn, 0);
10273 insn = emit_insn (TARGET_32BIT
10274 ? gen_movsi_update (stack_reg, stack_reg,
10276 : gen_movdi_update (stack_reg, stack_reg,
10277 todec, stack_reg));
10281 insn = emit_insn (TARGET_32BIT
10282 ? gen_addsi3 (stack_reg, stack_reg, todec)
10283 : gen_adddi3 (stack_reg, stack_reg, todec));
10284 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10285 gen_rtx_REG (Pmode, 12));
10288 RTX_FRAME_RELATED_P (insn) = 1;
10290 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10291 gen_rtx_SET (VOIDmode, stack_reg,
10292 gen_rtx_PLUS (Pmode, stack_reg,
10297 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10298 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10299 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10300 deduce these equivalences by itself so it wasn't necessary to hold
10301 its hand so much. */
10304 rs6000_frame_related (insn, reg, val, reg2, rreg)
10313 /* copy_rtx will not make unique copies of registers, so we need to
10314 ensure we don't have unwanted sharing here. */
10316 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10319 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10321 real = copy_rtx (PATTERN (insn));
10323 if (reg2 != NULL_RTX)
10324 real = replace_rtx (real, reg2, rreg);
10326 real = replace_rtx (real, reg,
10327 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10328 STACK_POINTER_REGNUM),
10331 /* We expect that 'real' is either a SET or a PARALLEL containing
10332 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10333 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10335 if (GET_CODE (real) == SET)
10339 temp = simplify_rtx (SET_SRC (set));
10341 SET_SRC (set) = temp;
10342 temp = simplify_rtx (SET_DEST (set));
10344 SET_DEST (set) = temp;
10345 if (GET_CODE (SET_DEST (set)) == MEM)
10347 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10349 XEXP (SET_DEST (set), 0) = temp;
10352 else if (GET_CODE (real) == PARALLEL)
10355 for (i = 0; i < XVECLEN (real, 0); i++)
10356 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10358 rtx set = XVECEXP (real, 0, i);
10360 temp = simplify_rtx (SET_SRC (set));
10362 SET_SRC (set) = temp;
10363 temp = simplify_rtx (SET_DEST (set));
10365 SET_DEST (set) = temp;
10366 if (GET_CODE (SET_DEST (set)) == MEM)
10368 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10370 XEXP (SET_DEST (set), 0) = temp;
10372 RTX_FRAME_RELATED_P (set) = 1;
10379 real = spe_synthesize_frame_save (real);
10381 RTX_FRAME_RELATED_P (insn) = 1;
10382 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10387 /* Given an SPE frame note, return a PARALLEL of SETs with the
10388 original note, plus a synthetic register save. */
10391 spe_synthesize_frame_save (real)
10394 rtx synth, offset, reg, real2;
10396 if (GET_CODE (real) != SET
10397 || GET_MODE (SET_SRC (real)) != V2SImode)
10400 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10401 frame related note. The parallel contains a set of the register
10402 being saved, and another set to a synthetic register (n+1200).
10403 This is so we can differentiate between 64-bit and 32-bit saves.
10404 Words cannot describe this nastiness. */
10406 if (GET_CODE (SET_DEST (real)) != MEM
10407 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10408 || GET_CODE (SET_SRC (real)) != REG)
10412 (set (mem (plus (reg x) (const y)))
10415 (set (mem (plus (reg x) (const y+4)))
10419 real2 = copy_rtx (real);
10420 PUT_MODE (SET_DEST (real2), SImode);
10421 reg = SET_SRC (real2);
10422 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10423 synth = copy_rtx (real2);
10425 if (BYTES_BIG_ENDIAN)
10427 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10428 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10431 reg = SET_SRC (synth);
10433 synth = replace_rtx (synth, reg,
10434 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10436 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10437 synth = replace_rtx (synth, offset,
10438 GEN_INT (INTVAL (offset)
10439 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10441 RTX_FRAME_RELATED_P (synth) = 1;
10442 RTX_FRAME_RELATED_P (real2) = 1;
10443 if (BYTES_BIG_ENDIAN)
10444 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10446 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10451 /* Returns an insn that has a vrsave set operation with the
10452 appropriate CLOBBERs. */
10455 generate_set_vrsave (reg, info, epiloguep)
10457 rs6000_stack_t *info;
10461 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10462 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10465 = gen_rtx_SET (VOIDmode,
10467 gen_rtx_UNSPEC_VOLATILE (SImode,
10468 gen_rtvec (2, reg, vrsave),
10473 /* We need to clobber the registers in the mask so the scheduler
10474 does not move sets to VRSAVE before sets of AltiVec registers.
10476 However, if the function receives nonlocal gotos, reload will set
10477 all call saved registers live. We will end up with:
10479 (set (reg 999) (mem))
10480 (parallel [ (set (reg vrsave) (unspec blah))
10481 (clobber (reg 999))])
10483 The clobber will cause the store into reg 999 to be dead, and
10484 flow will attempt to delete an epilogue insn. In this case, we
10485 need an unspec use/set of the register. */
10487 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10488 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10490 if (!epiloguep || call_used_regs [i])
10491 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10492 gen_rtx_REG (V4SImode, i));
10495 rtx reg = gen_rtx_REG (V4SImode, i);
10498 = gen_rtx_SET (VOIDmode,
10500 gen_rtx_UNSPEC (V4SImode,
10501 gen_rtvec (1, reg), 27));
10505 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10507 for (i = 0; i < nclobs; ++i)
10508 XVECEXP (insn, 0, i) = clobs[i];
10513 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10514 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10517 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10520 enum machine_mode mode;
10521 unsigned int regno;
10525 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10526 rtx replacea, replaceb;
10528 int_rtx = GEN_INT (offset);
10530 /* Some cases that need register indexed addressing. */
10531 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10533 && SPE_VECTOR_MODE (mode)
10534 && !SPE_CONST_OFFSET_OK (offset)))
10536 /* Whomever calls us must make sure r11 is available in the
10537 flow path of instructions in the prologue. */
10538 offset_rtx = gen_rtx_REG (Pmode, 11);
10539 emit_move_insn (offset_rtx, int_rtx);
10541 replacea = offset_rtx;
10542 replaceb = int_rtx;
10546 offset_rtx = int_rtx;
10547 replacea = NULL_RTX;
10548 replaceb = NULL_RTX;
10551 reg = gen_rtx_REG (mode, regno);
10552 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10553 mem = gen_rtx_MEM (mode, addr);
10554 set_mem_alias_set (mem, rs6000_sr_alias_set);
10556 insn = emit_move_insn (mem, reg);
10558 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10561 /* Emit an offset memory reference suitable for a frame store, while
10562 converting to a valid addressing mode. */
10565 gen_frame_mem_offset (mode, reg, offset)
10566 enum machine_mode mode;
10570 rtx int_rtx, offset_rtx;
10572 int_rtx = GEN_INT (offset);
10574 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10576 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10577 emit_move_insn (offset_rtx, int_rtx);
10580 offset_rtx = int_rtx;
10582 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10585 /* Emit function prologue as insns. */
10588 rs6000_emit_prologue ()
10590 rs6000_stack_t *info = rs6000_stack_info ();
10591 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10592 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10593 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10594 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10595 rtx frame_reg_rtx = sp_reg_rtx;
10596 rtx cr_save_rtx = NULL;
10598 int saving_FPRs_inline;
10599 int using_store_multiple;
10600 HOST_WIDE_INT sp_offset = 0;
10602 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10604 reg_mode = V2SImode;
10608 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10609 && (!TARGET_SPE_ABI
10610 || info->spe_64bit_regs_used == 0)
10611 && info->first_gp_reg_save < 31);
10612 saving_FPRs_inline = (info->first_fp_reg_save == 64
10613 || FP_SAVE_INLINE (info->first_fp_reg_save));
10615 /* For V.4, update stack before we do any saving and set back pointer. */
10616 if (info->push_p && DEFAULT_ABI == ABI_V4)
10618 if (info->total_size < 32767)
10619 sp_offset = info->total_size;
10621 frame_reg_rtx = frame_ptr_rtx;
10622 rs6000_emit_allocate_stack (info->total_size,
10623 (frame_reg_rtx != sp_reg_rtx
10624 && (info->cr_save_p
10626 || info->first_fp_reg_save < 64
10627 || info->first_gp_reg_save < 32
10629 if (frame_reg_rtx != sp_reg_rtx)
10630 rs6000_emit_stack_tie ();
10633 /* Save AltiVec registers if needed. */
10634 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10638 /* There should be a non inline version of this, for when we
10639 are saving lots of vector registers. */
10640 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10641 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10643 rtx areg, savereg, mem;
10646 offset = info->altivec_save_offset + sp_offset
10647 + 16 * (i - info->first_altivec_reg_save);
10649 savereg = gen_rtx_REG (V4SImode, i);
10651 areg = gen_rtx_REG (Pmode, 0);
10652 emit_move_insn (areg, GEN_INT (offset));
10654 /* AltiVec addressing mode is [reg+reg]. */
10655 mem = gen_rtx_MEM (V4SImode,
10656 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10658 set_mem_alias_set (mem, rs6000_sr_alias_set);
10660 insn = emit_move_insn (mem, savereg);
10662 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10663 areg, GEN_INT (offset));
10667 /* VRSAVE is a bit vector representing which AltiVec registers
10668 are used. The OS uses this to determine which vector
10669 registers to save on a context switch. We need to save
10670 VRSAVE on the stack frame, add whatever AltiVec registers we
10671 used in this function, and do the corresponding magic in the
10674 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10676 rtx reg, mem, vrsave;
10679 /* Get VRSAVE onto a GPR. */
10680 reg = gen_rtx_REG (SImode, 12);
10681 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10683 emit_insn (gen_get_vrsave_internal (reg));
10685 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10688 offset = info->vrsave_save_offset + sp_offset;
10690 = gen_rtx_MEM (SImode,
10691 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10692 set_mem_alias_set (mem, rs6000_sr_alias_set);
10693 insn = emit_move_insn (mem, reg);
10695 /* Include the registers in the mask. */
10696 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10698 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10701 /* If we use the link register, get it into r0. */
10702 if (info->lr_save_p)
10703 emit_move_insn (gen_rtx_REG (Pmode, 0),
10704 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10706 /* If we need to save CR, put it into r12. */
10707 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10709 cr_save_rtx = gen_rtx_REG (SImode, 12);
10710 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10713 /* Do any required saving of fpr's. If only one or two to save, do
10714 it ourselves. Otherwise, call function. */
10715 if (saving_FPRs_inline)
10718 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10719 if ((regs_ever_live[info->first_fp_reg_save+i]
10720 && ! call_used_regs[info->first_fp_reg_save+i]))
10721 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10722 info->first_fp_reg_save + i,
10723 info->fp_save_offset + sp_offset + 8 * i,
10726 else if (info->first_fp_reg_save != 64)
10730 const char *alloc_rname;
10732 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10734 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10735 gen_rtx_REG (Pmode,
10736 LINK_REGISTER_REGNUM));
10737 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10738 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10739 alloc_rname = ggc_strdup (rname);
10740 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10741 gen_rtx_SYMBOL_REF (Pmode,
10743 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10745 rtx addr, reg, mem;
10746 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10747 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10748 GEN_INT (info->fp_save_offset
10749 + sp_offset + 8*i));
10750 mem = gen_rtx_MEM (DFmode, addr);
10751 set_mem_alias_set (mem, rs6000_sr_alias_set);
10753 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10755 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10756 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10757 NULL_RTX, NULL_RTX);
10760 /* Save GPRs. This is done as a PARALLEL if we are using
10761 the store-multiple instructions. */
10762 if (using_store_multiple)
10766 p = rtvec_alloc (32 - info->first_gp_reg_save);
10767 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10769 rtx addr, reg, mem;
10770 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10771 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10772 GEN_INT (info->gp_save_offset
10775 mem = gen_rtx_MEM (reg_mode, addr);
10776 set_mem_alias_set (mem, rs6000_sr_alias_set);
10778 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10780 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10781 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10782 NULL_RTX, NULL_RTX);
10787 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10788 if ((regs_ever_live[info->first_gp_reg_save+i]
10789 && ! call_used_regs[info->first_gp_reg_save+i])
10790 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10791 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10792 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10794 rtx addr, reg, mem;
10795 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10797 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10799 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10802 if (!SPE_CONST_OFFSET_OK (offset))
10804 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10805 emit_move_insn (b, GEN_INT (offset));
10808 b = GEN_INT (offset);
10810 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10811 mem = gen_rtx_MEM (V2SImode, addr);
10812 set_mem_alias_set (mem, rs6000_sr_alias_set);
10813 insn = emit_move_insn (mem, reg);
10815 if (GET_CODE (b) == CONST_INT)
10816 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10817 NULL_RTX, NULL_RTX);
10819 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10820 b, GEN_INT (offset));
10824 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10825 GEN_INT (info->gp_save_offset
10828 mem = gen_rtx_MEM (reg_mode, addr);
10829 set_mem_alias_set (mem, rs6000_sr_alias_set);
10831 insn = emit_move_insn (mem, reg);
10832 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10833 NULL_RTX, NULL_RTX);
10838 /* ??? There's no need to emit actual instructions here, but it's the
10839 easiest way to get the frame unwind information emitted. */
10840 if (current_function_calls_eh_return)
10842 unsigned int i, regno;
10846 regno = EH_RETURN_DATA_REGNO (i);
10847 if (regno == INVALID_REGNUM)
10850 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10851 info->ehrd_offset + sp_offset
10852 + reg_size * (int) i,
10857 /* Save lr if we used it. */
10858 if (info->lr_save_p)
10860 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10861 GEN_INT (info->lr_save_offset + sp_offset));
10862 rtx reg = gen_rtx_REG (Pmode, 0);
10863 rtx mem = gen_rtx_MEM (Pmode, addr);
10864 /* This should not be of rs6000_sr_alias_set, because of
10865 __builtin_return_address. */
10867 insn = emit_move_insn (mem, reg);
10868 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10869 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10872 /* Save CR if we use any that must be preserved. */
10873 if (info->cr_save_p)
10875 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10876 GEN_INT (info->cr_save_offset + sp_offset));
10877 rtx mem = gen_rtx_MEM (SImode, addr);
10879 set_mem_alias_set (mem, rs6000_sr_alias_set);
10881 /* If r12 was used to hold the original sp, copy cr into r0 now
10883 if (REGNO (frame_reg_rtx) == 12)
10885 cr_save_rtx = gen_rtx_REG (SImode, 0);
10886 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10888 insn = emit_move_insn (mem, cr_save_rtx);
10890 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10891 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10892 OK. All we have to do is specify that _one_ condition code
10893 register is saved in this stack slot. The thrower's epilogue
10894 will then restore all the call-saved registers.
10895 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10896 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10897 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10900 /* Update stack and set back pointer unless this is V.4,
10901 for which it was done previously. */
10902 if (info->push_p && DEFAULT_ABI != ABI_V4)
10903 rs6000_emit_allocate_stack (info->total_size, FALSE);
10905 /* Set frame pointer, if needed. */
10906 if (frame_pointer_needed)
10908 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10910 RTX_FRAME_RELATED_P (insn) = 1;
10913 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10914 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10915 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10916 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10918 /* If emit_load_toc_table will use the link register, we need to save
10919 it. We use R11 for this purpose because emit_load_toc_table
10920 can use register 0. This allows us to use a plain 'blr' to return
10921 from the procedure more often. */
10922 int save_LR_around_toc_setup = (TARGET_ELF
10923 && DEFAULT_ABI != ABI_AIX
10925 && ! info->lr_save_p
10926 && EXIT_BLOCK_PTR->pred != NULL);
10927 if (save_LR_around_toc_setup)
10928 emit_move_insn (gen_rtx_REG (Pmode, 11),
10929 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10931 rs6000_emit_load_toc_table (TRUE);
10933 if (save_LR_around_toc_setup)
10934 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10935 gen_rtx_REG (Pmode, 11));
10939 if (DEFAULT_ABI == ABI_DARWIN
10940 && flag_pic && current_function_uses_pic_offset_table)
10942 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10943 const char *picbase = machopic_function_base_name ();
10944 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
10946 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10948 rs6000_maybe_dead (
10949 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10950 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10955 /* Write function prologue. */
10958 rs6000_output_function_prologue (file, size)
10960 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10962 rs6000_stack_t *info = rs6000_stack_info ();
10964 if (TARGET_DEBUG_STACK)
10965 debug_stack_info (info);
10967 /* Write .extern for any function we will call to save and restore
10969 if (info->first_fp_reg_save < 64
10970 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10971 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10972 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10973 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10974 RESTORE_FP_SUFFIX);
10976 /* Write .extern for AIX common mode routines, if needed. */
10977 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10979 fputs ("\t.extern __mulh\n", file);
10980 fputs ("\t.extern __mull\n", file);
10981 fputs ("\t.extern __divss\n", file);
10982 fputs ("\t.extern __divus\n", file);
10983 fputs ("\t.extern __quoss\n", file);
10984 fputs ("\t.extern __quous\n", file);
10985 common_mode_defined = 1;
10988 if (! HAVE_prologue)
10992 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10993 the "toplevel" insn chain. */
10994 emit_note (0, NOTE_INSN_DELETED);
10995 rs6000_emit_prologue ();
10996 emit_note (0, NOTE_INSN_DELETED);
10998 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11002 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11004 INSN_ADDRESSES_NEW (insn, addr);
11009 if (TARGET_DEBUG_STACK)
11010 debug_rtx_list (get_insns (), 100);
11011 final (get_insns (), file, FALSE, FALSE);
11015 rs6000_pic_labelno++;
11018 /* Emit function epilogue as insns.
11020 At present, dwarf2out_frame_debug_expr doesn't understand
11021 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11022 anywhere in the epilogue. Most of the insns below would in any case
11023 need special notes to explain where r11 is in relation to the stack. */
11026 rs6000_emit_epilogue (sibcall)
11029 rs6000_stack_t *info;
11030 int restoring_FPRs_inline;
11031 int using_load_multiple;
11032 int using_mfcr_multiple;
11033 int use_backchain_to_restore_sp;
11035 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11036 rtx frame_reg_rtx = sp_reg_rtx;
11037 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11038 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11041 info = rs6000_stack_info ();
11043 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11045 reg_mode = V2SImode;
11049 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11050 && (!TARGET_SPE_ABI
11051 || info->spe_64bit_regs_used == 0)
11052 && info->first_gp_reg_save < 31);
11053 restoring_FPRs_inline = (sibcall
11054 || current_function_calls_eh_return
11055 || info->first_fp_reg_save == 64
11056 || FP_SAVE_INLINE (info->first_fp_reg_save));
11057 use_backchain_to_restore_sp = (frame_pointer_needed
11058 || current_function_calls_alloca
11059 || info->total_size > 32767);
11060 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11061 || rs6000_cpu == PROCESSOR_PPC603
11062 || rs6000_cpu == PROCESSOR_PPC750
11065 /* If we have a frame pointer, a call to alloca, or a large stack
11066 frame, restore the old stack pointer using the backchain. Otherwise,
11067 we know what size to update it with. */
11068 if (use_backchain_to_restore_sp)
11070 /* Under V.4, don't reset the stack pointer until after we're done
11071 loading the saved registers. */
11072 if (DEFAULT_ABI == ABI_V4)
11073 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11075 emit_move_insn (frame_reg_rtx,
11076 gen_rtx_MEM (Pmode, sp_reg_rtx));
11079 else if (info->push_p)
11081 if (DEFAULT_ABI == ABI_V4)
11082 sp_offset = info->total_size;
11085 emit_insn (TARGET_32BIT
11086 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11087 GEN_INT (info->total_size))
11088 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11089 GEN_INT (info->total_size)));
11093 /* Restore AltiVec registers if needed. */
11094 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11098 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11099 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11101 rtx addr, areg, mem;
11103 areg = gen_rtx_REG (Pmode, 0);
11105 (areg, GEN_INT (info->altivec_save_offset
11107 + 16 * (i - info->first_altivec_reg_save)));
11109 /* AltiVec addressing mode is [reg+reg]. */
11110 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11111 mem = gen_rtx_MEM (V4SImode, addr);
11112 set_mem_alias_set (mem, rs6000_sr_alias_set);
11114 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11118 /* Restore VRSAVE if needed. */
11119 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11121 rtx addr, mem, reg;
11123 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11124 GEN_INT (info->vrsave_save_offset + sp_offset));
11125 mem = gen_rtx_MEM (SImode, addr);
11126 set_mem_alias_set (mem, rs6000_sr_alias_set);
11127 reg = gen_rtx_REG (SImode, 12);
11128 emit_move_insn (reg, mem);
11130 emit_insn (generate_set_vrsave (reg, info, 1));
11133 /* Get the old lr if we saved it. */
11134 if (info->lr_save_p)
11136 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11137 info->lr_save_offset + sp_offset);
11139 set_mem_alias_set (mem, rs6000_sr_alias_set);
11141 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11144 /* Get the old cr if we saved it. */
11145 if (info->cr_save_p)
11147 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11148 GEN_INT (info->cr_save_offset + sp_offset));
11149 rtx mem = gen_rtx_MEM (SImode, addr);
11151 set_mem_alias_set (mem, rs6000_sr_alias_set);
11153 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11156 /* Set LR here to try to overlap restores below. */
11157 if (info->lr_save_p)
11158 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11159 gen_rtx_REG (Pmode, 0));
11161 /* Load exception handler data registers, if needed. */
11162 if (current_function_calls_eh_return)
11164 unsigned int i, regno;
11170 regno = EH_RETURN_DATA_REGNO (i);
11171 if (regno == INVALID_REGNUM)
11174 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11175 info->ehrd_offset + sp_offset
11176 + reg_size * (int) i);
11177 set_mem_alias_set (mem, rs6000_sr_alias_set);
11179 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11183 /* Restore GPRs. This is done as a PARALLEL if we are using
11184 the load-multiple instructions. */
11185 if (using_load_multiple)
11188 p = rtvec_alloc (32 - info->first_gp_reg_save);
11189 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11191 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11192 GEN_INT (info->gp_save_offset
11195 rtx mem = gen_rtx_MEM (reg_mode, addr);
11197 set_mem_alias_set (mem, rs6000_sr_alias_set);
11200 gen_rtx_SET (VOIDmode,
11201 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11204 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11207 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11208 if ((regs_ever_live[info->first_gp_reg_save+i]
11209 && ! call_used_regs[info->first_gp_reg_save+i])
11210 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11211 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11212 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11214 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11215 GEN_INT (info->gp_save_offset
11218 rtx mem = gen_rtx_MEM (reg_mode, addr);
11220 /* Restore 64-bit quantities for SPE. */
11221 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11223 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11226 if (!SPE_CONST_OFFSET_OK (offset))
11228 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11229 emit_move_insn (b, GEN_INT (offset));
11232 b = GEN_INT (offset);
11234 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11235 mem = gen_rtx_MEM (V2SImode, addr);
11238 set_mem_alias_set (mem, rs6000_sr_alias_set);
11240 emit_move_insn (gen_rtx_REG (reg_mode,
11241 info->first_gp_reg_save + i), mem);
11244 /* Restore fpr's if we need to do it without calling a function. */
11245 if (restoring_FPRs_inline)
11246 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11247 if ((regs_ever_live[info->first_fp_reg_save+i]
11248 && ! call_used_regs[info->first_fp_reg_save+i]))
11251 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11252 GEN_INT (info->fp_save_offset
11255 mem = gen_rtx_MEM (DFmode, addr);
11256 set_mem_alias_set (mem, rs6000_sr_alias_set);
11258 emit_move_insn (gen_rtx_REG (DFmode,
11259 info->first_fp_reg_save + i),
11263 /* If we saved cr, restore it here. Just those that were used. */
11264 if (info->cr_save_p)
11266 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11269 if (using_mfcr_multiple)
11271 for (i = 0; i < 8; i++)
11272 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11278 if (using_mfcr_multiple && count > 1)
11283 p = rtvec_alloc (count);
11286 for (i = 0; i < 8; i++)
11287 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11289 rtvec r = rtvec_alloc (2);
11290 RTVEC_ELT (r, 0) = r12_rtx;
11291 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11292 RTVEC_ELT (p, ndx) =
11293 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11294 gen_rtx_UNSPEC (CCmode, r, 20));
11297 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11302 for (i = 0; i < 8; i++)
11303 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11305 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11311 /* If this is V.4, unwind the stack pointer after all of the loads
11312 have been done. We need to emit a block here so that sched
11313 doesn't decide to move the sp change before the register restores
11314 (which may not have any obvious dependency on the stack). This
11315 doesn't hurt performance, because there is no scheduling that can
11316 be done after this point. */
11317 if (DEFAULT_ABI == ABI_V4)
11319 if (frame_reg_rtx != sp_reg_rtx)
11320 rs6000_emit_stack_tie ();
11322 if (use_backchain_to_restore_sp)
11324 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11326 else if (sp_offset != 0)
11328 emit_insn (TARGET_32BIT
11329 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11330 GEN_INT (sp_offset))
11331 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11332 GEN_INT (sp_offset)));
11336 if (current_function_calls_eh_return)
11338 rtx sa = EH_RETURN_STACKADJ_RTX;
11339 emit_insn (TARGET_32BIT
11340 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11341 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11347 if (! restoring_FPRs_inline)
11348 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11350 p = rtvec_alloc (2);
11352 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11353 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11354 gen_rtx_REG (Pmode,
11355 LINK_REGISTER_REGNUM));
11357 /* If we have to restore more than two FP registers, branch to the
11358 restore function. It will return to our caller. */
11359 if (! restoring_FPRs_inline)
11363 const char *alloc_rname;
11365 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11366 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11367 alloc_rname = ggc_strdup (rname);
11368 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11369 gen_rtx_SYMBOL_REF (Pmode,
11372 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11375 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11376 GEN_INT (info->fp_save_offset + 8*i));
11377 mem = gen_rtx_MEM (DFmode, addr);
11378 set_mem_alias_set (mem, rs6000_sr_alias_set);
11380 RTVEC_ELT (p, i+3) =
11381 gen_rtx_SET (VOIDmode,
11382 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11387 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11391 /* Write function epilogue. */
11394 rs6000_output_function_epilogue (file, size)
11396 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11398 rs6000_stack_t *info = rs6000_stack_info ();
11400 if (! HAVE_epilogue)
11402 rtx insn = get_last_insn ();
11403 /* If the last insn was a BARRIER, we don't have to write anything except
11404 the trace table. */
11405 if (GET_CODE (insn) == NOTE)
11406 insn = prev_nonnote_insn (insn);
11407 if (insn == 0 || GET_CODE (insn) != BARRIER)
11409 /* This is slightly ugly, but at least we don't have two
11410 copies of the epilogue-emitting code. */
11413 /* A NOTE_INSN_DELETED is supposed to be at the start
11414 and end of the "toplevel" insn chain. */
11415 emit_note (0, NOTE_INSN_DELETED);
11416 rs6000_emit_epilogue (FALSE);
11417 emit_note (0, NOTE_INSN_DELETED);
11419 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11423 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11425 INSN_ADDRESSES_NEW (insn, addr);
11430 if (TARGET_DEBUG_STACK)
11431 debug_rtx_list (get_insns (), 100);
11432 final (get_insns (), file, FALSE, FALSE);
11437 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11440 We don't output a traceback table if -finhibit-size-directive was
11441 used. The documentation for -finhibit-size-directive reads
11442 ``don't output a @code{.size} assembler directive, or anything
11443 else that would cause trouble if the function is split in the
11444 middle, and the two halves are placed at locations far apart in
11445 memory.'' The traceback table has this property, since it
11446 includes the offset from the start of the function to the
11447 traceback table itself.
11449 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11450 different traceback table. */
11451 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11452 && rs6000_traceback != traceback_none)
11454 const char *fname = NULL;
11455 const char *language_string = lang_hooks.name;
11456 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11458 int optional_tbtab;
11460 if (rs6000_traceback == traceback_full)
11461 optional_tbtab = 1;
11462 else if (rs6000_traceback == traceback_part)
11463 optional_tbtab = 0;
11465 optional_tbtab = !optimize_size && !TARGET_ELF;
11467 if (optional_tbtab)
11469 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11470 while (*fname == '.') /* V.4 encodes . in the name */
11473 /* Need label immediately before tbtab, so we can compute
11474 its offset from the function start. */
11475 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11476 ASM_OUTPUT_LABEL (file, fname);
11479 /* The .tbtab pseudo-op can only be used for the first eight
11480 expressions, since it can't handle the possibly variable
11481 length fields that follow. However, if you omit the optional
11482 fields, the assembler outputs zeros for all optional fields
11483 anyways, giving each variable length field is minimum length
11484 (as defined in sys/debug.h). Thus we can not use the .tbtab
11485 pseudo-op at all. */
11487 /* An all-zero word flags the start of the tbtab, for debuggers
11488 that have to find it by searching forward from the entry
11489 point or from the current pc. */
11490 fputs ("\t.long 0\n", file);
11492 /* Tbtab format type. Use format type 0. */
11493 fputs ("\t.byte 0,", file);
11495 /* Language type. Unfortunately, there doesn't seem to be any
11496 official way to get this info, so we use language_string. C
11497 is 0. C++ is 9. No number defined for Obj-C, so use the
11498 value for C for now. There is no official value for Java,
11499 although IBM appears to be using 13. There is no official value
11500 for Chill, so we've chosen 44 pseudo-randomly. */
11501 if (! strcmp (language_string, "GNU C")
11502 || ! strcmp (language_string, "GNU Objective-C"))
11504 else if (! strcmp (language_string, "GNU F77"))
11506 else if (! strcmp (language_string, "GNU Ada"))
11508 else if (! strcmp (language_string, "GNU Pascal"))
11510 else if (! strcmp (language_string, "GNU C++"))
11512 else if (! strcmp (language_string, "GNU Java"))
11514 else if (! strcmp (language_string, "GNU CHILL"))
11518 fprintf (file, "%d,", i);
11520 /* 8 single bit fields: global linkage (not set for C extern linkage,
11521 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11522 from start of procedure stored in tbtab, internal function, function
11523 has controlled storage, function has no toc, function uses fp,
11524 function logs/aborts fp operations. */
11525 /* Assume that fp operations are used if any fp reg must be saved. */
11526 fprintf (file, "%d,",
11527 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11529 /* 6 bitfields: function is interrupt handler, name present in
11530 proc table, function calls alloca, on condition directives
11531 (controls stack walks, 3 bits), saves condition reg, saves
11533 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11534 set up as a frame pointer, even when there is no alloca call. */
11535 fprintf (file, "%d,",
11536 ((optional_tbtab << 6)
11537 | ((optional_tbtab & frame_pointer_needed) << 5)
11538 | (info->cr_save_p << 1)
11539 | (info->lr_save_p)));
11541 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11543 fprintf (file, "%d,",
11544 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11546 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11547 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11549 if (optional_tbtab)
11551 /* Compute the parameter info from the function decl argument
11554 int next_parm_info_bit = 31;
11556 for (decl = DECL_ARGUMENTS (current_function_decl);
11557 decl; decl = TREE_CHAIN (decl))
11559 rtx parameter = DECL_INCOMING_RTL (decl);
11560 enum machine_mode mode = GET_MODE (parameter);
11562 if (GET_CODE (parameter) == REG)
11564 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11570 if (mode == SFmode)
11572 else if (mode == DFmode || mode == TFmode)
11577 /* If only one bit will fit, don't or in this entry. */
11578 if (next_parm_info_bit > 0)
11579 parm_info |= (bits << (next_parm_info_bit - 1));
11580 next_parm_info_bit -= 2;
11584 fixed_parms += ((GET_MODE_SIZE (mode)
11585 + (UNITS_PER_WORD - 1))
11587 next_parm_info_bit -= 1;
11593 /* Number of fixed point parameters. */
11594 /* This is actually the number of words of fixed point parameters; thus
11595 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11596 fprintf (file, "%d,", fixed_parms);
11598 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11600 /* This is actually the number of fp registers that hold parameters;
11601 and thus the maximum value is 13. */
11602 /* Set parameters on stack bit if parameters are not in their original
11603 registers, regardless of whether they are on the stack? Xlc
11604 seems to set the bit when not optimizing. */
11605 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11607 if (! optional_tbtab)
11610 /* Optional fields follow. Some are variable length. */
11612 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11613 11 double float. */
11614 /* There is an entry for each parameter in a register, in the order that
11615 they occur in the parameter list. Any intervening arguments on the
11616 stack are ignored. If the list overflows a long (max possible length
11617 34 bits) then completely leave off all elements that don't fit. */
11618 /* Only emit this long if there was at least one parameter. */
11619 if (fixed_parms || float_parms)
11620 fprintf (file, "\t.long %d\n", parm_info);
11622 /* Offset from start of code to tb table. */
11623 fputs ("\t.long ", file);
11624 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11626 RS6000_OUTPUT_BASENAME (file, fname);
11628 assemble_name (file, fname);
11630 fputs ("-.", file);
11632 RS6000_OUTPUT_BASENAME (file, fname);
11634 assemble_name (file, fname);
11638 /* Interrupt handler mask. */
11639 /* Omit this long, since we never set the interrupt handler bit
11642 /* Number of CTL (controlled storage) anchors. */
11643 /* Omit this long, since the has_ctl bit is never set above. */
11645 /* Displacement into stack of each CTL anchor. */
11646 /* Omit this list of longs, because there are no CTL anchors. */
11648 /* Length of function name. */
11651 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11653 /* Function name. */
11654 assemble_string (fname, strlen (fname));
11656 /* Register for alloca automatic storage; this is always reg 31.
11657 Only emit this if the alloca bit was set above. */
11658 if (frame_pointer_needed)
11659 fputs ("\t.byte 31\n", file);
11661 fputs ("\t.align 2\n", file);
11665 /* A C compound statement that outputs the assembler code for a thunk
11666 function, used to implement C++ virtual function calls with
11667 multiple inheritance. The thunk acts as a wrapper around a virtual
11668 function, adjusting the implicit object parameter before handing
11669 control off to the real function.
11671 First, emit code to add the integer DELTA to the location that
11672 contains the incoming first argument. Assume that this argument
11673 contains a pointer, and is the one used to pass the `this' pointer
11674 in C++. This is the incoming argument *before* the function
11675 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11676 values of all other incoming arguments.
11678 After the addition, emit code to jump to FUNCTION, which is a
11679 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11680 not touch the return address. Hence returning from FUNCTION will
11681 return to whoever called the current `thunk'.
11683 The effect must be as if FUNCTION had been called directly with the
11684 adjusted first argument. This macro is responsible for emitting
11685 all of the code for a thunk function; output_function_prologue()
11686 and output_function_epilogue() are not invoked.
11688 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11689 been extracted from it.) It might possibly be useful on some
11690 targets, but probably not.
11692 If you do not define this macro, the target-independent code in the
11693 C++ frontend will generate a less efficient heavyweight thunk that
11694 calls FUNCTION instead of jumping to it. The generic approach does
11695 not support varargs. */
11698 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
11700 tree thunk_fndecl ATTRIBUTE_UNUSED;
11701 HOST_WIDE_INT delta;
11702 HOST_WIDE_INT vcall_offset;
11705 rtx this, insn, funexp;
11707 reload_completed = 1;
11708 no_new_pseudos = 1;
11710 /* Mark the end of the (empty) prologue. */
11711 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
11713 /* Find the "this" pointer. If the function returns a structure,
11714 the structure return pointer is in r3. */
11715 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
11716 this = gen_rtx_REG (Pmode, 4);
11718 this = gen_rtx_REG (Pmode, 3);
11720 /* Apply the constant offset, if required. */
11723 rtx delta_rtx = GEN_INT (delta);
11724 emit_insn (TARGET_32BIT
11725 ? gen_addsi3 (this, this, delta_rtx)
11726 : gen_adddi3 (this, this, delta_rtx));
11729 /* Apply the offset from the vtable, if required. */
11732 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
11733 rtx tmp = gen_rtx_REG (Pmode, 12);
11735 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
11736 emit_insn (TARGET_32BIT
11737 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
11738 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
11739 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
11740 emit_insn (TARGET_32BIT
11741 ? gen_addsi3 (this, this, tmp)
11742 : gen_adddi3 (this, this, tmp));
11745 /* Generate a tail call to the target function. */
11746 if (!TREE_USED (function))
11748 assemble_external (function);
11749 TREE_USED (function) = 1;
11751 funexp = XEXP (DECL_RTL (function), 0);
11753 SYMBOL_REF_FLAG (funexp) = 0;
11754 if (current_file_function_operand (funexp, VOIDmode)
11755 && (! lookup_attribute ("longcall",
11756 TYPE_ATTRIBUTES (TREE_TYPE (function)))
11757 || lookup_attribute ("shortcall",
11758 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
11759 SYMBOL_REF_FLAG (funexp) = 1;
11761 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
11764 if (MACHOPIC_INDIRECT)
11765 funexp = machopic_indirect_call_target (funexp);
11768 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11769 generate sibcall RTL explicitly to avoid constraint abort. */
11770 insn = emit_call_insn (
11771 gen_rtx_PARALLEL (VOIDmode,
11773 gen_rtx_CALL (VOIDmode,
11774 funexp, const0_rtx),
11775 gen_rtx_USE (VOIDmode, const0_rtx),
11776 gen_rtx_USE (VOIDmode,
11777 gen_rtx_REG (SImode,
11778 LINK_REGISTER_REGNUM)),
11779 gen_rtx_RETURN (VOIDmode))));
11780 SIBLING_CALL_P (insn) = 1;
11783 /* Run just enough of rest_of_compilation to get the insns emitted.
11784 There's not really enough bulk here to make other passes such as
11785 instruction scheduling worth while. Note that use_thunk calls
11786 assemble_start_function and assemble_end_function. */
11787 insn = get_insns ();
11788 shorten_branches (insn);
11789 final_start_function (insn, file, 1);
11790 final (insn, file, 1, 0);
11791 final_end_function ();
11793 reload_completed = 0;
11794 no_new_pseudos = 0;
11797 /* A quick summary of the various types of 'constant-pool tables'
11800 Target Flags Name One table per
11801 AIX (none) AIX TOC object file
11802 AIX -mfull-toc AIX TOC object file
11803 AIX -mminimal-toc AIX minimal TOC translation unit
11804 SVR4/EABI (none) SVR4 SDATA object file
11805 SVR4/EABI -fpic SVR4 pic object file
11806 SVR4/EABI -fPIC SVR4 PIC translation unit
11807 SVR4/EABI -mrelocatable EABI TOC function
11808 SVR4/EABI -maix AIX TOC object file
11809 SVR4/EABI -maix -mminimal-toc
11810 AIX minimal TOC translation unit
11812 Name Reg. Set by entries contains:
11813 made by addrs? fp? sum?
11815 AIX TOC 2 crt0 as Y option option
11816 AIX minimal TOC 30 prolog gcc Y Y option
11817 SVR4 SDATA 13 crt0 gcc N Y N
11818 SVR4 pic 30 prolog ld Y not yet N
11819 SVR4 PIC 30 prolog gcc Y option option
11820 EABI TOC 30 prolog gcc Y option option
11824 /* Hash functions for the hash table. */
11827 rs6000_hash_constant (k)
11830 enum rtx_code code = GET_CODE (k);
11831 enum machine_mode mode = GET_MODE (k);
11832 unsigned result = (code << 3) ^ mode;
11833 const char *format;
11836 format = GET_RTX_FORMAT (code);
11837 flen = strlen (format);
11843 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11846 if (mode != VOIDmode)
11847 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11859 for (; fidx < flen; fidx++)
11860 switch (format[fidx])
11865 const char *str = XSTR (k, fidx);
11866 len = strlen (str);
11867 result = result * 613 + len;
11868 for (i = 0; i < len; i++)
11869 result = result * 613 + (unsigned) str[i];
11874 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11878 result = result * 613 + (unsigned) XINT (k, fidx);
11881 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11882 result = result * 613 + (unsigned) XWINT (k, fidx);
11886 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11887 result = result * 613 + (unsigned) (XWINT (k, fidx)
11901 toc_hash_function (hash_entry)
11902 const void * hash_entry;
11904 const struct toc_hash_struct *thc =
11905 (const struct toc_hash_struct *) hash_entry;
11906 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11909 /* Compare H1 and H2 for equivalence. */
11912 toc_hash_eq (h1, h2)
11916 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11917 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11919 if (((const struct toc_hash_struct *) h1)->key_mode
11920 != ((const struct toc_hash_struct *) h2)->key_mode)
11923 return rtx_equal_p (r1, r2);
11926 /* These are the names given by the C++ front-end to vtables, and
11927 vtable-like objects. Ideally, this logic should not be here;
11928 instead, there should be some programmatic way of inquiring as
11929 to whether or not an object is a vtable. */
11931 #define VTABLE_NAME_P(NAME) \
11932 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11933 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11934 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11935 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11938 rs6000_output_symbol_ref (file, x)
11942 /* Currently C++ toc references to vtables can be emitted before it
11943 is decided whether the vtable is public or private. If this is
11944 the case, then the linker will eventually complain that there is
11945 a reference to an unknown section. Thus, for vtables only,
11946 we emit the TOC reference to reference the symbol and not the
11948 const char *name = XSTR (x, 0);
11950 if (VTABLE_NAME_P (name))
11952 RS6000_OUTPUT_BASENAME (file, name);
11955 assemble_name (file, name);
11958 /* Output a TOC entry. We derive the entry name from what is being
11962 output_toc (file, x, labelno, mode)
11966 enum machine_mode mode;
11969 const char *name = buf;
11970 const char *real_name;
11977 /* When the linker won't eliminate them, don't output duplicate
11978 TOC entries (this happens on AIX if there is any kind of TOC,
11979 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11981 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
11983 struct toc_hash_struct *h;
11986 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11987 time because GGC is not initialised at that point. */
11988 if (toc_hash_table == NULL)
11989 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
11990 toc_hash_eq, NULL);
11992 h = ggc_alloc (sizeof (*h));
11994 h->key_mode = mode;
11995 h->labelno = labelno;
11997 found = htab_find_slot (toc_hash_table, h, 1);
11998 if (*found == NULL)
12000 else /* This is indeed a duplicate.
12001 Set this label equal to that label. */
12003 fputs ("\t.set ", file);
12004 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12005 fprintf (file, "%d,", labelno);
12006 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12007 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12013 /* If we're going to put a double constant in the TOC, make sure it's
12014 aligned properly when strict alignment is on. */
12015 if (GET_CODE (x) == CONST_DOUBLE
12016 && STRICT_ALIGNMENT
12017 && GET_MODE_BITSIZE (mode) >= 64
12018 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12019 ASM_OUTPUT_ALIGN (file, 3);
12022 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12024 /* Handle FP constants specially. Note that if we have a minimal
12025 TOC, things we put here aren't actually in the TOC, so we can allow
12027 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12029 REAL_VALUE_TYPE rv;
12032 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12033 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12037 if (TARGET_MINIMAL_TOC)
12038 fputs (DOUBLE_INT_ASM_OP, file);
12040 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12041 k[0] & 0xffffffff, k[1] & 0xffffffff,
12042 k[2] & 0xffffffff, k[3] & 0xffffffff);
12043 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12044 k[0] & 0xffffffff, k[1] & 0xffffffff,
12045 k[2] & 0xffffffff, k[3] & 0xffffffff);
12050 if (TARGET_MINIMAL_TOC)
12051 fputs ("\t.long ", file);
12053 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12054 k[0] & 0xffffffff, k[1] & 0xffffffff,
12055 k[2] & 0xffffffff, k[3] & 0xffffffff);
12056 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12057 k[0] & 0xffffffff, k[1] & 0xffffffff,
12058 k[2] & 0xffffffff, k[3] & 0xffffffff);
12062 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12064 REAL_VALUE_TYPE rv;
12067 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12068 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12072 if (TARGET_MINIMAL_TOC)
12073 fputs (DOUBLE_INT_ASM_OP, file);
12075 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12076 k[0] & 0xffffffff, k[1] & 0xffffffff);
12077 fprintf (file, "0x%lx%08lx\n",
12078 k[0] & 0xffffffff, k[1] & 0xffffffff);
12083 if (TARGET_MINIMAL_TOC)
12084 fputs ("\t.long ", file);
12086 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12087 k[0] & 0xffffffff, k[1] & 0xffffffff);
12088 fprintf (file, "0x%lx,0x%lx\n",
12089 k[0] & 0xffffffff, k[1] & 0xffffffff);
12093 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12095 REAL_VALUE_TYPE rv;
12098 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12099 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12103 if (TARGET_MINIMAL_TOC)
12104 fputs (DOUBLE_INT_ASM_OP, file);
12106 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12107 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12112 if (TARGET_MINIMAL_TOC)
12113 fputs ("\t.long ", file);
12115 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12116 fprintf (file, "0x%lx\n", l & 0xffffffff);
12120 else if (GET_MODE (x) == VOIDmode
12121 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12123 unsigned HOST_WIDE_INT low;
12124 HOST_WIDE_INT high;
12126 if (GET_CODE (x) == CONST_DOUBLE)
12128 low = CONST_DOUBLE_LOW (x);
12129 high = CONST_DOUBLE_HIGH (x);
12132 #if HOST_BITS_PER_WIDE_INT == 32
12135 high = (low & 0x80000000) ? ~0 : 0;
12139 low = INTVAL (x) & 0xffffffff;
12140 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12144 /* TOC entries are always Pmode-sized, but since this
12145 is a bigendian machine then if we're putting smaller
12146 integer constants in the TOC we have to pad them.
12147 (This is still a win over putting the constants in
12148 a separate constant pool, because then we'd have
12149 to have both a TOC entry _and_ the actual constant.)
12151 For a 32-bit target, CONST_INT values are loaded and shifted
12152 entirely within `low' and can be stored in one TOC entry. */
12154 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12155 abort ();/* It would be easy to make this work, but it doesn't now. */
12157 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12159 #if HOST_BITS_PER_WIDE_INT == 32
12160 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12161 POINTER_SIZE, &low, &high, 0);
12164 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12165 high = (HOST_WIDE_INT) low >> 32;
12172 if (TARGET_MINIMAL_TOC)
12173 fputs (DOUBLE_INT_ASM_OP, file);
12175 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12176 (long) high & 0xffffffff, (long) low & 0xffffffff);
12177 fprintf (file, "0x%lx%08lx\n",
12178 (long) high & 0xffffffff, (long) low & 0xffffffff);
12183 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12185 if (TARGET_MINIMAL_TOC)
12186 fputs ("\t.long ", file);
12188 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12189 (long) high & 0xffffffff, (long) low & 0xffffffff);
12190 fprintf (file, "0x%lx,0x%lx\n",
12191 (long) high & 0xffffffff, (long) low & 0xffffffff);
12195 if (TARGET_MINIMAL_TOC)
12196 fputs ("\t.long ", file);
12198 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12199 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12205 if (GET_CODE (x) == CONST)
12207 if (GET_CODE (XEXP (x, 0)) != PLUS)
12210 base = XEXP (XEXP (x, 0), 0);
12211 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12214 if (GET_CODE (base) == SYMBOL_REF)
12215 name = XSTR (base, 0);
12216 else if (GET_CODE (base) == LABEL_REF)
12217 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12218 else if (GET_CODE (base) == CODE_LABEL)
12219 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12223 real_name = (*targetm.strip_name_encoding) (name);
12224 if (TARGET_MINIMAL_TOC)
12225 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12228 fprintf (file, "\t.tc %s", real_name);
12231 fprintf (file, ".N%d", - offset);
12233 fprintf (file, ".P%d", offset);
12235 fputs ("[TC],", file);
12238 /* Currently C++ toc references to vtables can be emitted before it
12239 is decided whether the vtable is public or private. If this is
12240 the case, then the linker will eventually complain that there is
12241 a TOC reference to an unknown section. Thus, for vtables only,
12242 we emit the TOC reference to reference the symbol and not the
12244 if (VTABLE_NAME_P (name))
12246 RS6000_OUTPUT_BASENAME (file, name);
12248 fprintf (file, "%d", offset);
12249 else if (offset > 0)
12250 fprintf (file, "+%d", offset);
12253 output_addr_const (file, x);
12257 /* Output an assembler pseudo-op to write an ASCII string of N characters
12258 starting at P to FILE.
12260 On the RS/6000, we have to do this using the .byte operation and
12261 write out special characters outside the quoted string.
12262 Also, the assembler is broken; very long strings are truncated,
12263 so we must artificially break them up early. */
12266 output_ascii (file, p, n)
12272 int i, count_string;
12273 const char *for_string = "\t.byte \"";
12274 const char *for_decimal = "\t.byte ";
12275 const char *to_close = NULL;
12278 for (i = 0; i < n; i++)
12281 if (c >= ' ' && c < 0177)
12284 fputs (for_string, file);
12287 /* Write two quotes to get one. */
12295 for_decimal = "\"\n\t.byte ";
12299 if (count_string >= 512)
12301 fputs (to_close, file);
12303 for_string = "\t.byte \"";
12304 for_decimal = "\t.byte ";
12312 fputs (for_decimal, file);
12313 fprintf (file, "%d", c);
12315 for_string = "\n\t.byte \"";
12316 for_decimal = ", ";
12322 /* Now close the string if we have written one. Then end the line. */
12324 fputs (to_close, file);
12327 /* Generate a unique section name for FILENAME for a section type
12328 represented by SECTION_DESC. Output goes into BUF.
12330 SECTION_DESC can be any string, as long as it is different for each
12331 possible section type.
12333 We name the section in the same manner as xlc. The name begins with an
12334 underscore followed by the filename (after stripping any leading directory
12335 names) with the last period replaced by the string SECTION_DESC. If
12336 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12340 rs6000_gen_section_name (buf, filename, section_desc)
12342 const char *filename;
12343 const char *section_desc;
12345 const char *q, *after_last_slash, *last_period = 0;
12349 after_last_slash = filename;
12350 for (q = filename; *q; q++)
12353 after_last_slash = q + 1;
12354 else if (*q == '.')
12358 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12359 *buf = (char *) xmalloc (len);
12364 for (q = after_last_slash; *q; q++)
12366 if (q == last_period)
12368 strcpy (p, section_desc);
12369 p += strlen (section_desc);
12373 else if (ISALNUM (*q))
12377 if (last_period == 0)
12378 strcpy (p, section_desc);
12383 /* Emit profile function. */
12386 output_profile_hook (labelno)
12387 int labelno ATTRIBUTE_UNUSED;
12389 if (TARGET_PROFILE_KERNEL)
12392 if (DEFAULT_ABI == ABI_AIX)
12394 #ifdef NO_PROFILE_COUNTERS
12395 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12398 const char *label_name;
12401 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12402 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12403 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12405 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12409 else if (DEFAULT_ABI == ABI_DARWIN)
12411 const char *mcount_name = RS6000_MCOUNT;
12412 int caller_addr_regno = LINK_REGISTER_REGNUM;
12414 /* Be conservative and always set this, at least for now. */
12415 current_function_uses_pic_offset_table = 1;
12418 /* For PIC code, set up a stub and collect the caller's address
12419 from r0, which is where the prologue puts it. */
12420 if (MACHOPIC_INDIRECT)
12422 mcount_name = machopic_stub_name (mcount_name);
12423 if (current_function_uses_pic_offset_table)
12424 caller_addr_regno = 0;
12427 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12429 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12433 /* Write function profiler code. */
12436 output_function_profiler (file, labelno)
12443 switch (DEFAULT_ABI)
12450 /* Fall through. */
12452 case ABI_AIX_NODESC:
12455 warning ("no profiling of 64-bit code for this ABI");
12458 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12459 fprintf (file, "\tmflr %s\n", reg_names[0]);
12462 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12463 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12464 reg_names[0], save_lr, reg_names[1]);
12465 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12466 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12467 assemble_name (file, buf);
12468 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12470 else if (flag_pic > 1)
12472 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12473 reg_names[0], save_lr, reg_names[1]);
12474 /* Now, we need to get the address of the label. */
12475 fputs ("\tbl 1f\n\t.long ", file);
12476 assemble_name (file, buf);
12477 fputs ("-.\n1:", file);
12478 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12479 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12480 reg_names[0], reg_names[11]);
12481 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12482 reg_names[0], reg_names[0], reg_names[11]);
12486 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12487 assemble_name (file, buf);
12488 fputs ("@ha\n", file);
12489 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12490 reg_names[0], save_lr, reg_names[1]);
12491 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12492 assemble_name (file, buf);
12493 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12496 if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
12498 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12499 reg_names[STATIC_CHAIN_REGNUM],
12501 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12502 asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
12503 reg_names[STATIC_CHAIN_REGNUM],
12507 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12508 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12513 if (!TARGET_PROFILE_KERNEL)
12515 /* Don't do anything, done in output_profile_hook (). */
12522 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12523 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12525 if (current_function_needs_context)
12527 asm_fprintf (file, "\tstd %s,24(%s)\n",
12528 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12529 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12530 asm_fprintf (file, "\tld %s,24(%s)\n",
12531 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12534 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12542 rs6000_use_dfa_pipeline_interface ()
12547 /* Power4 load update and store update instructions are cracked into a
12548 load or store and an integer insn which are executed in the same cycle.
12549 Branches have their own dispatch slot which does not count against the
12550 GCC issue rate, but it changes the program flow so there are no other
12551 instructions to issue in this cycle. */
12554 rs6000_variable_issue (stream, verbose, insn, more)
12555 FILE *stream ATTRIBUTE_UNUSED;
12556 int verbose ATTRIBUTE_UNUSED;
12560 if (GET_CODE (PATTERN (insn)) == USE
12561 || GET_CODE (PATTERN (insn)) == CLOBBER)
12564 if (rs6000_cpu == PROCESSOR_POWER4)
12566 enum attr_type type = get_attr_type (insn);
12567 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12568 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
12570 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12571 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12572 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12573 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12574 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12575 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12576 || type == TYPE_IDIV || type == TYPE_LDIV)
12577 return more > 2 ? more - 2 : 0;
12583 /* Adjust the cost of a scheduling dependency. Return the new cost of
12584 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12587 rs6000_adjust_cost (insn, link, dep_insn, cost)
12590 rtx dep_insn ATTRIBUTE_UNUSED;
12593 if (! recog_memoized (insn))
12596 if (REG_NOTE_KIND (link) != 0)
12599 if (REG_NOTE_KIND (link) == 0)
12601 /* Data dependency; DEP_INSN writes a register that INSN reads
12602 some cycles later. */
12603 switch (get_attr_type (insn))
12606 /* Tell the first scheduling pass about the latency between
12607 a mtctr and bctr (and mtlr and br/blr). The first
12608 scheduling pass will not know about this latency since
12609 the mtctr instruction, which has the latency associated
12610 to it, will be generated by reload. */
12611 return TARGET_POWER ? 5 : 4;
12613 /* Leave some extra cycles between a compare and its
12614 dependent branch, to inhibit expensive mispredicts. */
12615 if ((rs6000_cpu_attr == CPU_PPC603
12616 || rs6000_cpu_attr == CPU_PPC604
12617 || rs6000_cpu_attr == CPU_PPC604E
12618 || rs6000_cpu_attr == CPU_PPC620
12619 || rs6000_cpu_attr == CPU_PPC630
12620 || rs6000_cpu_attr == CPU_PPC750
12621 || rs6000_cpu_attr == CPU_PPC7400
12622 || rs6000_cpu_attr == CPU_PPC7450
12623 || rs6000_cpu_attr == CPU_POWER4)
12624 && recog_memoized (dep_insn)
12625 && (INSN_CODE (dep_insn) >= 0)
12626 && (get_attr_type (dep_insn) == TYPE_CMP
12627 || get_attr_type (dep_insn) == TYPE_COMPARE
12628 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12629 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12630 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12631 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12632 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12633 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12638 /* Fall out to return default cost. */
12644 /* A C statement (sans semicolon) to update the integer scheduling
12645 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12646 INSN earlier, increase the priority to execute INSN later. Do not
12647 define this macro if you do not need to adjust the scheduling
12648 priorities of insns. */
12651 rs6000_adjust_priority (insn, priority)
12652 rtx insn ATTRIBUTE_UNUSED;
12655 /* On machines (like the 750) which have asymmetric integer units,
12656 where one integer unit can do multiply and divides and the other
12657 can't, reduce the priority of multiply/divide so it is scheduled
12658 before other integer operations. */
12661 if (! INSN_P (insn))
12664 if (GET_CODE (PATTERN (insn)) == USE)
12667 switch (rs6000_cpu_attr) {
12669 switch (get_attr_type (insn))
12676 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12677 priority, priority);
12678 if (priority >= 0 && priority < 0x01000000)
12688 /* Return how many instructions the machine can issue per cycle. */
12691 rs6000_issue_rate ()
12693 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12694 if (!reload_completed)
12697 switch (rs6000_cpu_attr) {
12698 case CPU_RIOS1: /* ? */
12700 case CPU_PPC601: /* ? */
12721 /* Return how many instructions to look ahead for better insn
12725 rs6000_use_sched_lookahead ()
12727 if (rs6000_cpu_attr == CPU_PPC8540)
12733 /* Length in units of the trampoline for entering a nested function. */
12736 rs6000_trampoline_size ()
12740 switch (DEFAULT_ABI)
12746 ret = (TARGET_32BIT) ? 12 : 24;
12751 case ABI_AIX_NODESC:
12752 ret = (TARGET_32BIT) ? 40 : 48;
12759 /* Emit RTL insns to initialize the variable parts of a trampoline.
12760 FNADDR is an RTX for the address of the function's pure code.
12761 CXT is an RTX for the static chain value for the function. */
12764 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12769 enum machine_mode pmode = Pmode;
12770 int regsize = (TARGET_32BIT) ? 4 : 8;
12771 rtx ctx_reg = force_reg (pmode, cxt);
12773 switch (DEFAULT_ABI)
12778 /* Macros to shorten the code expansions below. */
12779 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12780 #define MEM_PLUS(addr,offset) \
12781 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12783 /* Under AIX, just build the 3 word function descriptor */
12786 rtx fn_reg = gen_reg_rtx (pmode);
12787 rtx toc_reg = gen_reg_rtx (pmode);
12788 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12789 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12790 emit_move_insn (MEM_DEREF (addr), fn_reg);
12791 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12792 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12796 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12799 case ABI_AIX_NODESC:
12800 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12801 FALSE, VOIDmode, 4,
12803 GEN_INT (rs6000_trampoline_size ()), SImode,
12813 /* Table of valid machine attributes. */
12815 const struct attribute_spec rs6000_attribute_table[] =
12817 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12818 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12819 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12820 { NULL, 0, 0, false, false, false, NULL }
12823 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12824 struct attribute_spec.handler. */
12827 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12830 tree args ATTRIBUTE_UNUSED;
12831 int flags ATTRIBUTE_UNUSED;
12832 bool *no_add_attrs;
12834 if (TREE_CODE (*node) != FUNCTION_TYPE
12835 && TREE_CODE (*node) != FIELD_DECL
12836 && TREE_CODE (*node) != TYPE_DECL)
12838 warning ("`%s' attribute only applies to functions",
12839 IDENTIFIER_POINTER (name));
12840 *no_add_attrs = true;
12846 /* Set longcall attributes on all functions declared when
12847 rs6000_default_long_calls is true. */
12849 rs6000_set_default_type_attributes (type)
12852 if (rs6000_default_long_calls
12853 && (TREE_CODE (type) == FUNCTION_TYPE
12854 || TREE_CODE (type) == METHOD_TYPE))
12855 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12857 TYPE_ATTRIBUTES (type));
12860 /* Return a reference suitable for calling a function with the
12861 longcall attribute. */
12864 rs6000_longcall_ref (call_ref)
12867 const char *call_name;
12870 if (GET_CODE (call_ref) != SYMBOL_REF)
12873 /* System V adds '.' to the internal name, so skip them. */
12874 call_name = XSTR (call_ref, 0);
12875 if (*call_name == '.')
12877 while (*call_name == '.')
12880 node = get_identifier (call_name);
12881 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12884 return force_reg (Pmode, call_ref);
12888 #ifdef USING_ELFOS_H
12890 /* A C statement or statements to switch to the appropriate section
12891 for output of RTX in mode MODE. You can assume that RTX is some
12892 kind of constant in RTL. The argument MODE is redundant except in
12893 the case of a `const_int' rtx. Select the section by calling
12894 `text_section' or one of the alternatives for other sections.
12896 Do not define this macro if you put all constants in the read-only
12900 rs6000_elf_select_rtx_section (mode, x, align)
12901 enum machine_mode mode;
12903 unsigned HOST_WIDE_INT align;
12905 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12908 default_elf_select_rtx_section (mode, x, align);
12911 /* A C statement or statements to switch to the appropriate
12912 section for output of DECL. DECL is either a `VAR_DECL' node
12913 or a constant of some sort. RELOC indicates whether forming
12914 the initial value of DECL requires link-time relocations. */
12917 rs6000_elf_select_section (decl, reloc, align)
12920 unsigned HOST_WIDE_INT align;
12922 /* Pretend that we're always building for a shared library when
12923 ABI_AIX, because otherwise we end up with dynamic relocations
12924 in read-only sections. This happens for function pointers,
12925 references to vtables in typeinfo, and probably other cases. */
12926 default_elf_select_section_1 (decl, reloc, align,
12927 flag_pic || DEFAULT_ABI == ABI_AIX);
12930 /* A C statement to build up a unique section name, expressed as a
12931 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12932 RELOC indicates whether the initial value of EXP requires
12933 link-time relocations. If you do not define this macro, GCC will use
12934 the symbol name prefixed by `.' as the section name. Note - this
12935 macro can now be called for uninitialized data items as well as
12936 initialized data and functions. */
12939 rs6000_elf_unique_section (decl, reloc)
12943 /* As above, pretend that we're always building for a shared library
12944 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12945 default_unique_section_1 (decl, reloc,
12946 flag_pic || DEFAULT_ABI == ABI_AIX);
12949 /* If we are referencing a function that is static or is known to be
12950 in this file, make the SYMBOL_REF special. We can use this to indicate
12951 that we can branch to this function without emitting a no-op after the
12952 call. For real AIX calling sequences, we also replace the
12953 function name with the real name (1 or 2 leading .'s), rather than
12954 the function descriptor name. This saves a lot of overriding code
12955 to read the prefixes. */
12958 rs6000_elf_encode_section_info (decl, first)
12965 if (TREE_CODE (decl) == FUNCTION_DECL)
12967 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12968 if ((*targetm.binds_local_p) (decl))
12969 SYMBOL_REF_FLAG (sym_ref) = 1;
12971 if (!TARGET_AIX && DEFAULT_ABI == ABI_AIX)
12973 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12974 size_t len2 = strlen (XSTR (sym_ref, 0));
12975 char *str = alloca (len1 + len2 + 1);
12978 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12980 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12983 else if (rs6000_sdata != SDATA_NONE
12984 && DEFAULT_ABI == ABI_V4
12985 && TREE_CODE (decl) == VAR_DECL)
12987 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12988 int size = int_size_in_bytes (TREE_TYPE (decl));
12989 tree section_name = DECL_SECTION_NAME (decl);
12990 const char *name = (char *)0;
12993 if ((*targetm.binds_local_p) (decl))
12994 SYMBOL_REF_FLAG (sym_ref) = 1;
12998 if (TREE_CODE (section_name) == STRING_CST)
13000 name = TREE_STRING_POINTER (section_name);
13001 len = TREE_STRING_LENGTH (section_name);
13008 ? ((len == sizeof (".sdata") - 1
13009 && strcmp (name, ".sdata") == 0)
13010 || (len == sizeof (".sdata2") - 1
13011 && strcmp (name, ".sdata2") == 0)
13012 || (len == sizeof (".sbss") - 1
13013 && strcmp (name, ".sbss") == 0)
13014 || (len == sizeof (".sbss2") - 1
13015 && strcmp (name, ".sbss2") == 0)
13016 || (len == sizeof (".PPC.EMB.sdata0") - 1
13017 && strcmp (name, ".PPC.EMB.sdata0") == 0)
13018 || (len == sizeof (".PPC.EMB.sbss0") - 1
13019 && strcmp (name, ".PPC.EMB.sbss0") == 0))
13020 : (size > 0 && size <= g_switch_value))
13022 size_t len = strlen (XSTR (sym_ref, 0));
13023 char *str = alloca (len + 2);
13026 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13027 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13032 static const char *
13033 rs6000_elf_strip_name_encoding (str)
13036 while (*str == '*' || *str == '@')
13042 rs6000_elf_in_small_data_p (decl)
13045 if (rs6000_sdata == SDATA_NONE)
13048 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13050 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13051 if (strcmp (section, ".sdata") == 0
13052 || strcmp (section, ".sdata2") == 0
13053 || strcmp (section, ".sbss") == 0)
13058 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13061 && size <= g_switch_value
13062 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13069 #endif /* USING_ELFOS_H */
13072 /* Return a REG that occurs in ADDR with coefficient 1.
13073 ADDR can be effectively incremented by incrementing REG.
13075 r0 is special and we must not select it as an address
13076 register by this routine since our caller will try to
13077 increment the returned register via an "la" instruction. */
13080 find_addr_reg (addr)
13083 while (GET_CODE (addr) == PLUS)
13085 if (GET_CODE (XEXP (addr, 0)) == REG
13086 && REGNO (XEXP (addr, 0)) != 0)
13087 addr = XEXP (addr, 0);
13088 else if (GET_CODE (XEXP (addr, 1)) == REG
13089 && REGNO (XEXP (addr, 1)) != 0)
13090 addr = XEXP (addr, 1);
13091 else if (CONSTANT_P (XEXP (addr, 0)))
13092 addr = XEXP (addr, 1);
13093 else if (CONSTANT_P (XEXP (addr, 1)))
13094 addr = XEXP (addr, 0);
13098 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13104 rs6000_fatal_bad_address (op)
13107 fatal_insn ("bad address", op);
13113 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13114 reference and a constant. */
13117 symbolic_operand (op)
13120 switch (GET_CODE (op))
13127 return (GET_CODE (op) == SYMBOL_REF ||
13128 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13129 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13130 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13137 #ifdef RS6000_LONG_BRANCH
13139 static tree stub_list = 0;
13141 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13142 procedure calls to the linked list. */
13145 add_compiler_stub (label_name, function_name, line_number)
13147 tree function_name;
13150 tree stub = build_tree_list (function_name, label_name);
13151 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13152 TREE_CHAIN (stub) = stub_list;
13156 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13157 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13158 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13160 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13161 handling procedure calls from the linked list and initializes the
13165 output_compiler_stub ()
13168 char label_buf[256];
13172 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13174 fprintf (asm_out_file,
13175 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13177 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13178 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13179 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13180 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13182 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13184 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13187 label_buf[0] = '_';
13188 strcpy (label_buf+1,
13189 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13192 strcpy (tmp_buf, "lis r12,hi16(");
13193 strcat (tmp_buf, label_buf);
13194 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13195 strcat (tmp_buf, label_buf);
13196 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13197 output_asm_insn (tmp_buf, 0);
13199 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13200 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13201 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13202 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13208 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13209 already there or not. */
13212 no_previous_def (function_name)
13213 tree function_name;
13216 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13217 if (function_name == STUB_FUNCTION_NAME (stub))
13222 /* GET_PREV_LABEL gets the label name from the previous definition of
13226 get_prev_label (function_name)
13227 tree function_name;
13230 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13231 if (function_name == STUB_FUNCTION_NAME (stub))
13232 return STUB_LABEL_NAME (stub);
13236 /* INSN is either a function call or a millicode call. It may have an
13237 unconditional jump in its delay slot.
13239 CALL_DEST is the routine we are calling. */
13242 output_call (insn, call_dest, operand_number)
13245 int operand_number;
13247 static char buf[256];
13248 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13251 tree funname = get_identifier (XSTR (call_dest, 0));
13253 if (no_previous_def (funname))
13255 int line_number = 0;
13256 rtx label_rtx = gen_label_rtx ();
13257 char *label_buf, temp_buf[256];
13258 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13259 CODE_LABEL_NUMBER (label_rtx));
13260 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13261 labelname = get_identifier (label_buf);
13262 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13264 line_number = NOTE_LINE_NUMBER (insn);
13265 add_compiler_stub (labelname, funname, line_number);
13268 labelname = get_prev_label (funname);
13270 sprintf (buf, "jbsr %%z%d,%.246s",
13271 operand_number, IDENTIFIER_POINTER (labelname));
13276 sprintf (buf, "bl %%z%d", operand_number);
13281 #endif /* RS6000_LONG_BRANCH */
13283 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13285 const char *const symbol_ = (SYMBOL); \
13286 char *buffer_ = (BUF); \
13287 if (symbol_[0] == '"') \
13289 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13291 else if (name_needs_quotes(symbol_)) \
13293 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13297 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13302 /* Generate PIC and indirect symbol stubs. */
13305 machopic_output_stub (file, symb, stub)
13307 const char *symb, *stub;
13309 unsigned int length;
13310 char *symbol_name, *lazy_ptr_name;
13311 char *local_label_0;
13312 static int label = 0;
13314 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13315 symb = (*targetm.strip_name_encoding) (symb);
13319 length = strlen (symb);
13320 symbol_name = alloca (length + 32);
13321 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13323 lazy_ptr_name = alloca (length + 32);
13324 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13326 local_label_0 = alloca (length + 32);
13327 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13330 machopic_picsymbol_stub1_section ();
13332 machopic_symbol_stub1_section ();
13333 fprintf (file, "\t.align 2\n");
13335 fprintf (file, "%s:\n", stub);
13336 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13340 fprintf (file, "\tmflr r0\n");
13341 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13342 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13343 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13344 lazy_ptr_name, local_label_0);
13345 fprintf (file, "\tmtlr r0\n");
13346 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13347 lazy_ptr_name, local_label_0);
13348 fprintf (file, "\tmtctr r12\n");
13349 fprintf (file, "\tbctr\n");
13353 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13354 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13355 fprintf (file, "\tmtctr r12\n");
13356 fprintf (file, "\tbctr\n");
13359 machopic_lazy_symbol_ptr_section ();
13360 fprintf (file, "%s:\n", lazy_ptr_name);
13361 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13362 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13365 /* Legitimize PIC addresses. If the address is already
13366 position-independent, we return ORIG. Newly generated
13367 position-independent addresses go into a reg. This is REG if non
13368 zero, otherwise we allocate register(s) as necessary. */
13370 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13373 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13375 enum machine_mode mode;
13380 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13381 reg = gen_reg_rtx (Pmode);
13383 if (GET_CODE (orig) == CONST)
13385 if (GET_CODE (XEXP (orig, 0)) == PLUS
13386 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13389 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13392 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13395 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13401 if (GET_CODE (offset) == CONST_INT)
13403 if (SMALL_INT (offset))
13404 return plus_constant (base, INTVAL (offset));
13405 else if (! reload_in_progress && ! reload_completed)
13406 offset = force_reg (Pmode, offset);
13409 rtx mem = force_const_mem (Pmode, orig);
13410 return machopic_legitimize_pic_address (mem, Pmode, reg);
13413 return gen_rtx (PLUS, Pmode, base, offset);
13416 /* Fall back on generic machopic code. */
13417 return machopic_legitimize_pic_address (orig, mode, reg);
13420 /* This is just a placeholder to make linking work without having to
13421 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13422 ever needed for Darwin (not too likely!) this would have to get a
13423 real definition. */
13430 #endif /* TARGET_MACHO */
13433 static unsigned int
13434 rs6000_elf_section_type_flags (decl, name, reloc)
13440 = default_section_type_flags_1 (decl, name, reloc,
13441 flag_pic || DEFAULT_ABI == ABI_AIX);
13443 if (TARGET_RELOCATABLE)
13444 flags |= SECTION_WRITE;
13449 /* Record an element in the table of global constructors. SYMBOL is
13450 a SYMBOL_REF of the function to be called; PRIORITY is a number
13451 between 0 and MAX_INIT_PRIORITY.
13453 This differs from default_named_section_asm_out_constructor in
13454 that we have special handling for -mrelocatable. */
13457 rs6000_elf_asm_out_constructor (symbol, priority)
13461 const char *section = ".ctors";
13464 if (priority != DEFAULT_INIT_PRIORITY)
13466 sprintf (buf, ".ctors.%.5u",
13467 /* Invert the numbering so the linker puts us in the proper
13468 order; constructors are run from right to left, and the
13469 linker sorts in increasing order. */
13470 MAX_INIT_PRIORITY - priority);
13474 named_section_flags (section, SECTION_WRITE);
13475 assemble_align (POINTER_SIZE);
13477 if (TARGET_RELOCATABLE)
13479 fputs ("\t.long (", asm_out_file);
13480 output_addr_const (asm_out_file, symbol);
13481 fputs (")@fixup\n", asm_out_file);
13484 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13488 rs6000_elf_asm_out_destructor (symbol, priority)
13492 const char *section = ".dtors";
13495 if (priority != DEFAULT_INIT_PRIORITY)
13497 sprintf (buf, ".dtors.%.5u",
13498 /* Invert the numbering so the linker puts us in the proper
13499 order; constructors are run from right to left, and the
13500 linker sorts in increasing order. */
13501 MAX_INIT_PRIORITY - priority);
13505 named_section_flags (section, SECTION_WRITE);
13506 assemble_align (POINTER_SIZE);
13508 if (TARGET_RELOCATABLE)
13510 fputs ("\t.long (", asm_out_file);
13511 output_addr_const (asm_out_file, symbol);
13512 fputs (")@fixup\n", asm_out_file);
13515 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13521 rs6000_xcoff_asm_globalize_label (stream, name)
13525 fputs (GLOBAL_ASM_OP, stream);
13526 RS6000_OUTPUT_BASENAME (stream, name);
13527 putc ('\n', stream);
13531 rs6000_xcoff_asm_named_section (name, flags)
13533 unsigned int flags;
13536 static const char * const suffix[3] = { "PR", "RO", "RW" };
13538 if (flags & SECTION_CODE)
13540 else if (flags & SECTION_WRITE)
13545 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13546 (flags & SECTION_CODE) ? "." : "",
13547 name, suffix[smclass], flags & SECTION_ENTSIZE);
13551 rs6000_xcoff_select_section (decl, reloc, align)
13554 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13556 if (decl_readonly_section_1 (decl, reloc, 1))
13558 if (TREE_PUBLIC (decl))
13559 read_only_data_section ();
13561 read_only_private_data_section ();
13565 if (TREE_PUBLIC (decl))
13568 private_data_section ();
13573 rs6000_xcoff_unique_section (decl, reloc)
13575 int reloc ATTRIBUTE_UNUSED;
13579 /* Use select_section for private and uninitialized data. */
13580 if (!TREE_PUBLIC (decl)
13581 || DECL_COMMON (decl)
13582 || DECL_INITIAL (decl) == NULL_TREE
13583 || DECL_INITIAL (decl) == error_mark_node
13584 || (flag_zero_initialized_in_bss
13585 && initializer_zerop (DECL_INITIAL (decl))))
13588 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13589 name = (*targetm.strip_name_encoding) (name);
13590 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13593 /* Select section for constant in constant pool.
13595 On RS/6000, all constants are in the private read-only data area.
13596 However, if this is being placed in the TOC it must be output as a
13600 rs6000_xcoff_select_rtx_section (mode, x, align)
13601 enum machine_mode mode;
13603 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13605 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13608 read_only_private_data_section ();
13611 /* Remove any trailing [DS] or the like from the symbol name. */
13613 static const char *
13614 rs6000_xcoff_strip_name_encoding (name)
13620 len = strlen (name);
13621 if (name[len - 1] == ']')
13622 return ggc_alloc_string (name, len - 4);
13627 /* Section attributes. AIX is always PIC. */
13629 static unsigned int
13630 rs6000_xcoff_section_type_flags (decl, name, reloc)
13635 unsigned int align;
13636 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13638 /* Align to at least UNIT size. */
13639 if (flags & SECTION_CODE)
13640 align = MIN_UNITS_PER_WORD;
13642 /* Increase alignment of large objects if not already stricter. */
13643 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13644 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13645 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13647 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13651 rs6000_xcoff_encode_section_info (decl, first)
13653 int first ATTRIBUTE_UNUSED;
13655 if (TREE_CODE (decl) == FUNCTION_DECL
13656 && (*targetm.binds_local_p) (decl))
13657 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13659 #endif /* TARGET_XCOFF */
13662 /* Cross-module name binding. Darwin does not support overriding
13663 functions at dynamic-link time. */
13666 rs6000_binds_local_p (decl)
13669 return default_binds_local_p_1 (decl, 0);
13673 /* Compute a (partial) cost for rtx X. Return true if the complete
13674 cost has been computed, and false if subexpressions should be
13675 scanned. In either case, *TOTAL contains the cost result. */
13678 rs6000_rtx_costs (x, code, outer_code, total)
13680 int code, outer_code ATTRIBUTE_UNUSED;
13685 /* On the RS/6000, if it is valid in the insn, it is free.
13686 So this always returns 0. */
13697 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13698 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
13699 + 0x8000) >= 0x10000)
13700 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13701 ? COSTS_N_INSNS (2)
13702 : COSTS_N_INSNS (1));
13708 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13709 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
13710 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13711 ? COSTS_N_INSNS (2)
13712 : COSTS_N_INSNS (1));
13718 *total = COSTS_N_INSNS (2);
13721 switch (rs6000_cpu)
13723 case PROCESSOR_RIOS1:
13724 case PROCESSOR_PPC405:
13725 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13726 ? COSTS_N_INSNS (5)
13727 : (INTVAL (XEXP (x, 1)) >= -256
13728 && INTVAL (XEXP (x, 1)) <= 255)
13729 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13732 case PROCESSOR_RS64A:
13733 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13734 ? GET_MODE (XEXP (x, 1)) != DImode
13735 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13736 : (INTVAL (XEXP (x, 1)) >= -256
13737 && INTVAL (XEXP (x, 1)) <= 255)
13738 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13741 case PROCESSOR_RIOS2:
13742 case PROCESSOR_MPCCORE:
13743 case PROCESSOR_PPC604e:
13744 *total = COSTS_N_INSNS (2);
13747 case PROCESSOR_PPC601:
13748 *total = COSTS_N_INSNS (5);
13751 case PROCESSOR_PPC603:
13752 case PROCESSOR_PPC7400:
13753 case PROCESSOR_PPC750:
13754 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13755 ? COSTS_N_INSNS (5)
13756 : (INTVAL (XEXP (x, 1)) >= -256
13757 && INTVAL (XEXP (x, 1)) <= 255)
13758 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13761 case PROCESSOR_PPC7450:
13762 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13763 ? COSTS_N_INSNS (4)
13764 : COSTS_N_INSNS (3));
13767 case PROCESSOR_PPC403:
13768 case PROCESSOR_PPC604:
13769 case PROCESSOR_PPC8540:
13770 *total = COSTS_N_INSNS (4);
13773 case PROCESSOR_PPC620:
13774 case PROCESSOR_PPC630:
13775 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13776 ? GET_MODE (XEXP (x, 1)) != DImode
13777 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13778 : (INTVAL (XEXP (x, 1)) >= -256
13779 && INTVAL (XEXP (x, 1)) <= 255)
13780 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13783 case PROCESSOR_POWER4:
13784 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13785 ? GET_MODE (XEXP (x, 1)) != DImode
13786 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13787 : COSTS_N_INSNS (2));
13796 if (GET_CODE (XEXP (x, 1)) == CONST_INT
13797 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
13799 *total = COSTS_N_INSNS (2);
13806 switch (rs6000_cpu)
13808 case PROCESSOR_RIOS1:
13809 *total = COSTS_N_INSNS (19);
13812 case PROCESSOR_RIOS2:
13813 *total = COSTS_N_INSNS (13);
13816 case PROCESSOR_RS64A:
13817 *total = (GET_MODE (XEXP (x, 1)) != DImode
13818 ? COSTS_N_INSNS (65)
13819 : COSTS_N_INSNS (67));
13822 case PROCESSOR_MPCCORE:
13823 *total = COSTS_N_INSNS (6);
13826 case PROCESSOR_PPC403:
13827 *total = COSTS_N_INSNS (33);
13830 case PROCESSOR_PPC405:
13831 *total = COSTS_N_INSNS (35);
13834 case PROCESSOR_PPC601:
13835 *total = COSTS_N_INSNS (36);
13838 case PROCESSOR_PPC603:
13839 *total = COSTS_N_INSNS (37);
13842 case PROCESSOR_PPC604:
13843 case PROCESSOR_PPC604e:
13844 *total = COSTS_N_INSNS (20);
13847 case PROCESSOR_PPC620:
13848 case PROCESSOR_PPC630:
13849 *total = (GET_MODE (XEXP (x, 1)) != DImode
13850 ? COSTS_N_INSNS (21)
13851 : COSTS_N_INSNS (37));
13854 case PROCESSOR_PPC750:
13855 case PROCESSOR_PPC8540:
13856 case PROCESSOR_PPC7400:
13857 *total = COSTS_N_INSNS (19);
13860 case PROCESSOR_PPC7450:
13861 *total = COSTS_N_INSNS (23);
13864 case PROCESSOR_POWER4:
13865 *total = (GET_MODE (XEXP (x, 1)) != DImode
13866 ? COSTS_N_INSNS (18)
13867 : COSTS_N_INSNS (34));
13875 *total = COSTS_N_INSNS (4);
13879 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13888 /* A C expression returning the cost of moving data from a register of class
13889 CLASS1 to one of CLASS2. */
13892 rs6000_register_move_cost (mode, from, to)
13893 enum machine_mode mode;
13894 enum reg_class from, to;
13896 /* Moves from/to GENERAL_REGS. */
13897 if (reg_classes_intersect_p (to, GENERAL_REGS)
13898 || reg_classes_intersect_p (from, GENERAL_REGS))
13900 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13903 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13904 return (rs6000_memory_move_cost (mode, from, 0)
13905 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13907 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13908 else if (from == CR_REGS)
13912 /* A move will cost one instruction per GPR moved. */
13913 return 2 * HARD_REGNO_NREGS (0, mode);
13916 /* Moving between two similar registers is just one instruction. */
13917 else if (reg_classes_intersect_p (to, from))
13918 return mode == TFmode ? 4 : 2;
13920 /* Everything else has to go through GENERAL_REGS. */
13922 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13923 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13926 /* A C expressions returning the cost of moving data of MODE from a register to
13930 rs6000_memory_move_cost (mode, class, in)
13931 enum machine_mode mode;
13932 enum reg_class class;
13933 int in ATTRIBUTE_UNUSED;
13935 if (reg_classes_intersect_p (class, GENERAL_REGS))
13936 return 4 * HARD_REGNO_NREGS (0, mode);
13937 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13938 return 4 * HARD_REGNO_NREGS (32, mode);
13939 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13940 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13942 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13945 /* Return true if TYPE is of type __ev64_opaque__. */
13948 is_ev64_opaque_type (type)
13952 && (type == opaque_V2SI_type_node
13953 || type == opaque_V2SF_type_node
13954 || (TREE_CODE (type) == VECTOR_TYPE
13955 && TYPE_NAME (type)
13956 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13957 && DECL_NAME (TYPE_NAME (type))
13958 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13959 "__ev64_opaque__") == 0)));
13963 rs6000_dwarf_register_span (reg)
13968 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
13971 regno = REGNO (reg);
13973 /* The duality of the SPE register size wreaks all kinds of havoc.
13974 This is a way of distinguishing r0 in 32-bits from r0 in
13977 gen_rtx_PARALLEL (VOIDmode,
13980 gen_rtx_REG (SImode, regno + 1200),
13981 gen_rtx_REG (SImode, regno))
13983 gen_rtx_REG (SImode, regno),
13984 gen_rtx_REG (SImode, regno + 1200)));
13987 #include "gt-rs6000.h"