1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Nonzero if we have FPRs. */
101 /* String from -misel=. */
102 const char *rs6000_isel_string;
104 /* Set to nonzero once AIX common-mode calls have been defined. */
105 static GTY(()) int common_mode_defined;
107 /* Save information from a "cmpxx" operation until the branch or scc is
109 rtx rs6000_compare_op0, rs6000_compare_op1;
110 int rs6000_compare_fp_p;
112 /* Label number of label created for -mrelocatable, to call to so we can
113 get the address of the GOT section */
114 int rs6000_pic_labelno;
117 /* Which abi to adhere to */
118 const char *rs6000_abi_name = RS6000_ABI_NAME;
120 /* Semantics of the small data area */
121 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
123 /* Which small data model to use */
124 const char *rs6000_sdata_name = (char *)0;
126 /* Counter for labels which are to be placed in .fixup. */
127 int fixuplabelno = 0;
130 /* ABI enumeration available for subtarget to use. */
131 enum rs6000_abi rs6000_current_abi;
133 /* ABI string from -mabi= option. */
134 const char *rs6000_abi_string;
137 const char *rs6000_debug_name;
138 int rs6000_debug_stack; /* debug stack applications */
139 int rs6000_debug_arg; /* debug argument handling */
141 const char *rs6000_traceback_name;
143 traceback_default = 0,
149 /* Flag to say the TOC is initialized */
151 char toc_label_name[10];
153 /* Alias set for saves and restores from the rs6000 stack. */
154 static int rs6000_sr_alias_set;
156 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
157 The only place that looks at this is rs6000_set_default_type_attributes;
158 everywhere else should rely on the presence or absence of a longcall
159 attribute on the function declaration. */
160 int rs6000_default_long_calls;
161 const char *rs6000_longcall_switch;
163 struct builtin_description
165 /* mask is not const because we're going to alter it below. This
166 nonsense will go away when we rewrite the -march infrastructure
167 to give us more target flag bits. */
169 const enum insn_code icode;
170 const char *const name;
171 const enum rs6000_builtins code;
174 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
175 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
176 static void validate_condition_mode
177 PARAMS ((enum rtx_code, enum machine_mode));
178 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
179 static void rs6000_maybe_dead PARAMS ((rtx));
180 static void rs6000_emit_stack_tie PARAMS ((void));
181 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
182 static rtx spe_synthesize_frame_save PARAMS ((rtx));
183 static bool spe_func_has_64bit_regs_p PARAMS ((void));
184 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
185 unsigned int, int, int));
186 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
187 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
188 static unsigned rs6000_hash_constant PARAMS ((rtx));
189 static unsigned toc_hash_function PARAMS ((const void *));
190 static int toc_hash_eq PARAMS ((const void *, const void *));
191 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
192 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
193 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
194 #ifdef HAVE_GAS_HIDDEN
195 static void rs6000_assemble_visibility PARAMS ((tree, int));
197 static int rs6000_ra_ever_killed PARAMS ((void));
198 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
199 const struct attribute_spec rs6000_attribute_table[];
200 static void rs6000_set_default_type_attributes PARAMS ((tree));
201 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
202 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
203 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
204 HOST_WIDE_INT, tree));
205 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
206 HOST_WIDE_INT, HOST_WIDE_INT));
208 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
210 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
211 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
212 static void rs6000_elf_select_section PARAMS ((tree, int,
213 unsigned HOST_WIDE_INT));
214 static void rs6000_elf_unique_section PARAMS ((tree, int));
215 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
216 unsigned HOST_WIDE_INT));
217 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
219 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
220 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
223 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
224 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
225 static void rs6000_xcoff_select_section PARAMS ((tree, int,
226 unsigned HOST_WIDE_INT));
227 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
228 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
229 unsigned HOST_WIDE_INT));
230 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
231 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
232 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
236 static bool rs6000_binds_local_p PARAMS ((tree));
238 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
239 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
240 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
241 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
242 static int rs6000_adjust_priority PARAMS ((rtx, int));
243 static int rs6000_issue_rate PARAMS ((void));
244 static int rs6000_use_sched_lookahead PARAMS ((void));
246 static void rs6000_init_builtins PARAMS ((void));
247 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
248 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
249 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
250 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
251 static void altivec_init_builtins PARAMS ((void));
252 static void rs6000_common_init_builtins PARAMS ((void));
254 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
255 int, enum rs6000_builtins,
256 enum rs6000_builtins));
257 static void spe_init_builtins PARAMS ((void));
258 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
259 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
260 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
261 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
263 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
264 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
265 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
266 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
267 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
268 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
269 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
270 static void rs6000_parse_abi_options PARAMS ((void));
271 static void rs6000_parse_vrsave_option PARAMS ((void));
272 static void rs6000_parse_isel_option PARAMS ((void));
273 static int first_altivec_reg_to_save PARAMS ((void));
274 static unsigned int compute_vrsave_mask PARAMS ((void));
275 static void is_altivec_return_reg PARAMS ((rtx, void *));
276 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
277 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
278 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
279 static bool is_ev64_opaque_type PARAMS ((tree));
280 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
282 /* Hash table stuff for keeping track of TOC entries. */
284 struct toc_hash_struct GTY(())
286 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
287 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
289 enum machine_mode key_mode;
293 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
295 /* Default register names. */
296 char rs6000_reg_names[][8] =
298 "0", "1", "2", "3", "4", "5", "6", "7",
299 "8", "9", "10", "11", "12", "13", "14", "15",
300 "16", "17", "18", "19", "20", "21", "22", "23",
301 "24", "25", "26", "27", "28", "29", "30", "31",
302 "0", "1", "2", "3", "4", "5", "6", "7",
303 "8", "9", "10", "11", "12", "13", "14", "15",
304 "16", "17", "18", "19", "20", "21", "22", "23",
305 "24", "25", "26", "27", "28", "29", "30", "31",
306 "mq", "lr", "ctr","ap",
307 "0", "1", "2", "3", "4", "5", "6", "7",
309 /* AltiVec registers. */
310 "0", "1", "2", "3", "4", "5", "6", "7",
311 "8", "9", "10", "11", "12", "13", "14", "15",
312 "16", "17", "18", "19", "20", "21", "22", "23",
313 "24", "25", "26", "27", "28", "29", "30", "31",
319 #ifdef TARGET_REGNAMES
320 static const char alt_reg_names[][8] =
322 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
323 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
324 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
325 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
326 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
327 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
328 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
329 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
330 "mq", "lr", "ctr", "ap",
331 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
333 /* AltiVec registers. */
334 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
335 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
336 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
337 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
344 #ifndef MASK_STRICT_ALIGN
345 #define MASK_STRICT_ALIGN 0
347 #ifndef TARGET_PROFILE_KERNEL
348 #define TARGET_PROFILE_KERNEL 0
351 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
352 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
354 /* Initialize the GCC target structure. */
355 #undef TARGET_ATTRIBUTE_TABLE
356 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
357 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
358 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
360 #undef TARGET_ASM_ALIGNED_DI_OP
361 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
363 /* Default unaligned ops are only provided for ELF. Find the ops needed
364 for non-ELF systems. */
365 #ifndef OBJECT_FORMAT_ELF
367 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
369 #undef TARGET_ASM_UNALIGNED_HI_OP
370 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
371 #undef TARGET_ASM_UNALIGNED_SI_OP
372 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
373 #undef TARGET_ASM_UNALIGNED_DI_OP
374 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
377 #undef TARGET_ASM_UNALIGNED_HI_OP
378 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
379 #undef TARGET_ASM_UNALIGNED_SI_OP
380 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
384 /* This hook deals with fixups for relocatable code and DI-mode objects
386 #undef TARGET_ASM_INTEGER
387 #define TARGET_ASM_INTEGER rs6000_assemble_integer
389 #ifdef HAVE_GAS_HIDDEN
390 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
391 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
394 #undef TARGET_ASM_FUNCTION_PROLOGUE
395 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
396 #undef TARGET_ASM_FUNCTION_EPILOGUE
397 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
399 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
400 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
401 #undef TARGET_SCHED_VARIABLE_ISSUE
402 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
404 #undef TARGET_SCHED_ISSUE_RATE
405 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
406 #undef TARGET_SCHED_ADJUST_COST
407 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
408 #undef TARGET_SCHED_ADJUST_PRIORITY
409 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
411 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
412 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
414 #undef TARGET_INIT_BUILTINS
415 #define TARGET_INIT_BUILTINS rs6000_init_builtins
417 #undef TARGET_EXPAND_BUILTIN
418 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
421 #undef TARGET_BINDS_LOCAL_P
422 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
425 #undef TARGET_ASM_OUTPUT_MI_THUNK
426 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
428 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
429 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
431 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
432 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
434 #undef TARGET_RTX_COSTS
435 #define TARGET_RTX_COSTS rs6000_rtx_costs
436 #undef TARGET_ADDRESS_COST
437 #define TARGET_ADDRESS_COST hook_int_rtx_0
439 #undef TARGET_VECTOR_OPAQUE_P
440 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
442 #undef TARGET_DWARF_REGISTER_SPAN
443 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
445 struct gcc_target targetm = TARGET_INITIALIZER;
447 /* Override command line options. Mostly we process the processor
448 type and sometimes adjust other TARGET_ options. */
451 rs6000_override_options (default_cpu)
452 const char *default_cpu;
455 struct rs6000_cpu_select *ptr;
457 /* Simplify the entries below by making a mask for any POWER
458 variant and any PowerPC variant. */
460 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
461 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
462 | MASK_PPC_GFXOPT | MASK_POWERPC64)
463 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
467 const char *const name; /* Canonical processor name. */
468 const enum processor_type processor; /* Processor type enum value. */
469 const int target_enable; /* Target flags to enable. */
470 const int target_disable; /* Target flags to disable. */
471 } const processor_target_table[]
472 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
473 POWER_MASKS | POWERPC_MASKS},
474 {"power", PROCESSOR_POWER,
475 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
476 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
477 {"power2", PROCESSOR_POWER,
478 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
479 POWERPC_MASKS | MASK_NEW_MNEMONICS},
480 {"power3", PROCESSOR_PPC630,
481 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
483 {"power4", PROCESSOR_POWER4,
484 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
486 {"powerpc", PROCESSOR_POWERPC,
487 MASK_POWERPC | MASK_NEW_MNEMONICS,
488 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
489 {"powerpc64", PROCESSOR_POWERPC64,
490 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
491 POWER_MASKS | POWERPC_OPT_MASKS},
492 {"rios", PROCESSOR_RIOS1,
493 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
494 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
495 {"rios1", PROCESSOR_RIOS1,
496 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
497 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
498 {"rsc", PROCESSOR_PPC601,
499 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
500 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
501 {"rsc1", PROCESSOR_PPC601,
502 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
503 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
504 {"rios2", PROCESSOR_RIOS2,
505 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
506 POWERPC_MASKS | MASK_NEW_MNEMONICS},
507 {"rs64a", PROCESSOR_RS64A,
508 MASK_POWERPC | MASK_NEW_MNEMONICS,
509 POWER_MASKS | POWERPC_OPT_MASKS},
510 {"401", PROCESSOR_PPC403,
511 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
512 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
513 {"403", PROCESSOR_PPC403,
514 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
515 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
516 {"405", PROCESSOR_PPC405,
517 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
518 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
519 {"405f", PROCESSOR_PPC405,
520 MASK_POWERPC | MASK_NEW_MNEMONICS,
521 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
522 {"505", PROCESSOR_MPCCORE,
523 MASK_POWERPC | MASK_NEW_MNEMONICS,
524 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
525 {"601", PROCESSOR_PPC601,
526 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
527 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
528 {"602", PROCESSOR_PPC603,
529 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
530 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
531 {"603", PROCESSOR_PPC603,
532 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
533 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
534 {"603e", PROCESSOR_PPC603,
535 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
536 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
537 {"ec603e", PROCESSOR_PPC603,
538 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
539 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
540 {"604", PROCESSOR_PPC604,
541 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
542 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
543 {"604e", PROCESSOR_PPC604e,
544 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
545 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
546 {"620", PROCESSOR_PPC620,
547 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
549 {"630", PROCESSOR_PPC630,
550 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
552 {"740", PROCESSOR_PPC750,
553 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
554 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
555 {"750", PROCESSOR_PPC750,
556 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
557 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
558 {"7400", PROCESSOR_PPC7400,
559 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
560 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
561 {"7450", PROCESSOR_PPC7450,
562 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
563 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
564 {"8540", PROCESSOR_PPC8540,
565 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
566 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
567 {"801", PROCESSOR_MPCCORE,
568 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
569 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
570 {"821", PROCESSOR_MPCCORE,
571 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
572 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
573 {"823", PROCESSOR_MPCCORE,
574 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
575 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
576 {"860", PROCESSOR_MPCCORE,
577 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
578 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
580 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
582 /* Save current -mmultiple/-mno-multiple status. */
583 int multiple = TARGET_MULTIPLE;
584 /* Save current -mstring/-mno-string status. */
585 int string = TARGET_STRING;
587 /* Identify the processor type. */
588 rs6000_select[0].string = default_cpu;
589 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
591 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
593 ptr = &rs6000_select[i];
594 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
596 for (j = 0; j < ptt_size; j++)
597 if (! strcmp (ptr->string, processor_target_table[j].name))
600 rs6000_cpu = processor_target_table[j].processor;
604 target_flags |= processor_target_table[j].target_enable;
605 target_flags &= ~processor_target_table[j].target_disable;
611 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
615 if (rs6000_cpu == PROCESSOR_PPC8540)
618 /* If we are optimizing big endian systems for space, use the load/store
619 multiple and string instructions. */
620 if (BYTES_BIG_ENDIAN && optimize_size)
621 target_flags |= MASK_MULTIPLE | MASK_STRING;
623 /* If -mmultiple or -mno-multiple was explicitly used, don't
624 override with the processor default */
625 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
626 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
628 /* If -mstring or -mno-string was explicitly used, don't override
629 with the processor default. */
630 if ((target_flags_explicit & MASK_STRING) != 0)
631 target_flags = (target_flags & ~MASK_STRING) | string;
633 /* Don't allow -mmultiple or -mstring on little endian systems
634 unless the cpu is a 750, because the hardware doesn't support the
635 instructions used in little endian mode, and causes an alignment
636 trap. The 750 does not cause an alignment trap (except when the
637 target is unaligned). */
639 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
643 target_flags &= ~MASK_MULTIPLE;
644 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
645 warning ("-mmultiple is not supported on little endian systems");
650 target_flags &= ~MASK_STRING;
651 if ((target_flags_explicit & MASK_STRING) != 0)
652 warning ("-mstring is not supported on little endian systems");
656 /* Set debug flags */
657 if (rs6000_debug_name)
659 if (! strcmp (rs6000_debug_name, "all"))
660 rs6000_debug_stack = rs6000_debug_arg = 1;
661 else if (! strcmp (rs6000_debug_name, "stack"))
662 rs6000_debug_stack = 1;
663 else if (! strcmp (rs6000_debug_name, "arg"))
664 rs6000_debug_arg = 1;
666 error ("unknown -mdebug-%s switch", rs6000_debug_name);
669 if (rs6000_traceback_name)
671 if (! strncmp (rs6000_traceback_name, "full", 4))
672 rs6000_traceback = traceback_full;
673 else if (! strncmp (rs6000_traceback_name, "part", 4))
674 rs6000_traceback = traceback_part;
675 else if (! strncmp (rs6000_traceback_name, "no", 2))
676 rs6000_traceback = traceback_none;
678 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
679 rs6000_traceback_name);
682 /* Set size of long double */
683 rs6000_long_double_type_size = 64;
684 if (rs6000_long_double_size_string)
687 int size = strtol (rs6000_long_double_size_string, &tail, 10);
688 if (*tail != '\0' || (size != 64 && size != 128))
689 error ("Unknown switch -mlong-double-%s",
690 rs6000_long_double_size_string);
692 rs6000_long_double_type_size = size;
695 /* Handle -mabi= options. */
696 rs6000_parse_abi_options ();
698 /* Handle -mvrsave= option. */
699 rs6000_parse_vrsave_option ();
701 /* Handle -misel= option. */
702 rs6000_parse_isel_option ();
704 #ifdef SUBTARGET_OVERRIDE_OPTIONS
705 SUBTARGET_OVERRIDE_OPTIONS;
707 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
708 SUBSUBTARGET_OVERRIDE_OPTIONS;
711 /* The e500 does not have string instructions, and we set
712 MASK_STRING above when optimizing for size. */
713 if (rs6000_cpu == PROCESSOR_PPC8540 && (target_flags & MASK_STRING) != 0)
714 target_flags = target_flags & ~MASK_STRING;
716 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
717 using TARGET_OPTIONS to handle a toggle switch, but we're out of
718 bits in target_flags so TARGET_SWITCHES cannot be used.
719 Assumption here is that rs6000_longcall_switch points into the
720 text of the complete option, rather than being a copy, so we can
721 scan back for the presence or absence of the no- modifier. */
722 if (rs6000_longcall_switch)
724 const char *base = rs6000_longcall_switch;
725 while (base[-1] != 'm') base--;
727 if (*rs6000_longcall_switch != '\0')
728 error ("invalid option `%s'", base);
729 rs6000_default_long_calls = (base[0] != 'n');
732 #ifdef TARGET_REGNAMES
733 /* If the user desires alternate register names, copy in the
734 alternate names now. */
736 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
739 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
740 If -maix-struct-return or -msvr4-struct-return was explicitly
741 used, don't override with the ABI default. */
742 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
744 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
745 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
747 target_flags |= MASK_AIX_STRUCT_RET;
750 if (TARGET_LONG_DOUBLE_128
751 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
752 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
754 /* Allocate an alias set for register saves & restores from stack. */
755 rs6000_sr_alias_set = new_alias_set ();
758 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
760 /* We can only guarantee the availability of DI pseudo-ops when
761 assembling for 64-bit targets. */
764 targetm.asm_out.aligned_op.di = NULL;
765 targetm.asm_out.unaligned_op.di = NULL;
768 /* Set maximum branch target alignment at two instructions, eight bytes. */
769 align_jumps_max_skip = 8;
770 align_loops_max_skip = 8;
772 /* Arrange to save and restore machine status around nested functions. */
773 init_machine_status = rs6000_init_machine_status;
776 /* Handle -misel= option. */
778 rs6000_parse_isel_option ()
780 if (rs6000_isel_string == 0)
782 else if (! strcmp (rs6000_isel_string, "yes"))
784 else if (! strcmp (rs6000_isel_string, "no"))
787 error ("unknown -misel= option specified: '%s'",
791 /* Handle -mvrsave= options. */
793 rs6000_parse_vrsave_option ()
795 /* Generate VRSAVE instructions by default. */
796 if (rs6000_altivec_vrsave_string == 0
797 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
798 rs6000_altivec_vrsave = 1;
799 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
800 rs6000_altivec_vrsave = 0;
802 error ("unknown -mvrsave= option specified: '%s'",
803 rs6000_altivec_vrsave_string);
806 /* Handle -mabi= options. */
808 rs6000_parse_abi_options ()
810 if (rs6000_abi_string == 0)
812 else if (! strcmp (rs6000_abi_string, "altivec"))
813 rs6000_altivec_abi = 1;
814 else if (! strcmp (rs6000_abi_string, "no-altivec"))
815 rs6000_altivec_abi = 0;
816 else if (! strcmp (rs6000_abi_string, "spe"))
820 error ("not configured for ABI: '%s'", rs6000_abi_string);
823 else if (! strcmp (rs6000_abi_string, "no-spe"))
826 error ("unknown ABI specified: '%s'", rs6000_abi_string);
830 optimization_options (level, size)
831 int level ATTRIBUTE_UNUSED;
832 int size ATTRIBUTE_UNUSED;
836 /* Do anything needed at the start of the asm file. */
839 rs6000_file_start (file, default_cpu)
841 const char *default_cpu;
845 const char *start = buffer;
846 struct rs6000_cpu_select *ptr;
848 if (flag_verbose_asm)
850 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
851 rs6000_select[0].string = default_cpu;
853 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
855 ptr = &rs6000_select[i];
856 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
858 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
864 switch (rs6000_sdata)
866 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
867 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
868 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
869 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
872 if (rs6000_sdata && g_switch_value)
874 fprintf (file, "%s -G %d", start, g_switch_value);
884 /* Return nonzero if this function is known to have a null epilogue. */
889 if (reload_completed)
891 rs6000_stack_t *info = rs6000_stack_info ();
893 if (info->first_gp_reg_save == 32
894 && info->first_fp_reg_save == 64
895 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
898 && info->vrsave_mask == 0
906 /* Returns 1 always. */
909 any_operand (op, mode)
910 rtx op ATTRIBUTE_UNUSED;
911 enum machine_mode mode ATTRIBUTE_UNUSED;
916 /* Returns 1 if op is the count register. */
918 count_register_operand (op, mode)
920 enum machine_mode mode ATTRIBUTE_UNUSED;
922 if (GET_CODE (op) != REG)
925 if (REGNO (op) == COUNT_REGISTER_REGNUM)
928 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
934 /* Returns 1 if op is an altivec register. */
936 altivec_register_operand (op, mode)
938 enum machine_mode mode ATTRIBUTE_UNUSED;
941 return (register_operand (op, mode)
942 && (GET_CODE (op) != REG
943 || REGNO (op) > FIRST_PSEUDO_REGISTER
944 || ALTIVEC_REGNO_P (REGNO (op))));
948 xer_operand (op, mode)
950 enum machine_mode mode ATTRIBUTE_UNUSED;
952 if (GET_CODE (op) != REG)
955 if (XER_REGNO_P (REGNO (op)))
961 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
962 by such constants completes more quickly. */
965 s8bit_cint_operand (op, mode)
967 enum machine_mode mode ATTRIBUTE_UNUSED;
969 return ( GET_CODE (op) == CONST_INT
970 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
973 /* Return 1 if OP is a constant that can fit in a D field. */
976 short_cint_operand (op, mode)
978 enum machine_mode mode ATTRIBUTE_UNUSED;
980 return (GET_CODE (op) == CONST_INT
981 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
984 /* Similar for an unsigned D field. */
987 u_short_cint_operand (op, mode)
989 enum machine_mode mode ATTRIBUTE_UNUSED;
991 return (GET_CODE (op) == CONST_INT
992 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
995 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
998 non_short_cint_operand (op, mode)
1000 enum machine_mode mode ATTRIBUTE_UNUSED;
1002 return (GET_CODE (op) == CONST_INT
1003 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1006 /* Returns 1 if OP is a CONST_INT that is a positive value
1007 and an exact power of 2. */
1010 exact_log2_cint_operand (op, mode)
1012 enum machine_mode mode ATTRIBUTE_UNUSED;
1014 return (GET_CODE (op) == CONST_INT
1016 && exact_log2 (INTVAL (op)) >= 0);
1019 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1023 gpc_reg_operand (op, mode)
1025 enum machine_mode mode;
1027 return (register_operand (op, mode)
1028 && (GET_CODE (op) != REG
1029 || (REGNO (op) >= ARG_POINTER_REGNUM
1030 && !XER_REGNO_P (REGNO (op)))
1031 || REGNO (op) < MQ_REGNO));
1034 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1038 cc_reg_operand (op, mode)
1040 enum machine_mode mode;
1042 return (register_operand (op, mode)
1043 && (GET_CODE (op) != REG
1044 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1045 || CR_REGNO_P (REGNO (op))));
1048 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1049 CR field that isn't CR0. */
1052 cc_reg_not_cr0_operand (op, mode)
1054 enum machine_mode mode;
1056 return (register_operand (op, mode)
1057 && (GET_CODE (op) != REG
1058 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1059 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1062 /* Returns 1 if OP is either a constant integer valid for a D-field or
1063 a non-special register. If a register, it must be in the proper
1064 mode unless MODE is VOIDmode. */
1067 reg_or_short_operand (op, mode)
1069 enum machine_mode mode;
1071 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1074 /* Similar, except check if the negation of the constant would be
1075 valid for a D-field. */
1078 reg_or_neg_short_operand (op, mode)
1080 enum machine_mode mode;
1082 if (GET_CODE (op) == CONST_INT)
1083 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1085 return gpc_reg_operand (op, mode);
1088 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1089 a non-special register. If a register, it must be in the proper
1090 mode unless MODE is VOIDmode. */
1093 reg_or_aligned_short_operand (op, mode)
1095 enum machine_mode mode;
1097 if (gpc_reg_operand (op, mode))
1099 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1106 /* Return 1 if the operand is either a register or an integer whose
1107 high-order 16 bits are zero. */
1110 reg_or_u_short_operand (op, mode)
1112 enum machine_mode mode;
1114 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1117 /* Return 1 is the operand is either a non-special register or ANY
1118 constant integer. */
1121 reg_or_cint_operand (op, mode)
1123 enum machine_mode mode;
1125 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1128 /* Return 1 is the operand is either a non-special register or ANY
1129 32-bit signed constant integer. */
1132 reg_or_arith_cint_operand (op, mode)
1134 enum machine_mode mode;
1136 return (gpc_reg_operand (op, mode)
1137 || (GET_CODE (op) == CONST_INT
1138 #if HOST_BITS_PER_WIDE_INT != 32
1139 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1140 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1145 /* Return 1 is the operand is either a non-special register or a 32-bit
1146 signed constant integer valid for 64-bit addition. */
1149 reg_or_add_cint64_operand (op, mode)
1151 enum machine_mode mode;
1153 return (gpc_reg_operand (op, mode)
1154 || (GET_CODE (op) == CONST_INT
1155 #if HOST_BITS_PER_WIDE_INT == 32
1156 && INTVAL (op) < 0x7fff8000
1158 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1164 /* Return 1 is the operand is either a non-special register or a 32-bit
1165 signed constant integer valid for 64-bit subtraction. */
1168 reg_or_sub_cint64_operand (op, mode)
1170 enum machine_mode mode;
1172 return (gpc_reg_operand (op, mode)
1173 || (GET_CODE (op) == CONST_INT
1174 #if HOST_BITS_PER_WIDE_INT == 32
1175 && (- INTVAL (op)) < 0x7fff8000
1177 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1183 /* Return 1 is the operand is either a non-special register or ANY
1184 32-bit unsigned constant integer. */
1187 reg_or_logical_cint_operand (op, mode)
1189 enum machine_mode mode;
1191 if (GET_CODE (op) == CONST_INT)
1193 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1195 if (GET_MODE_BITSIZE (mode) <= 32)
1198 if (INTVAL (op) < 0)
1202 return ((INTVAL (op) & GET_MODE_MASK (mode)
1203 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1205 else if (GET_CODE (op) == CONST_DOUBLE)
1207 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1211 return CONST_DOUBLE_HIGH (op) == 0;
1214 return gpc_reg_operand (op, mode);
1217 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1220 got_operand (op, mode)
1222 enum machine_mode mode ATTRIBUTE_UNUSED;
1224 return (GET_CODE (op) == SYMBOL_REF
1225 || GET_CODE (op) == CONST
1226 || GET_CODE (op) == LABEL_REF);
1229 /* Return 1 if the operand is a simple references that can be loaded via
1230 the GOT (labels involving addition aren't allowed). */
1233 got_no_const_operand (op, mode)
1235 enum machine_mode mode ATTRIBUTE_UNUSED;
1237 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1240 /* Return the number of instructions it takes to form a constant in an
1241 integer register. */
1244 num_insns_constant_wide (value)
1245 HOST_WIDE_INT value;
1247 /* signed constant loadable with {cal|addi} */
1248 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1251 /* constant loadable with {cau|addis} */
1252 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1255 #if HOST_BITS_PER_WIDE_INT == 64
1256 else if (TARGET_POWERPC64)
1258 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1259 HOST_WIDE_INT high = value >> 31;
1261 if (high == 0 || high == -1)
1267 return num_insns_constant_wide (high) + 1;
1269 return (num_insns_constant_wide (high)
1270 + num_insns_constant_wide (low) + 1);
1279 num_insns_constant (op, mode)
1281 enum machine_mode mode;
1283 if (GET_CODE (op) == CONST_INT)
1285 #if HOST_BITS_PER_WIDE_INT == 64
1286 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1287 && mask64_operand (op, mode))
1291 return num_insns_constant_wide (INTVAL (op));
1294 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1299 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1300 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1301 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1304 else if (GET_CODE (op) == CONST_DOUBLE)
1310 int endian = (WORDS_BIG_ENDIAN == 0);
1312 if (mode == VOIDmode || mode == DImode)
1314 high = CONST_DOUBLE_HIGH (op);
1315 low = CONST_DOUBLE_LOW (op);
1319 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1320 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1322 low = l[1 - endian];
1326 return (num_insns_constant_wide (low)
1327 + num_insns_constant_wide (high));
1331 if (high == 0 && low >= 0)
1332 return num_insns_constant_wide (low);
1334 else if (high == -1 && low < 0)
1335 return num_insns_constant_wide (low);
1337 else if (mask64_operand (op, mode))
1341 return num_insns_constant_wide (high) + 1;
1344 return (num_insns_constant_wide (high)
1345 + num_insns_constant_wide (low) + 1);
1353 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1354 register with one instruction per word. We only do this if we can
1355 safely read CONST_DOUBLE_{LOW,HIGH}. */
1358 easy_fp_constant (op, mode)
1360 enum machine_mode mode;
1362 if (GET_CODE (op) != CONST_DOUBLE
1363 || GET_MODE (op) != mode
1364 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1367 /* Consider all constants with -msoft-float to be easy. */
1368 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1372 /* If we are using V.4 style PIC, consider all constants to be hard. */
1373 if (flag_pic && DEFAULT_ABI == ABI_V4)
1376 #ifdef TARGET_RELOCATABLE
1377 /* Similarly if we are using -mrelocatable, consider all constants
1379 if (TARGET_RELOCATABLE)
1388 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1389 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1391 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1392 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1393 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1394 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1397 else if (mode == DFmode)
1402 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1403 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1405 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1406 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1409 else if (mode == SFmode)
1414 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1415 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1417 return num_insns_constant_wide (l) == 1;
1420 else if (mode == DImode)
1421 return ((TARGET_POWERPC64
1422 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1423 || (num_insns_constant (op, DImode) <= 2));
1425 else if (mode == SImode)
1431 /* Return non zero if all elements of a vector have the same value. */
1434 easy_vector_same (op, mode)
1436 enum machine_mode mode ATTRIBUTE_UNUSED;
1440 units = CONST_VECTOR_NUNITS (op);
1442 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1443 for (i = 1; i < units; ++i)
1444 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1451 /* Return 1 if the operand is a CONST_INT and can be put into a
1452 register without using memory. */
1455 easy_vector_constant (op, mode)
1457 enum machine_mode mode;
1461 if (GET_CODE (op) != CONST_VECTOR
1466 if (zero_constant (op, mode)
1467 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1468 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1471 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1474 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1475 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1477 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1479 evmergelo r0, r0, r0
1482 I don't know how efficient it would be to allow bigger constants,
1483 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1484 instructions is better than a 64-bit memory load, but I don't
1485 have the e500 timing specs. */
1486 if (TARGET_SPE && mode == V2SImode
1487 && cst >= -0x7fff && cst <= 0x7fff
1488 && cst2 >= -0x7fff && cst <= 0x7fff)
1491 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1494 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1500 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1503 easy_vector_constant_add_self (op, mode)
1505 enum machine_mode mode;
1509 if (!easy_vector_constant (op, mode))
1512 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1514 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1518 output_vec_const_move (operands)
1522 enum machine_mode mode;
1528 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1529 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1530 mode = GET_MODE (dest);
1534 if (zero_constant (vec, mode))
1535 return "vxor %0,%0,%0";
1536 else if (EASY_VECTOR_15 (cst, vec, mode))
1538 operands[1] = GEN_INT (cst);
1542 return "vspltisw %0,%1";
1544 return "vspltish %0,%1";
1546 return "vspltisb %0,%1";
1551 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1559 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1560 pattern of V1DI, V4HI, and V2SF.
1562 FIXME: We should probabl return # and add post reload
1563 splitters for these, but this way is so easy ;-).
1565 operands[1] = GEN_INT (cst);
1566 operands[2] = GEN_INT (cst2);
1568 return "li %0,%1\n\tevmergelo %0,%0,%0";
1570 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1576 /* Return 1 if the operand is the constant 0. This works for scalars
1577 as well as vectors. */
1579 zero_constant (op, mode)
1581 enum machine_mode mode;
1583 return op == CONST0_RTX (mode);
1586 /* Return 1 if the operand is 0.0. */
1588 zero_fp_constant (op, mode)
1590 enum machine_mode mode;
1592 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1595 /* Return 1 if the operand is in volatile memory. Note that during
1596 the RTL generation phase, memory_operand does not return TRUE for
1597 volatile memory references. So this function allows us to
1598 recognize volatile references where its safe. */
1601 volatile_mem_operand (op, mode)
1603 enum machine_mode mode;
1605 if (GET_CODE (op) != MEM)
1608 if (!MEM_VOLATILE_P (op))
1611 if (mode != GET_MODE (op))
1614 if (reload_completed)
1615 return memory_operand (op, mode);
1617 if (reload_in_progress)
1618 return strict_memory_address_p (mode, XEXP (op, 0));
1620 return memory_address_p (mode, XEXP (op, 0));
1623 /* Return 1 if the operand is an offsettable memory operand. */
1626 offsettable_mem_operand (op, mode)
1628 enum machine_mode mode;
1630 return ((GET_CODE (op) == MEM)
1631 && offsettable_address_p (reload_completed || reload_in_progress,
1632 mode, XEXP (op, 0)));
1635 /* Return 1 if the operand is either an easy FP constant (see above) or
1639 mem_or_easy_const_operand (op, mode)
1641 enum machine_mode mode;
1643 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1646 /* Return 1 if the operand is either a non-special register or an item
1647 that can be used as the operand of a `mode' add insn. */
1650 add_operand (op, mode)
1652 enum machine_mode mode;
1654 if (GET_CODE (op) == CONST_INT)
1655 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1656 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1658 return gpc_reg_operand (op, mode);
1661 /* Return 1 if OP is a constant but not a valid add_operand. */
1664 non_add_cint_operand (op, mode)
1666 enum machine_mode mode ATTRIBUTE_UNUSED;
1668 return (GET_CODE (op) == CONST_INT
1669 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1670 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1673 /* Return 1 if the operand is a non-special register or a constant that
1674 can be used as the operand of an OR or XOR insn on the RS/6000. */
1677 logical_operand (op, mode)
1679 enum machine_mode mode;
1681 HOST_WIDE_INT opl, oph;
1683 if (gpc_reg_operand (op, mode))
1686 if (GET_CODE (op) == CONST_INT)
1688 opl = INTVAL (op) & GET_MODE_MASK (mode);
1690 #if HOST_BITS_PER_WIDE_INT <= 32
1691 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1695 else if (GET_CODE (op) == CONST_DOUBLE)
1697 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1700 opl = CONST_DOUBLE_LOW (op);
1701 oph = CONST_DOUBLE_HIGH (op);
1708 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1709 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1712 /* Return 1 if C is a constant that is not a logical operand (as
1713 above), but could be split into one. */
1716 non_logical_cint_operand (op, mode)
1718 enum machine_mode mode;
1720 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1721 && ! logical_operand (op, mode)
1722 && reg_or_logical_cint_operand (op, mode));
1725 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1726 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1727 Reject all ones and all zeros, since these should have been optimized
1728 away and confuse the making of MB and ME. */
1731 mask_operand (op, mode)
1733 enum machine_mode mode ATTRIBUTE_UNUSED;
1735 HOST_WIDE_INT c, lsb;
1737 if (GET_CODE (op) != CONST_INT)
1742 /* Fail in 64-bit mode if the mask wraps around because the upper
1743 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1744 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1747 /* We don't change the number of transitions by inverting,
1748 so make sure we start with the LS bit zero. */
1752 /* Reject all zeros or all ones. */
1756 /* Find the first transition. */
1759 /* Invert to look for a second transition. */
1762 /* Erase first transition. */
1765 /* Find the second transition (if any). */
1768 /* Match if all the bits above are 1's (or c is zero). */
1772 /* Return 1 for the PowerPC64 rlwinm corner case. */
1775 mask_operand_wrap (op, mode)
1777 enum machine_mode mode ATTRIBUTE_UNUSED;
1779 HOST_WIDE_INT c, lsb;
1781 if (GET_CODE (op) != CONST_INT)
1786 if ((c & 0x80000001) != 0x80000001)
1800 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1801 It is if there are no more than one 1->0 or 0->1 transitions.
1802 Reject all zeros, since zero should have been optimized away and
1803 confuses the making of MB and ME. */
1806 mask64_operand (op, mode)
1808 enum machine_mode mode ATTRIBUTE_UNUSED;
1810 if (GET_CODE (op) == CONST_INT)
1812 HOST_WIDE_INT c, lsb;
1816 /* Reject all zeros. */
1820 /* We don't change the number of transitions by inverting,
1821 so make sure we start with the LS bit zero. */
1825 /* Find the transition, and check that all bits above are 1's. */
1828 /* Match if all the bits above are 1's (or c is zero). */
1834 /* Like mask64_operand, but allow up to three transitions. This
1835 predicate is used by insn patterns that generate two rldicl or
1836 rldicr machine insns. */
1839 mask64_2_operand (op, mode)
1841 enum machine_mode mode ATTRIBUTE_UNUSED;
1843 if (GET_CODE (op) == CONST_INT)
1845 HOST_WIDE_INT c, lsb;
1849 /* Disallow all zeros. */
1853 /* We don't change the number of transitions by inverting,
1854 so make sure we start with the LS bit zero. */
1858 /* Find the first transition. */
1861 /* Invert to look for a second transition. */
1864 /* Erase first transition. */
1867 /* Find the second transition. */
1870 /* Invert to look for a third transition. */
1873 /* Erase second transition. */
1876 /* Find the third transition (if any). */
1879 /* Match if all the bits above are 1's (or c is zero). */
1885 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1886 implement ANDing by the mask IN. */
1888 build_mask64_2_operands (in, out)
1892 #if HOST_BITS_PER_WIDE_INT >= 64
1893 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1896 if (GET_CODE (in) != CONST_INT)
1902 /* Assume c initially something like 0x00fff000000fffff. The idea
1903 is to rotate the word so that the middle ^^^^^^ group of zeros
1904 is at the MS end and can be cleared with an rldicl mask. We then
1905 rotate back and clear off the MS ^^ group of zeros with a
1907 c = ~c; /* c == 0xff000ffffff00000 */
1908 lsb = c & -c; /* lsb == 0x0000000000100000 */
1909 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1910 c = ~c; /* c == 0x00fff000000fffff */
1911 c &= -lsb; /* c == 0x00fff00000000000 */
1912 lsb = c & -c; /* lsb == 0x0000100000000000 */
1913 c = ~c; /* c == 0xff000fffffffffff */
1914 c &= -lsb; /* c == 0xff00000000000000 */
1916 while ((lsb >>= 1) != 0)
1917 shift++; /* shift == 44 on exit from loop */
1918 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1919 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1920 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1924 /* Assume c initially something like 0xff000f0000000000. The idea
1925 is to rotate the word so that the ^^^ middle group of zeros
1926 is at the LS end and can be cleared with an rldicr mask. We then
1927 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1929 lsb = c & -c; /* lsb == 0x0000010000000000 */
1930 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1931 c = ~c; /* c == 0x00fff0ffffffffff */
1932 c &= -lsb; /* c == 0x00fff00000000000 */
1933 lsb = c & -c; /* lsb == 0x0000100000000000 */
1934 c = ~c; /* c == 0xff000fffffffffff */
1935 c &= -lsb; /* c == 0xff00000000000000 */
1937 while ((lsb >>= 1) != 0)
1938 shift++; /* shift == 44 on exit from loop */
1939 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1940 m1 >>= shift; /* m1 == 0x0000000000000fff */
1941 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1944 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1945 masks will be all 1's. We are guaranteed more than one transition. */
1946 out[0] = GEN_INT (64 - shift);
1947 out[1] = GEN_INT (m1);
1948 out[2] = GEN_INT (shift);
1949 out[3] = GEN_INT (m2);
1957 /* Return 1 if the operand is either a non-special register or a constant
1958 that can be used as the operand of a PowerPC64 logical AND insn. */
1961 and64_operand (op, mode)
1963 enum machine_mode mode;
1965 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1966 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1968 return (logical_operand (op, mode) || mask64_operand (op, mode));
1971 /* Like the above, but also match constants that can be implemented
1972 with two rldicl or rldicr insns. */
1975 and64_2_operand (op, mode)
1977 enum machine_mode mode;
1979 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1980 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1982 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1985 /* Return 1 if the operand is either a non-special register or a
1986 constant that can be used as the operand of an RS/6000 logical AND insn. */
1989 and_operand (op, mode)
1991 enum machine_mode mode;
1993 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1994 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1996 return (logical_operand (op, mode) || mask_operand (op, mode));
1999 /* Return 1 if the operand is a general register or memory operand. */
2002 reg_or_mem_operand (op, mode)
2004 enum machine_mode mode;
2006 return (gpc_reg_operand (op, mode)
2007 || memory_operand (op, mode)
2008 || volatile_mem_operand (op, mode));
2011 /* Return 1 if the operand is a general register or memory operand without
2012 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2016 lwa_operand (op, mode)
2018 enum machine_mode mode;
2022 if (reload_completed && GET_CODE (inner) == SUBREG)
2023 inner = SUBREG_REG (inner);
2025 return gpc_reg_operand (inner, mode)
2026 || (memory_operand (inner, mode)
2027 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2028 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2029 && (GET_CODE (XEXP (inner, 0)) != PLUS
2030 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2031 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2034 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2037 symbol_ref_operand (op, mode)
2039 enum machine_mode mode;
2041 if (mode != VOIDmode && GET_MODE (op) != mode)
2044 return (GET_CODE (op) == SYMBOL_REF);
2047 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2048 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2051 call_operand (op, mode)
2053 enum machine_mode mode;
2055 if (mode != VOIDmode && GET_MODE (op) != mode)
2058 return (GET_CODE (op) == SYMBOL_REF
2059 || (GET_CODE (op) == REG
2060 && (REGNO (op) == LINK_REGISTER_REGNUM
2061 || REGNO (op) == COUNT_REGISTER_REGNUM
2062 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2065 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2066 this file and the function is not weakly defined. */
2069 current_file_function_operand (op, mode)
2071 enum machine_mode mode ATTRIBUTE_UNUSED;
2073 return (GET_CODE (op) == SYMBOL_REF
2074 && (SYMBOL_REF_FLAG (op)
2075 || (op == XEXP (DECL_RTL (current_function_decl), 0)
2076 && ! DECL_WEAK (current_function_decl))));
2079 /* Return 1 if this operand is a valid input for a move insn. */
2082 input_operand (op, mode)
2084 enum machine_mode mode;
2086 /* Memory is always valid. */
2087 if (memory_operand (op, mode))
2090 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2091 if (GET_CODE (op) == CONSTANT_P_RTX)
2094 /* For floating-point, easy constants are valid. */
2095 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2097 && easy_fp_constant (op, mode))
2100 /* Allow any integer constant. */
2101 if (GET_MODE_CLASS (mode) == MODE_INT
2102 && (GET_CODE (op) == CONST_INT
2103 || GET_CODE (op) == CONST_DOUBLE))
2106 /* Allow easy vector constants. */
2107 if (GET_CODE (op) == CONST_VECTOR
2108 && easy_vector_constant (op, mode))
2111 /* For floating-point or multi-word mode, the only remaining valid type
2113 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2114 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2115 return register_operand (op, mode);
2117 /* The only cases left are integral modes one word or smaller (we
2118 do not get called for MODE_CC values). These can be in any
2120 if (register_operand (op, mode))
2123 /* A SYMBOL_REF referring to the TOC is valid. */
2124 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
2127 /* A constant pool expression (relative to the TOC) is valid */
2128 if (TOC_RELATIVE_EXPR_P (op))
2131 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2133 if (DEFAULT_ABI == ABI_V4
2134 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2135 && small_data_operand (op, Pmode))
2141 /* Return 1 for an operand in small memory on V.4/eabi. */
2144 small_data_operand (op, mode)
2145 rtx op ATTRIBUTE_UNUSED;
2146 enum machine_mode mode ATTRIBUTE_UNUSED;
2151 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2154 if (DEFAULT_ABI != ABI_V4)
2157 if (GET_CODE (op) == SYMBOL_REF)
2160 else if (GET_CODE (op) != CONST
2161 || GET_CODE (XEXP (op, 0)) != PLUS
2162 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2163 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2168 rtx sum = XEXP (op, 0);
2169 HOST_WIDE_INT summand;
2171 /* We have to be careful here, because it is the referenced address
2172 that must be 32k from _SDA_BASE_, not just the symbol. */
2173 summand = INTVAL (XEXP (sum, 1));
2174 if (summand < 0 || summand > g_switch_value)
2177 sym_ref = XEXP (sum, 0);
2180 if (*XSTR (sym_ref, 0) != '@')
2191 constant_pool_expr_1 (op, have_sym, have_toc)
2196 switch (GET_CODE(op))
2199 if (CONSTANT_POOL_ADDRESS_P (op))
2201 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2209 else if (! strcmp (XSTR (op, 0), toc_label_name))
2218 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2219 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2221 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2230 constant_pool_expr_p (op)
2235 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2239 toc_relative_expr_p (op)
2244 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2247 /* Try machine-dependent ways of modifying an illegitimate address
2248 to be legitimate. If we find one, return the new, valid address.
2249 This is used from only one place: `memory_address' in explow.c.
2251 OLDX is the address as it was before break_out_memory_refs was
2252 called. In some cases it is useful to look at this to decide what
2255 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2257 It is always safe for this function to do nothing. It exists to
2258 recognize opportunities to optimize the output.
2260 On RS/6000, first check for the sum of a register with a constant
2261 integer that is out of range. If so, generate code to add the
2262 constant with the low-order 16 bits masked to the register and force
2263 this result into another register (this can be done with `cau').
2264 Then generate an address of REG+(CONST&0xffff), allowing for the
2265 possibility of bit 16 being a one.
2267 Then check for the sum of a register and something not constant, try to
2268 load the other things into a register and return the sum. */
2270 rs6000_legitimize_address (x, oldx, mode)
2272 rtx oldx ATTRIBUTE_UNUSED;
2273 enum machine_mode mode;
2275 if (GET_CODE (x) == PLUS
2276 && GET_CODE (XEXP (x, 0)) == REG
2277 && GET_CODE (XEXP (x, 1)) == CONST_INT
2278 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2280 HOST_WIDE_INT high_int, low_int;
2282 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2283 high_int = INTVAL (XEXP (x, 1)) - low_int;
2284 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2285 GEN_INT (high_int)), 0);
2286 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2288 else if (GET_CODE (x) == PLUS
2289 && GET_CODE (XEXP (x, 0)) == REG
2290 && GET_CODE (XEXP (x, 1)) != CONST_INT
2291 && GET_MODE_NUNITS (mode) == 1
2292 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2294 || (mode != DFmode && mode != TFmode))
2295 && (TARGET_POWERPC64 || mode != DImode)
2298 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2299 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2301 else if (ALTIVEC_VECTOR_MODE (mode))
2305 /* Make sure both operands are registers. */
2306 if (GET_CODE (x) == PLUS)
2307 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2308 force_reg (Pmode, XEXP (x, 1)));
2310 reg = force_reg (Pmode, x);
2313 else if (SPE_VECTOR_MODE (mode))
2315 /* We accept [reg + reg] and [reg + OFFSET]. */
2317 if (GET_CODE (x) == PLUS)
2319 rtx op1 = XEXP (x, 0);
2320 rtx op2 = XEXP (x, 1);
2322 op1 = force_reg (Pmode, op1);
2324 if (GET_CODE (op2) != REG
2325 && (GET_CODE (op2) != CONST_INT
2326 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2327 op2 = force_reg (Pmode, op2);
2329 return gen_rtx_PLUS (Pmode, op1, op2);
2332 return force_reg (Pmode, x);
2338 && GET_CODE (x) != CONST_INT
2339 && GET_CODE (x) != CONST_DOUBLE
2341 && GET_MODE_NUNITS (mode) == 1
2342 && (GET_MODE_BITSIZE (mode) <= 32
2343 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2345 rtx reg = gen_reg_rtx (Pmode);
2346 emit_insn (gen_elf_high (reg, (x)));
2347 return gen_rtx_LO_SUM (Pmode, reg, (x));
2349 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2352 && ! MACHO_DYNAMIC_NO_PIC_P
2354 && GET_CODE (x) != CONST_INT
2355 && GET_CODE (x) != CONST_DOUBLE
2357 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2361 rtx reg = gen_reg_rtx (Pmode);
2362 emit_insn (gen_macho_high (reg, (x)));
2363 return gen_rtx_LO_SUM (Pmode, reg, (x));
2366 && CONSTANT_POOL_EXPR_P (x)
2367 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2369 return create_TOC_reference (x);
2375 /* The convention appears to be to define this wherever it is used.
2376 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2377 is now used here. */
2378 #ifndef REG_MODE_OK_FOR_BASE_P
2379 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2382 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2383 replace the input X, or the original X if no replacement is called for.
2384 The output parameter *WIN is 1 if the calling macro should goto WIN,
2387 For RS/6000, we wish to handle large displacements off a base
2388 register by splitting the addend across an addiu/addis and the mem insn.
2389 This cuts number of extra insns needed from 3 to 1.
2391 On Darwin, we use this to generate code for floating point constants.
2392 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2393 The Darwin code is inside #if TARGET_MACHO because only then is
2394 machopic_function_base_name() defined. */
2396 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2398 enum machine_mode mode;
2401 int ind_levels ATTRIBUTE_UNUSED;
2404 /* We must recognize output that we have already generated ourselves. */
2405 if (GET_CODE (x) == PLUS
2406 && GET_CODE (XEXP (x, 0)) == PLUS
2407 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2408 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2409 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2411 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2412 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2413 opnum, (enum reload_type)type);
2419 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2420 && GET_CODE (x) == LO_SUM
2421 && GET_CODE (XEXP (x, 0)) == PLUS
2422 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2423 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2424 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2425 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2426 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2427 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2428 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2430 /* Result of previous invocation of this function on Darwin
2431 floating point constant. */
2432 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2433 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2434 opnum, (enum reload_type)type);
2439 if (GET_CODE (x) == PLUS
2440 && GET_CODE (XEXP (x, 0)) == REG
2441 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2442 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2443 && GET_CODE (XEXP (x, 1)) == CONST_INT
2444 && !SPE_VECTOR_MODE (mode)
2445 && !ALTIVEC_VECTOR_MODE (mode))
2447 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2448 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2450 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2452 /* Check for 32-bit overflow. */
2453 if (high + low != val)
2459 /* Reload the high part into a base reg; leave the low part
2460 in the mem directly. */
2462 x = gen_rtx_PLUS (GET_MODE (x),
2463 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2467 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2468 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2469 opnum, (enum reload_type)type);
2474 if (GET_CODE (x) == SYMBOL_REF
2475 && DEFAULT_ABI == ABI_DARWIN
2476 && !ALTIVEC_VECTOR_MODE (mode)
2479 /* Darwin load of floating point constant. */
2480 rtx offset = gen_rtx (CONST, Pmode,
2481 gen_rtx (MINUS, Pmode, x,
2482 gen_rtx (SYMBOL_REF, Pmode,
2483 machopic_function_base_name ())));
2484 x = gen_rtx (LO_SUM, GET_MODE (x),
2485 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2486 gen_rtx (HIGH, Pmode, offset)), offset);
2487 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2488 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2489 opnum, (enum reload_type)type);
2493 if (GET_CODE (x) == SYMBOL_REF
2494 && DEFAULT_ABI == ABI_DARWIN
2495 && !ALTIVEC_VECTOR_MODE (mode)
2496 && MACHO_DYNAMIC_NO_PIC_P)
2498 /* Darwin load of floating point constant. */
2499 x = gen_rtx (LO_SUM, GET_MODE (x),
2500 gen_rtx (HIGH, Pmode, x), x);
2501 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2502 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2503 opnum, (enum reload_type)type);
2509 && CONSTANT_POOL_EXPR_P (x)
2510 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2512 (x) = create_TOC_reference (x);
2520 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2521 that is a valid memory address for an instruction.
2522 The MODE argument is the machine mode for the MEM expression
2523 that wants to use this address.
2525 On the RS/6000, there are four valid address: a SYMBOL_REF that
2526 refers to a constant pool entry of an address (or the sum of it
2527 plus a constant), a short (16-bit signed) constant plus a register,
2528 the sum of two registers, or a register indirect, possibly with an
2529 auto-increment. For DFmode and DImode with a constant plus register,
2530 we must ensure that both words are addressable or PowerPC64 with offset
2533 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2534 32-bit DImode, TImode), indexed addressing cannot be used because
2535 adjacent memory cells are accessed by adding word-sized offsets
2536 during assembly output. */
2538 rs6000_legitimate_address (mode, x, reg_ok_strict)
2539 enum machine_mode mode;
2543 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2545 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2546 && !ALTIVEC_VECTOR_MODE (mode)
2547 && !SPE_VECTOR_MODE (mode)
2549 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2551 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2553 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2555 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2557 && GET_CODE (x) == PLUS
2558 && GET_CODE (XEXP (x, 0)) == REG
2559 && XEXP (x, 0) == virtual_stack_vars_rtx
2560 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2562 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2565 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2567 || (mode != DFmode && mode != TFmode))
2568 && (TARGET_POWERPC64 || mode != DImode)
2569 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2571 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2576 /* Try to output insns to set TARGET equal to the constant C if it can
2577 be done in less than N insns. Do all computations in MODE.
2578 Returns the place where the output has been placed if it can be
2579 done and the insns have been emitted. If it would take more than N
2580 insns, zero is returned and no insns and emitted. */
2583 rs6000_emit_set_const (dest, mode, source, n)
2585 enum machine_mode mode;
2586 int n ATTRIBUTE_UNUSED;
2588 rtx result, insn, set;
2589 HOST_WIDE_INT c0, c1;
2591 if (mode == QImode || mode == HImode)
2594 dest = gen_reg_rtx (mode);
2595 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2598 else if (mode == SImode)
2600 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
2602 emit_insn (gen_rtx_SET (VOIDmode, result,
2603 GEN_INT (INTVAL (source)
2604 & (~ (HOST_WIDE_INT) 0xffff))));
2605 emit_insn (gen_rtx_SET (VOIDmode, dest,
2606 gen_rtx_IOR (SImode, result,
2607 GEN_INT (INTVAL (source) & 0xffff))));
2610 else if (mode == DImode)
2612 if (GET_CODE (source) == CONST_INT)
2614 c0 = INTVAL (source);
2617 else if (GET_CODE (source) == CONST_DOUBLE)
2619 #if HOST_BITS_PER_WIDE_INT >= 64
2620 c0 = CONST_DOUBLE_LOW (source);
2623 c0 = CONST_DOUBLE_LOW (source);
2624 c1 = CONST_DOUBLE_HIGH (source);
2630 result = rs6000_emit_set_long_const (dest, c0, c1);
2635 insn = get_last_insn ();
2636 set = single_set (insn);
2637 if (! CONSTANT_P (SET_SRC (set)))
2638 set_unique_reg_note (insn, REG_EQUAL, source);
2643 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2644 fall back to a straight forward decomposition. We do this to avoid
2645 exponential run times encountered when looking for longer sequences
2646 with rs6000_emit_set_const. */
2648 rs6000_emit_set_long_const (dest, c1, c2)
2650 HOST_WIDE_INT c1, c2;
2652 if (!TARGET_POWERPC64)
2654 rtx operand1, operand2;
2656 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2658 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2660 emit_move_insn (operand1, GEN_INT (c1));
2661 emit_move_insn (operand2, GEN_INT (c2));
2665 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2668 ud2 = (c1 & 0xffff0000) >> 16;
2669 #if HOST_BITS_PER_WIDE_INT >= 64
2673 ud4 = (c2 & 0xffff0000) >> 16;
2675 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2676 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2679 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2681 emit_move_insn (dest, GEN_INT (ud1));
2684 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2685 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2688 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2691 emit_move_insn (dest, GEN_INT (ud2 << 16));
2693 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2695 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2696 || (ud4 == 0 && ! (ud3 & 0x8000)))
2699 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2702 emit_move_insn (dest, GEN_INT (ud3 << 16));
2705 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2706 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2708 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2713 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2716 emit_move_insn (dest, GEN_INT (ud4 << 16));
2719 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2721 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2723 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2724 GEN_INT (ud2 << 16)));
2726 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2732 /* Emit a move from SOURCE to DEST in mode MODE. */
2734 rs6000_emit_move (dest, source, mode)
2737 enum machine_mode mode;
2741 operands[1] = source;
2743 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2744 if (GET_CODE (operands[1]) == CONST_DOUBLE
2745 && ! FLOAT_MODE_P (mode)
2746 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2748 /* FIXME. This should never happen. */
2749 /* Since it seems that it does, do the safe thing and convert
2751 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2753 if (GET_CODE (operands[1]) == CONST_DOUBLE
2754 && ! FLOAT_MODE_P (mode)
2755 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2756 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2757 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2758 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2761 /* Check if GCC is setting up a block move that will end up using FP
2762 registers as temporaries. We must make sure this is acceptable. */
2763 if (GET_CODE (operands[0]) == MEM
2764 && GET_CODE (operands[1]) == MEM
2766 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2767 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2768 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2769 ? 32 : MEM_ALIGN (operands[0])))
2770 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2772 : MEM_ALIGN (operands[1]))))
2773 && ! MEM_VOLATILE_P (operands [0])
2774 && ! MEM_VOLATILE_P (operands [1]))
2776 emit_move_insn (adjust_address (operands[0], SImode, 0),
2777 adjust_address (operands[1], SImode, 0));
2778 emit_move_insn (adjust_address (operands[0], SImode, 4),
2779 adjust_address (operands[1], SImode, 4));
2783 if (!no_new_pseudos)
2785 if (GET_CODE (operands[1]) == MEM && optimize > 0
2786 && (mode == QImode || mode == HImode || mode == SImode)
2787 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2789 rtx reg = gen_reg_rtx (word_mode);
2791 emit_insn (gen_rtx_SET (word_mode, reg,
2792 gen_rtx_ZERO_EXTEND (word_mode,
2794 operands[1] = gen_lowpart (mode, reg);
2796 if (GET_CODE (operands[0]) != REG)
2797 operands[1] = force_reg (mode, operands[1]);
2800 if (mode == SFmode && ! TARGET_POWERPC
2801 && TARGET_HARD_FLOAT && TARGET_FPRS
2802 && GET_CODE (operands[0]) == MEM)
2806 if (reload_in_progress || reload_completed)
2807 regnum = true_regnum (operands[1]);
2808 else if (GET_CODE (operands[1]) == REG)
2809 regnum = REGNO (operands[1]);
2813 /* If operands[1] is a register, on POWER it may have
2814 double-precision data in it, so truncate it to single
2816 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2819 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2820 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2821 operands[1] = newreg;
2825 /* Handle the case where reload calls us with an invalid address;
2826 and the case of CONSTANT_P_RTX. */
2827 if (!ALTIVEC_VECTOR_MODE (mode)
2828 && (! general_operand (operands[1], mode)
2829 || ! nonimmediate_operand (operands[0], mode)
2830 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2832 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2836 /* FIXME: In the long term, this switch statement should go away
2837 and be replaced by a sequence of tests based on things like
2843 if (CONSTANT_P (operands[1])
2844 && GET_CODE (operands[1]) != CONST_INT)
2845 operands[1] = force_const_mem (mode, operands[1]);
2851 if (CONSTANT_P (operands[1])
2852 && ! easy_fp_constant (operands[1], mode))
2853 operands[1] = force_const_mem (mode, operands[1]);
2864 if (CONSTANT_P (operands[1])
2865 && !easy_vector_constant (operands[1], mode))
2866 operands[1] = force_const_mem (mode, operands[1]);
2871 /* Use default pattern for address of ELF small data */
2874 && DEFAULT_ABI == ABI_V4
2875 && (GET_CODE (operands[1]) == SYMBOL_REF
2876 || GET_CODE (operands[1]) == CONST)
2877 && small_data_operand (operands[1], mode))
2879 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2883 if (DEFAULT_ABI == ABI_V4
2884 && mode == Pmode && mode == SImode
2885 && flag_pic == 1 && got_operand (operands[1], mode))
2887 emit_insn (gen_movsi_got (operands[0], operands[1]));
2891 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2895 && CONSTANT_P (operands[1])
2896 && GET_CODE (operands[1]) != HIGH
2897 && GET_CODE (operands[1]) != CONST_INT)
2899 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2901 /* If this is a function address on -mcall-aixdesc,
2902 convert it to the address of the descriptor. */
2903 if (DEFAULT_ABI == ABI_AIX
2904 && GET_CODE (operands[1]) == SYMBOL_REF
2905 && XSTR (operands[1], 0)[0] == '.')
2907 const char *name = XSTR (operands[1], 0);
2909 while (*name == '.')
2911 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2912 CONSTANT_POOL_ADDRESS_P (new_ref)
2913 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2914 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2915 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2916 operands[1] = new_ref;
2919 if (DEFAULT_ABI == ABI_DARWIN)
2922 if (MACHO_DYNAMIC_NO_PIC_P)
2924 /* Take care of any required data indirection. */
2925 operands[1] = rs6000_machopic_legitimize_pic_address (
2926 operands[1], mode, operands[0]);
2927 if (operands[0] != operands[1])
2928 emit_insn (gen_rtx_SET (VOIDmode,
2929 operands[0], operands[1]));
2933 emit_insn (gen_macho_high (target, operands[1]));
2934 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2938 emit_insn (gen_elf_high (target, operands[1]));
2939 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2943 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2944 and we have put it in the TOC, we just need to make a TOC-relative
2947 && GET_CODE (operands[1]) == SYMBOL_REF
2948 && CONSTANT_POOL_EXPR_P (operands[1])
2949 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2950 get_pool_mode (operands[1])))
2952 operands[1] = create_TOC_reference (operands[1]);
2954 else if (mode == Pmode
2955 && CONSTANT_P (operands[1])
2956 && ((GET_CODE (operands[1]) != CONST_INT
2957 && ! easy_fp_constant (operands[1], mode))
2958 || (GET_CODE (operands[1]) == CONST_INT
2959 && num_insns_constant (operands[1], mode) > 2)
2960 || (GET_CODE (operands[0]) == REG
2961 && FP_REGNO_P (REGNO (operands[0]))))
2962 && GET_CODE (operands[1]) != HIGH
2963 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2964 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2966 /* Emit a USE operation so that the constant isn't deleted if
2967 expensive optimizations are turned on because nobody
2968 references it. This should only be done for operands that
2969 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2970 This should not be done for operands that contain LABEL_REFs.
2971 For now, we just handle the obvious case. */
2972 if (GET_CODE (operands[1]) != LABEL_REF)
2973 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2976 /* Darwin uses a special PIC legitimizer. */
2977 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
2980 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2982 if (operands[0] != operands[1])
2983 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2988 /* If we are to limit the number of things we put in the TOC and
2989 this is a symbol plus a constant we can add in one insn,
2990 just put the symbol in the TOC and add the constant. Don't do
2991 this if reload is in progress. */
2992 if (GET_CODE (operands[1]) == CONST
2993 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2994 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2995 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2996 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2997 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2998 && ! side_effects_p (operands[0]))
3001 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3002 rtx other = XEXP (XEXP (operands[1], 0), 1);
3004 sym = force_reg (mode, sym);
3006 emit_insn (gen_addsi3 (operands[0], sym, other));
3008 emit_insn (gen_adddi3 (operands[0], sym, other));
3012 operands[1] = force_const_mem (mode, operands[1]);
3015 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
3016 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3017 get_pool_constant (XEXP (operands[1], 0)),
3018 get_pool_mode (XEXP (operands[1], 0))))
3021 = gen_rtx_MEM (mode,
3022 create_TOC_reference (XEXP (operands[1], 0)));
3023 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3024 RTX_UNCHANGING_P (operands[1]) = 1;
3030 if (GET_CODE (operands[0]) == MEM
3031 && GET_CODE (XEXP (operands[0], 0)) != REG
3032 && ! reload_in_progress)
3034 = replace_equiv_address (operands[0],
3035 copy_addr_to_reg (XEXP (operands[0], 0)));
3037 if (GET_CODE (operands[1]) == MEM
3038 && GET_CODE (XEXP (operands[1], 0)) != REG
3039 && ! reload_in_progress)
3041 = replace_equiv_address (operands[1],
3042 copy_addr_to_reg (XEXP (operands[1], 0)));
3045 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3047 gen_rtx_SET (VOIDmode,
3048 operands[0], operands[1]),
3049 gen_rtx_CLOBBER (VOIDmode,
3050 gen_rtx_SCRATCH (SImode)))));
3059 /* Above, we may have called force_const_mem which may have returned
3060 an invalid address. If we can, fix this up; otherwise, reload will
3061 have to deal with it. */
3062 if (GET_CODE (operands[1]) == MEM
3063 && ! memory_address_p (mode, XEXP (operands[1], 0))
3064 && ! reload_in_progress)
3065 operands[1] = adjust_address (operands[1], mode, 0);
3067 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3071 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3072 for a call to a function whose data type is FNTYPE.
3073 For a library call, FNTYPE is 0.
3075 For incoming args we set the number of arguments in the prototype large
3076 so we never return a PARALLEL. */
3079 init_cumulative_args (cum, fntype, libname, incoming)
3080 CUMULATIVE_ARGS *cum;
3082 rtx libname ATTRIBUTE_UNUSED;
3085 static CUMULATIVE_ARGS zero_cumulative;
3087 *cum = zero_cumulative;
3089 cum->fregno = FP_ARG_MIN_REG;
3090 cum->vregno = ALTIVEC_ARG_MIN_REG;
3091 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3092 cum->call_cookie = CALL_NORMAL;
3093 cum->sysv_gregno = GP_ARG_MIN_REG;
3096 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3098 else if (cum->prototype)
3099 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3100 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3101 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3104 cum->nargs_prototype = 0;
3106 cum->orig_nargs = cum->nargs_prototype;
3108 /* Check for a longcall attribute. */
3110 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3111 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3112 cum->call_cookie = CALL_LONG;
3114 if (TARGET_DEBUG_ARG)
3116 fprintf (stderr, "\ninit_cumulative_args:");
3119 tree ret_type = TREE_TYPE (fntype);
3120 fprintf (stderr, " ret code = %s,",
3121 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3124 if (cum->call_cookie & CALL_LONG)
3125 fprintf (stderr, " longcall,");
3127 fprintf (stderr, " proto = %d, nargs = %d\n",
3128 cum->prototype, cum->nargs_prototype);
3132 /* If defined, a C expression which determines whether, and in which
3133 direction, to pad out an argument with extra space. The value
3134 should be of type `enum direction': either `upward' to pad above
3135 the argument, `downward' to pad below, or `none' to inhibit
3138 For the AIX ABI structs are always stored left shifted in their
3142 function_arg_padding (mode, type)
3143 enum machine_mode mode;
3146 if (type != 0 && AGGREGATE_TYPE_P (type))
3149 /* This is the default definition. */
3150 return (! BYTES_BIG_ENDIAN
3153 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3154 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3155 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3156 ? downward : upward));
3159 /* If defined, a C expression that gives the alignment boundary, in bits,
3160 of an argument with the specified mode and type. If it is not defined,
3161 PARM_BOUNDARY is used for all arguments.
3163 V.4 wants long longs to be double word aligned. */
3166 function_arg_boundary (mode, type)
3167 enum machine_mode mode;
3168 tree type ATTRIBUTE_UNUSED;
3170 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3172 else if (SPE_VECTOR_MODE (mode))
3174 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3177 return PARM_BOUNDARY;
3180 /* Update the data in CUM to advance over an argument
3181 of mode MODE and data type TYPE.
3182 (TYPE is null for libcalls where that information may not be available.) */
3185 function_arg_advance (cum, mode, type, named)
3186 CUMULATIVE_ARGS *cum;
3187 enum machine_mode mode;
3191 cum->nargs_prototype--;
3193 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3195 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3198 cum->words += RS6000_ARG_SIZE (mode, type);
3200 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3201 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3203 else if (DEFAULT_ABI == ABI_V4)
3205 if (TARGET_HARD_FLOAT && TARGET_FPRS
3206 && (mode == SFmode || mode == DFmode))
3208 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3213 cum->words += cum->words & 1;
3214 cum->words += RS6000_ARG_SIZE (mode, type);
3220 int gregno = cum->sysv_gregno;
3222 /* Aggregates and IEEE quad get passed by reference. */
3223 if ((type && AGGREGATE_TYPE_P (type))
3227 n_words = RS6000_ARG_SIZE (mode, type);
3229 /* Long long and SPE vectors are put in odd registers. */
3230 if (n_words == 2 && (gregno & 1) == 0)
3233 /* Long long and SPE vectors are not split between registers
3235 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3237 /* Long long is aligned on the stack. */
3239 cum->words += cum->words & 1;
3240 cum->words += n_words;
3243 /* Note: continuing to accumulate gregno past when we've started
3244 spilling to the stack indicates the fact that we've started
3245 spilling to the stack to expand_builtin_saveregs. */
3246 cum->sysv_gregno = gregno + n_words;
3249 if (TARGET_DEBUG_ARG)
3251 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3252 cum->words, cum->fregno);
3253 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3254 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3255 fprintf (stderr, "mode = %4s, named = %d\n",
3256 GET_MODE_NAME (mode), named);
3261 int align = (TARGET_32BIT && (cum->words & 1) != 0
3262 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3264 cum->words += align + RS6000_ARG_SIZE (mode, type);
3266 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3267 && TARGET_HARD_FLOAT && TARGET_FPRS)
3268 cum->fregno += (mode == TFmode ? 2 : 1);
3270 if (TARGET_DEBUG_ARG)
3272 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3273 cum->words, cum->fregno);
3274 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3275 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3276 fprintf (stderr, "named = %d, align = %d\n", named, align);
3281 /* Determine where to put an argument to a function.
3282 Value is zero to push the argument on the stack,
3283 or a hard register in which to store the argument.
3285 MODE is the argument's machine mode.
3286 TYPE is the data type of the argument (as a tree).
3287 This is null for libcalls where that information may
3289 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3290 the preceding args and about the function being called.
3291 NAMED is nonzero if this argument is a named parameter
3292 (otherwise it is an extra parameter matching an ellipsis).
3294 On RS/6000 the first eight words of non-FP are normally in registers
3295 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3296 Under V.4, the first 8 FP args are in registers.
3298 If this is floating-point and no prototype is specified, we use
3299 both an FP and integer register (or possibly FP reg and stack). Library
3300 functions (when TYPE is zero) always have the proper types for args,
3301 so we can pass the FP value just in one register. emit_library_function
3302 doesn't support PARALLEL anyway. */
3305 function_arg (cum, mode, type, named)
3306 CUMULATIVE_ARGS *cum;
3307 enum machine_mode mode;
3311 enum rs6000_abi abi = DEFAULT_ABI;
3313 /* Return a marker to indicate whether CR1 needs to set or clear the
3314 bit that V.4 uses to say fp args were passed in registers.
3315 Assume that we don't need the marker for software floating point,
3316 or compiler generated library calls. */
3317 if (mode == VOIDmode)
3320 && cum->nargs_prototype < 0
3321 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3323 /* For the SPE, we need to crxor CR6 always. */
3325 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3326 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3327 return GEN_INT (cum->call_cookie
3328 | ((cum->fregno == FP_ARG_MIN_REG)
3329 ? CALL_V4_SET_FP_ARGS
3330 : CALL_V4_CLEAR_FP_ARGS));
3333 return GEN_INT (cum->call_cookie);
3336 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3338 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3339 return gen_rtx_REG (mode, cum->vregno);
3343 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3345 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3346 return gen_rtx_REG (mode, cum->sysv_gregno);
3350 else if (abi == ABI_V4)
3352 if (TARGET_HARD_FLOAT && TARGET_FPRS
3353 && (mode == SFmode || mode == DFmode))
3355 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3356 return gen_rtx_REG (mode, cum->fregno);
3363 int gregno = cum->sysv_gregno;
3365 /* Aggregates and IEEE quad get passed by reference. */
3366 if ((type && AGGREGATE_TYPE_P (type))
3370 n_words = RS6000_ARG_SIZE (mode, type);
3372 /* Long long and SPE vectors are put in odd registers. */
3373 if (n_words == 2 && (gregno & 1) == 0)
3376 /* Long long and SPE vectors are not split between registers
3378 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3380 /* SPE vectors in ... get split into 2 registers. */
3381 if (TARGET_SPE && TARGET_SPE_ABI
3382 && SPE_VECTOR_MODE (mode) && !named)
3385 enum machine_mode m = SImode;
3387 r1 = gen_rtx_REG (m, gregno);
3388 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3389 r2 = gen_rtx_REG (m, gregno + 1);
3390 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3391 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3393 return gen_rtx_REG (mode, gregno);
3401 int align = (TARGET_32BIT && (cum->words & 1) != 0
3402 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3403 int align_words = cum->words + align;
3405 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3408 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3411 || ((cum->nargs_prototype > 0)
3412 /* IBM AIX extended its linkage convention definition always
3413 to require FP args after register save area hole on the
3415 && (DEFAULT_ABI != ABI_AIX
3417 || (align_words < GP_ARG_NUM_REG))))
3418 return gen_rtx_REG (mode, cum->fregno);
3420 return gen_rtx_PARALLEL (mode,
3422 gen_rtx_EXPR_LIST (VOIDmode,
3423 ((align_words >= GP_ARG_NUM_REG)
3426 + RS6000_ARG_SIZE (mode, type)
3428 /* If this is partially on the stack, then
3429 we only include the portion actually
3430 in registers here. */
3431 ? gen_rtx_REG (SImode,
3432 GP_ARG_MIN_REG + align_words)
3433 : gen_rtx_REG (mode,
3434 GP_ARG_MIN_REG + align_words))),
3436 gen_rtx_EXPR_LIST (VOIDmode,
3437 gen_rtx_REG (mode, cum->fregno),
3440 else if (align_words < GP_ARG_NUM_REG)
3441 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3447 /* For an arg passed partly in registers and partly in memory,
3448 this is the number of registers used.
3449 For args passed entirely in registers or entirely in memory, zero. */
3452 function_arg_partial_nregs (cum, mode, type, named)
3453 CUMULATIVE_ARGS *cum;
3454 enum machine_mode mode;
3456 int named ATTRIBUTE_UNUSED;
3458 if (DEFAULT_ABI == ABI_V4)
3461 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3462 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3464 if (cum->nargs_prototype >= 0)
3468 if (cum->words < GP_ARG_NUM_REG
3469 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3471 int ret = GP_ARG_NUM_REG - cum->words;
3472 if (ret && TARGET_DEBUG_ARG)
3473 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3481 /* A C expression that indicates when an argument must be passed by
3482 reference. If nonzero for an argument, a copy of that argument is
3483 made in memory and a pointer to the argument is passed instead of
3484 the argument itself. The pointer is passed in whatever way is
3485 appropriate for passing a pointer to that type.
3487 Under V.4, structures and unions are passed by reference.
3489 As an extension to all ABIs, variable sized types are passed by
3493 function_arg_pass_by_reference (cum, mode, type, named)
3494 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3495 enum machine_mode mode ATTRIBUTE_UNUSED;
3497 int named ATTRIBUTE_UNUSED;
3499 if (DEFAULT_ABI == ABI_V4
3500 && ((type && AGGREGATE_TYPE_P (type))
3503 if (TARGET_DEBUG_ARG)
3504 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3508 return type && int_size_in_bytes (type) <= 0;
3511 /* Perform any needed actions needed for a function that is receiving a
3512 variable number of arguments.
3516 MODE and TYPE are the mode and type of the current parameter.
3518 PRETEND_SIZE is a variable that should be set to the amount of stack
3519 that must be pushed by the prolog to pretend that our caller pushed
3522 Normally, this macro will push all remaining incoming registers on the
3523 stack and set PRETEND_SIZE to the length of the registers pushed. */
3526 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3527 CUMULATIVE_ARGS *cum;
3528 enum machine_mode mode;
3530 int *pretend_size ATTRIBUTE_UNUSED;
3534 CUMULATIVE_ARGS next_cum;
3535 int reg_size = TARGET_32BIT ? 4 : 8;
3536 rtx save_area = NULL_RTX, mem;
3537 int first_reg_offset, set;
3541 fntype = TREE_TYPE (current_function_decl);
3542 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3543 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3544 != void_type_node));
3546 /* For varargs, we do not want to skip the dummy va_dcl argument.
3547 For stdargs, we do want to skip the last named argument. */
3550 function_arg_advance (&next_cum, mode, type, 1);
3552 if (DEFAULT_ABI == ABI_V4)
3554 /* Indicate to allocate space on the stack for varargs save area. */
3555 cfun->machine->sysv_varargs_p = 1;
3557 save_area = plus_constant (virtual_stack_vars_rtx,
3558 - RS6000_VARARGS_SIZE);
3560 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3564 first_reg_offset = next_cum.words;
3565 save_area = virtual_incoming_args_rtx;
3566 cfun->machine->sysv_varargs_p = 0;
3568 if (MUST_PASS_IN_STACK (mode, type))
3569 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3572 set = get_varargs_alias_set ();
3573 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3575 mem = gen_rtx_MEM (BLKmode,
3576 plus_constant (save_area,
3577 first_reg_offset * reg_size)),
3578 set_mem_alias_set (mem, set);
3579 set_mem_align (mem, BITS_PER_WORD);
3582 (GP_ARG_MIN_REG + first_reg_offset, mem,
3583 GP_ARG_NUM_REG - first_reg_offset,
3584 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3587 /* Save FP registers if needed. */
3588 if (DEFAULT_ABI == ABI_V4
3589 && TARGET_HARD_FLOAT && TARGET_FPRS
3591 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3593 int fregno = next_cum.fregno;
3594 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3595 rtx lab = gen_label_rtx ();
3596 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3598 emit_jump_insn (gen_rtx_SET (VOIDmode,
3600 gen_rtx_IF_THEN_ELSE (VOIDmode,
3601 gen_rtx_NE (VOIDmode, cr1,
3603 gen_rtx_LABEL_REF (VOIDmode, lab),
3606 while (fregno <= FP_ARG_V4_MAX_REG)
3608 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3609 set_mem_alias_set (mem, set);
3610 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3619 /* Create the va_list data type. */
3622 rs6000_build_va_list ()
3624 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3626 /* For AIX, prefer 'char *' because that's what the system
3627 header files like. */
3628 if (DEFAULT_ABI != ABI_V4)
3629 return build_pointer_type (char_type_node);
3631 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3632 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3634 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3635 unsigned_char_type_node);
3636 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3637 unsigned_char_type_node);
3638 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3640 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3643 DECL_FIELD_CONTEXT (f_gpr) = record;
3644 DECL_FIELD_CONTEXT (f_fpr) = record;
3645 DECL_FIELD_CONTEXT (f_ovf) = record;
3646 DECL_FIELD_CONTEXT (f_sav) = record;
3648 TREE_CHAIN (record) = type_decl;
3649 TYPE_NAME (record) = type_decl;
3650 TYPE_FIELDS (record) = f_gpr;
3651 TREE_CHAIN (f_gpr) = f_fpr;
3652 TREE_CHAIN (f_fpr) = f_ovf;
3653 TREE_CHAIN (f_ovf) = f_sav;
3655 layout_type (record);
3657 /* The correct type is an array type of one element. */
3658 return build_array_type (record, build_index_type (size_zero_node));
3661 /* Implement va_start. */
3664 rs6000_va_start (valist, nextarg)
3668 HOST_WIDE_INT words, n_gpr, n_fpr;
3669 tree f_gpr, f_fpr, f_ovf, f_sav;
3670 tree gpr, fpr, ovf, sav, t;
3672 /* Only SVR4 needs something special. */
3673 if (DEFAULT_ABI != ABI_V4)
3675 std_expand_builtin_va_start (valist, nextarg);
3679 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3680 f_fpr = TREE_CHAIN (f_gpr);
3681 f_ovf = TREE_CHAIN (f_fpr);
3682 f_sav = TREE_CHAIN (f_ovf);
3684 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3685 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3686 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3687 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3688 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3690 /* Count number of gp and fp argument registers used. */
3691 words = current_function_args_info.words;
3692 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3693 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3695 if (TARGET_DEBUG_ARG)
3697 fputs ("va_start: words = ", stderr);
3698 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3699 fputs (", n_gpr = ", stderr);
3700 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3701 fputs (", n_fpr = ", stderr);
3702 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3703 putc ('\n', stderr);
3706 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3707 TREE_SIDE_EFFECTS (t) = 1;
3708 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3710 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3711 TREE_SIDE_EFFECTS (t) = 1;
3712 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3714 /* Find the overflow area. */
3715 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3717 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3718 build_int_2 (words * UNITS_PER_WORD, 0));
3719 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3720 TREE_SIDE_EFFECTS (t) = 1;
3721 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3723 /* Find the register save area. */
3724 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3725 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3726 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3727 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3728 TREE_SIDE_EFFECTS (t) = 1;
3729 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3732 /* Implement va_arg. */
3735 rs6000_va_arg (valist, type)
3738 tree f_gpr, f_fpr, f_ovf, f_sav;
3739 tree gpr, fpr, ovf, sav, reg, t, u;
3740 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3741 rtx lab_false, lab_over, addr_rtx, r;
3743 if (DEFAULT_ABI != ABI_V4)
3745 /* Variable sized types are passed by reference. */
3746 if (int_size_in_bytes (type) <= 0)
3748 u = build_pointer_type (type);
3750 /* Args grow upward. */
3751 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
3752 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
3753 TREE_SIDE_EFFECTS (t) = 1;
3755 t = build1 (NOP_EXPR, build_pointer_type (u), t);
3756 TREE_SIDE_EFFECTS (t) = 1;
3758 t = build1 (INDIRECT_REF, u, t);
3759 TREE_SIDE_EFFECTS (t) = 1;
3761 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3764 return std_expand_builtin_va_arg (valist, type);
3767 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3768 f_fpr = TREE_CHAIN (f_gpr);
3769 f_ovf = TREE_CHAIN (f_fpr);
3770 f_sav = TREE_CHAIN (f_ovf);
3772 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3773 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3774 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3775 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3776 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3778 size = int_size_in_bytes (type);
3779 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3781 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3783 /* Aggregates and long doubles are passed by reference. */
3789 size = UNITS_PER_WORD;
3792 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3794 /* FP args go in FP registers, if present. */
3803 /* Otherwise into GP registers. */
3811 /* Pull the value out of the saved registers ... */
3813 lab_false = gen_label_rtx ();
3814 lab_over = gen_label_rtx ();
3815 addr_rtx = gen_reg_rtx (Pmode);
3817 /* AltiVec vectors never go in registers. */
3818 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3820 TREE_THIS_VOLATILE (reg) = 1;
3821 emit_cmp_and_jump_insns
3822 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3823 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3826 /* Long long is aligned in the registers. */
3829 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3830 build_int_2 (n_reg - 1, 0));
3831 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3832 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3833 TREE_SIDE_EFFECTS (u) = 1;
3834 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3838 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3842 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3843 build_int_2 (n_reg, 0));
3844 TREE_SIDE_EFFECTS (u) = 1;
3846 u = build1 (CONVERT_EXPR, integer_type_node, u);
3847 TREE_SIDE_EFFECTS (u) = 1;
3849 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3850 TREE_SIDE_EFFECTS (u) = 1;
3852 t = build (PLUS_EXPR, ptr_type_node, t, u);
3853 TREE_SIDE_EFFECTS (t) = 1;
3855 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3857 emit_move_insn (addr_rtx, r);
3859 emit_jump_insn (gen_jump (lab_over));
3863 emit_label (lab_false);
3865 /* ... otherwise out of the overflow area. */
3867 /* Make sure we don't find reg 7 for the next int arg.
3869 All AltiVec vectors go in the overflow area. So in the AltiVec
3870 case we need to get the vectors from the overflow area, but
3871 remember where the GPRs and FPRs are. */
3872 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3873 || !TARGET_ALTIVEC))
3875 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3876 TREE_SIDE_EFFECTS (t) = 1;
3877 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3880 /* Care for on-stack alignment if needed. */
3887 /* AltiVec vectors are 16 byte aligned. */
3888 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3893 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3894 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3898 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3900 emit_move_insn (addr_rtx, r);
3902 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3903 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3904 TREE_SIDE_EFFECTS (t) = 1;
3905 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3907 emit_label (lab_over);
3911 r = gen_rtx_MEM (Pmode, addr_rtx);
3912 set_mem_alias_set (r, get_varargs_alias_set ());
3913 emit_move_insn (addr_rtx, r);
3921 #define def_builtin(MASK, NAME, TYPE, CODE) \
3923 if ((MASK) & target_flags) \
3924 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3928 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3930 static const struct builtin_description bdesc_3arg[] =
3932 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3933 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3934 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3935 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3936 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3937 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3938 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3939 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3940 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3941 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3942 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3943 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3944 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3945 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3946 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3947 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3948 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3949 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3950 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3951 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3952 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3953 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3954 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3957 /* DST operations: void foo (void *, const int, const char). */
3959 static const struct builtin_description bdesc_dst[] =
3961 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3962 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3963 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3964 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3967 /* Simple binary operations: VECc = foo (VECa, VECb). */
3969 static struct builtin_description bdesc_2arg[] =
3971 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3972 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3973 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3974 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3975 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3976 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3977 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3978 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3979 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3980 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3981 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3982 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3983 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3984 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3985 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3986 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3987 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3988 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3989 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3990 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3991 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3992 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3993 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3994 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3995 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3996 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3997 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4005 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4006 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4007 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4008 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4009 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4010 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4011 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4012 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4013 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4014 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4015 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4016 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4017 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4018 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4019 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4020 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4021 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4022 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4023 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4024 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4025 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4026 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4027 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4028 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4029 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4030 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4031 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4032 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4033 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4034 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4035 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4036 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4037 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4038 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4039 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4040 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4041 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4042 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4043 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4048 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4049 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4050 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4051 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4052 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4053 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4054 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4055 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4056 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4057 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4058 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4059 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4060 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4061 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4062 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4063 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4064 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4065 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4066 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4067 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4068 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4069 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4070 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4071 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4072 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4073 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4074 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4075 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4076 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4077 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4078 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4079 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4080 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4081 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4082 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4083 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4085 /* Place holder, leave as first spe builtin. */
4086 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4087 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4088 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4089 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4090 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4091 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4092 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4093 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4094 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4095 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4096 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4097 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4098 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4099 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4100 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4101 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4102 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4103 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4104 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4105 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4106 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4107 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4108 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4109 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4110 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4111 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4112 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4113 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4114 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4115 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4116 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4117 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4118 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4119 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4120 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4121 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4122 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4123 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4124 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4125 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4126 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4127 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4128 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4129 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4130 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4131 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4132 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4133 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4134 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4135 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4136 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4137 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4138 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4139 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4140 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4141 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4142 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4143 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4144 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4145 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4146 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4147 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4148 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4149 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4150 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4151 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4152 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4153 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4154 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4155 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4156 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4157 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4158 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4159 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4160 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4161 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4162 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4163 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4164 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4165 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4166 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4167 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4168 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4169 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4170 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4171 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4172 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4173 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4174 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4175 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4176 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4177 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4178 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4179 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4180 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4181 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4182 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4183 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4184 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4185 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4186 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4187 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4188 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4189 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4190 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4191 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4192 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4193 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4194 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4196 /* SPE binary operations expecting a 5-bit unsigned literal. */
4197 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4199 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4200 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4201 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4202 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4203 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4204 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4205 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4206 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4207 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4208 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4209 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4210 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4211 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4212 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4213 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4214 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4215 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4216 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4217 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4218 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4219 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4220 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4221 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4222 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4223 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4224 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4226 /* Place-holder. Leave as last binary SPE builtin. */
4227 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4230 /* AltiVec predicates. */
4232 struct builtin_description_predicates
4234 const unsigned int mask;
4235 const enum insn_code icode;
4237 const char *const name;
4238 const enum rs6000_builtins code;
4241 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4243 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4244 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4245 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4246 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4247 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4248 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4249 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4250 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4251 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4252 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4253 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4254 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4255 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4258 /* SPE predicates. */
4259 static struct builtin_description bdesc_spe_predicates[] =
4261 /* Place-holder. Leave as first. */
4262 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4263 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4264 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4265 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4266 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4267 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4268 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4269 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4270 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4271 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4272 /* Place-holder. Leave as last. */
4273 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4276 /* SPE evsel predicates. */
4277 static struct builtin_description bdesc_spe_evsel[] =
4279 /* Place-holder. Leave as first. */
4280 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4281 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4282 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4283 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4284 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4285 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4286 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4287 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4288 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4289 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4290 /* Place-holder. Leave as last. */
4291 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4294 /* ABS* operations. */
4296 static const struct builtin_description bdesc_abs[] =
4298 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4299 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4300 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4301 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4302 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4303 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4304 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4307 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4310 static struct builtin_description bdesc_1arg[] =
4312 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4313 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4314 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4315 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4316 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4317 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4318 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4319 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4320 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4321 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4322 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4323 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4324 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4325 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4326 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4327 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4328 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4330 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4331 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4332 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4333 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4334 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4335 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4336 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4337 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4338 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4339 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4340 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4341 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4342 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4343 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4344 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4345 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4346 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4347 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4348 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4349 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4350 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4351 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4352 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4353 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4354 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4355 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4356 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4357 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4358 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4359 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4360 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4361 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4363 /* Place-holder. Leave as last unary SPE builtin. */
4364 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4368 rs6000_expand_unop_builtin (icode, arglist, target)
4369 enum insn_code icode;
4374 tree arg0 = TREE_VALUE (arglist);
4375 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4376 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4377 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4379 if (icode == CODE_FOR_nothing)
4380 /* Builtin not supported on this processor. */
4383 /* If we got invalid arguments bail out before generating bad rtl. */
4384 if (arg0 == error_mark_node)
4387 if (icode == CODE_FOR_altivec_vspltisb
4388 || icode == CODE_FOR_altivec_vspltish
4389 || icode == CODE_FOR_altivec_vspltisw
4390 || icode == CODE_FOR_spe_evsplatfi
4391 || icode == CODE_FOR_spe_evsplati)
4393 /* Only allow 5-bit *signed* literals. */
4394 if (GET_CODE (op0) != CONST_INT
4395 || INTVAL (op0) > 0x1f
4396 || INTVAL (op0) < -0x1f)
4398 error ("argument 1 must be a 5-bit signed literal");
4404 || GET_MODE (target) != tmode
4405 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4406 target = gen_reg_rtx (tmode);
4408 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4409 op0 = copy_to_mode_reg (mode0, op0);
4411 pat = GEN_FCN (icode) (target, op0);
4420 altivec_expand_abs_builtin (icode, arglist, target)
4421 enum insn_code icode;
4425 rtx pat, scratch1, scratch2;
4426 tree arg0 = TREE_VALUE (arglist);
4427 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4428 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4429 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4431 /* If we have invalid arguments, bail out before generating bad rtl. */
4432 if (arg0 == error_mark_node)
4436 || GET_MODE (target) != tmode
4437 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4438 target = gen_reg_rtx (tmode);
4440 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4441 op0 = copy_to_mode_reg (mode0, op0);
4443 scratch1 = gen_reg_rtx (mode0);
4444 scratch2 = gen_reg_rtx (mode0);
4446 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4455 rs6000_expand_binop_builtin (icode, arglist, target)
4456 enum insn_code icode;
4461 tree arg0 = TREE_VALUE (arglist);
4462 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4463 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4464 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4465 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4466 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4467 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4469 if (icode == CODE_FOR_nothing)
4470 /* Builtin not supported on this processor. */
4473 /* If we got invalid arguments bail out before generating bad rtl. */
4474 if (arg0 == error_mark_node || arg1 == error_mark_node)
4477 if (icode == CODE_FOR_altivec_vcfux
4478 || icode == CODE_FOR_altivec_vcfsx
4479 || icode == CODE_FOR_altivec_vctsxs
4480 || icode == CODE_FOR_altivec_vctuxs
4481 || icode == CODE_FOR_altivec_vspltb
4482 || icode == CODE_FOR_altivec_vsplth
4483 || icode == CODE_FOR_altivec_vspltw
4484 || icode == CODE_FOR_spe_evaddiw
4485 || icode == CODE_FOR_spe_evldd
4486 || icode == CODE_FOR_spe_evldh
4487 || icode == CODE_FOR_spe_evldw
4488 || icode == CODE_FOR_spe_evlhhesplat
4489 || icode == CODE_FOR_spe_evlhhossplat
4490 || icode == CODE_FOR_spe_evlhhousplat
4491 || icode == CODE_FOR_spe_evlwhe
4492 || icode == CODE_FOR_spe_evlwhos
4493 || icode == CODE_FOR_spe_evlwhou
4494 || icode == CODE_FOR_spe_evlwhsplat
4495 || icode == CODE_FOR_spe_evlwwsplat
4496 || icode == CODE_FOR_spe_evrlwi
4497 || icode == CODE_FOR_spe_evslwi
4498 || icode == CODE_FOR_spe_evsrwis
4499 || icode == CODE_FOR_spe_evsrwiu)
4501 /* Only allow 5-bit unsigned literals. */
4502 if (TREE_CODE (arg1) != INTEGER_CST
4503 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4505 error ("argument 2 must be a 5-bit unsigned literal");
4511 || GET_MODE (target) != tmode
4512 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4513 target = gen_reg_rtx (tmode);
4515 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4516 op0 = copy_to_mode_reg (mode0, op0);
4517 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4518 op1 = copy_to_mode_reg (mode1, op1);
4520 pat = GEN_FCN (icode) (target, op0, op1);
4529 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4530 enum insn_code icode;
4536 tree cr6_form = TREE_VALUE (arglist);
4537 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4538 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4539 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4540 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4541 enum machine_mode tmode = SImode;
4542 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4543 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4546 if (TREE_CODE (cr6_form) != INTEGER_CST)
4548 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4552 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4557 /* If we have invalid arguments, bail out before generating bad rtl. */
4558 if (arg0 == error_mark_node || arg1 == error_mark_node)
4562 || GET_MODE (target) != tmode
4563 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4564 target = gen_reg_rtx (tmode);
4566 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4567 op0 = copy_to_mode_reg (mode0, op0);
4568 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4569 op1 = copy_to_mode_reg (mode1, op1);
4571 scratch = gen_reg_rtx (mode0);
4573 pat = GEN_FCN (icode) (scratch, op0, op1,
4574 gen_rtx (SYMBOL_REF, Pmode, opcode));
4579 /* The vec_any* and vec_all* predicates use the same opcodes for two
4580 different operations, but the bits in CR6 will be different
4581 depending on what information we want. So we have to play tricks
4582 with CR6 to get the right bits out.
4584 If you think this is disgusting, look at the specs for the
4585 AltiVec predicates. */
4587 switch (cr6_form_int)
4590 emit_insn (gen_cr6_test_for_zero (target));
4593 emit_insn (gen_cr6_test_for_zero_reverse (target));
4596 emit_insn (gen_cr6_test_for_lt (target));
4599 emit_insn (gen_cr6_test_for_lt_reverse (target));
4602 error ("argument 1 of __builtin_altivec_predicate is out of range");
4610 altivec_expand_stv_builtin (icode, arglist)
4611 enum insn_code icode;
4614 tree arg0 = TREE_VALUE (arglist);
4615 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4616 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4617 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4618 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4619 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4621 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4622 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4623 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4625 /* Invalid arguments. Bail before doing anything stoopid! */
4626 if (arg0 == error_mark_node
4627 || arg1 == error_mark_node
4628 || arg2 == error_mark_node)
4631 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4632 op0 = copy_to_mode_reg (mode2, op0);
4633 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4634 op1 = copy_to_mode_reg (mode0, op1);
4635 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4636 op2 = copy_to_mode_reg (mode1, op2);
4638 pat = GEN_FCN (icode) (op1, op2, op0);
4645 rs6000_expand_ternop_builtin (icode, arglist, target)
4646 enum insn_code icode;
4651 tree arg0 = TREE_VALUE (arglist);
4652 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4653 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4654 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4655 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4656 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4657 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4658 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4659 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4660 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4662 if (icode == CODE_FOR_nothing)
4663 /* Builtin not supported on this processor. */
4666 /* If we got invalid arguments bail out before generating bad rtl. */
4667 if (arg0 == error_mark_node
4668 || arg1 == error_mark_node
4669 || arg2 == error_mark_node)
4672 if (icode == CODE_FOR_altivec_vsldoi_4sf
4673 || icode == CODE_FOR_altivec_vsldoi_4si
4674 || icode == CODE_FOR_altivec_vsldoi_8hi
4675 || icode == CODE_FOR_altivec_vsldoi_16qi)
4677 /* Only allow 4-bit unsigned literals. */
4678 if (TREE_CODE (arg2) != INTEGER_CST
4679 || TREE_INT_CST_LOW (arg2) & ~0xf)
4681 error ("argument 3 must be a 4-bit unsigned literal");
4687 || GET_MODE (target) != tmode
4688 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4689 target = gen_reg_rtx (tmode);
4691 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4692 op0 = copy_to_mode_reg (mode0, op0);
4693 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4694 op1 = copy_to_mode_reg (mode1, op1);
4695 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4696 op2 = copy_to_mode_reg (mode2, op2);
4698 pat = GEN_FCN (icode) (target, op0, op1, op2);
4706 /* Expand the lvx builtins. */
4708 altivec_expand_ld_builtin (exp, target, expandedp)
4713 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4714 tree arglist = TREE_OPERAND (exp, 1);
4715 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4717 enum machine_mode tmode, mode0;
4719 enum insn_code icode;
4723 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4724 icode = CODE_FOR_altivec_lvx_16qi;
4726 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4727 icode = CODE_FOR_altivec_lvx_8hi;
4729 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4730 icode = CODE_FOR_altivec_lvx_4si;
4732 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4733 icode = CODE_FOR_altivec_lvx_4sf;
4742 arg0 = TREE_VALUE (arglist);
4743 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4744 tmode = insn_data[icode].operand[0].mode;
4745 mode0 = insn_data[icode].operand[1].mode;
4748 || GET_MODE (target) != tmode
4749 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4750 target = gen_reg_rtx (tmode);
4752 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4753 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4755 pat = GEN_FCN (icode) (target, op0);
4762 /* Expand the stvx builtins. */
4764 altivec_expand_st_builtin (exp, target, expandedp)
4766 rtx target ATTRIBUTE_UNUSED;
4769 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4770 tree arglist = TREE_OPERAND (exp, 1);
4771 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4773 enum machine_mode mode0, mode1;
4775 enum insn_code icode;
4779 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4780 icode = CODE_FOR_altivec_stvx_16qi;
4782 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4783 icode = CODE_FOR_altivec_stvx_8hi;
4785 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4786 icode = CODE_FOR_altivec_stvx_4si;
4788 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4789 icode = CODE_FOR_altivec_stvx_4sf;
4796 arg0 = TREE_VALUE (arglist);
4797 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4798 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4799 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4800 mode0 = insn_data[icode].operand[0].mode;
4801 mode1 = insn_data[icode].operand[1].mode;
4803 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4804 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4805 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4806 op1 = copy_to_mode_reg (mode1, op1);
4808 pat = GEN_FCN (icode) (op0, op1);
4816 /* Expand the dst builtins. */
4818 altivec_expand_dst_builtin (exp, target, expandedp)
4820 rtx target ATTRIBUTE_UNUSED;
4823 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4824 tree arglist = TREE_OPERAND (exp, 1);
4825 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4826 tree arg0, arg1, arg2;
4827 enum machine_mode mode0, mode1, mode2;
4828 rtx pat, op0, op1, op2;
4829 struct builtin_description *d;
4834 /* Handle DST variants. */
4835 d = (struct builtin_description *) bdesc_dst;
4836 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4837 if (d->code == fcode)
4839 arg0 = TREE_VALUE (arglist);
4840 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4841 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4842 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4843 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4844 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4845 mode0 = insn_data[d->icode].operand[0].mode;
4846 mode1 = insn_data[d->icode].operand[1].mode;
4847 mode2 = insn_data[d->icode].operand[2].mode;
4849 /* Invalid arguments, bail out before generating bad rtl. */
4850 if (arg0 == error_mark_node
4851 || arg1 == error_mark_node
4852 || arg2 == error_mark_node)
4855 if (TREE_CODE (arg2) != INTEGER_CST
4856 || TREE_INT_CST_LOW (arg2) & ~0x3)
4858 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4862 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4863 op0 = copy_to_mode_reg (mode0, op0);
4864 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4865 op1 = copy_to_mode_reg (mode1, op1);
4867 pat = GEN_FCN (d->icode) (op0, op1, op2);
4878 /* Expand the builtin in EXP and store the result in TARGET. Store
4879 true in *EXPANDEDP if we found a builtin to expand. */
4881 altivec_expand_builtin (exp, target, expandedp)
4886 struct builtin_description *d;
4887 struct builtin_description_predicates *dp;
4889 enum insn_code icode;
4890 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4891 tree arglist = TREE_OPERAND (exp, 1);
4894 enum machine_mode tmode, mode0;
4895 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4897 target = altivec_expand_ld_builtin (exp, target, expandedp);
4901 target = altivec_expand_st_builtin (exp, target, expandedp);
4905 target = altivec_expand_dst_builtin (exp, target, expandedp);
4913 case ALTIVEC_BUILTIN_STVX:
4914 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4915 case ALTIVEC_BUILTIN_STVEBX:
4916 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4917 case ALTIVEC_BUILTIN_STVEHX:
4918 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4919 case ALTIVEC_BUILTIN_STVEWX:
4920 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4921 case ALTIVEC_BUILTIN_STVXL:
4922 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4924 case ALTIVEC_BUILTIN_MFVSCR:
4925 icode = CODE_FOR_altivec_mfvscr;
4926 tmode = insn_data[icode].operand[0].mode;
4929 || GET_MODE (target) != tmode
4930 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4931 target = gen_reg_rtx (tmode);
4933 pat = GEN_FCN (icode) (target);
4939 case ALTIVEC_BUILTIN_MTVSCR:
4940 icode = CODE_FOR_altivec_mtvscr;
4941 arg0 = TREE_VALUE (arglist);
4942 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4943 mode0 = insn_data[icode].operand[0].mode;
4945 /* If we got invalid arguments bail out before generating bad rtl. */
4946 if (arg0 == error_mark_node)
4949 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4950 op0 = copy_to_mode_reg (mode0, op0);
4952 pat = GEN_FCN (icode) (op0);
4957 case ALTIVEC_BUILTIN_DSSALL:
4958 emit_insn (gen_altivec_dssall ());
4961 case ALTIVEC_BUILTIN_DSS:
4962 icode = CODE_FOR_altivec_dss;
4963 arg0 = TREE_VALUE (arglist);
4964 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4965 mode0 = insn_data[icode].operand[0].mode;
4967 /* If we got invalid arguments bail out before generating bad rtl. */
4968 if (arg0 == error_mark_node)
4971 if (TREE_CODE (arg0) != INTEGER_CST
4972 || TREE_INT_CST_LOW (arg0) & ~0x3)
4974 error ("argument to dss must be a 2-bit unsigned literal");
4978 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4979 op0 = copy_to_mode_reg (mode0, op0);
4981 emit_insn (gen_altivec_dss (op0));
4985 /* Expand abs* operations. */
4986 d = (struct builtin_description *) bdesc_abs;
4987 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4988 if (d->code == fcode)
4989 return altivec_expand_abs_builtin (d->icode, arglist, target);
4991 /* Expand the AltiVec predicates. */
4992 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4993 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4994 if (dp->code == fcode)
4995 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4997 /* LV* are funky. We initialized them differently. */
5000 case ALTIVEC_BUILTIN_LVSL:
5001 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5003 case ALTIVEC_BUILTIN_LVSR:
5004 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5006 case ALTIVEC_BUILTIN_LVEBX:
5007 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5009 case ALTIVEC_BUILTIN_LVEHX:
5010 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5012 case ALTIVEC_BUILTIN_LVEWX:
5013 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5015 case ALTIVEC_BUILTIN_LVXL:
5016 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5018 case ALTIVEC_BUILTIN_LVX:
5019 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5030 /* Binops that need to be initialized manually, but can be expanded
5031 automagically by rs6000_expand_binop_builtin. */
5032 static struct builtin_description bdesc_2arg_spe[] =
5034 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5035 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5036 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5037 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5038 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5039 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5040 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5041 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5042 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5043 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5044 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5045 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5046 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5047 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5048 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5049 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5050 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5051 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5052 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5053 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5054 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5055 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5058 /* Expand the builtin in EXP and store the result in TARGET. Store
5059 true in *EXPANDEDP if we found a builtin to expand.
5061 This expands the SPE builtins that are not simple unary and binary
5064 spe_expand_builtin (exp, target, expandedp)
5069 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5070 tree arglist = TREE_OPERAND (exp, 1);
5072 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5073 enum insn_code icode;
5074 enum machine_mode tmode, mode0;
5076 struct builtin_description *d;
5081 /* Syntax check for a 5-bit unsigned immediate. */
5084 case SPE_BUILTIN_EVSTDD:
5085 case SPE_BUILTIN_EVSTDH:
5086 case SPE_BUILTIN_EVSTDW:
5087 case SPE_BUILTIN_EVSTWHE:
5088 case SPE_BUILTIN_EVSTWHO:
5089 case SPE_BUILTIN_EVSTWWE:
5090 case SPE_BUILTIN_EVSTWWO:
5091 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5092 if (TREE_CODE (arg1) != INTEGER_CST
5093 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5095 error ("argument 2 must be a 5-bit unsigned literal");
5103 d = (struct builtin_description *) bdesc_2arg_spe;
5104 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5105 if (d->code == fcode)
5106 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5108 d = (struct builtin_description *) bdesc_spe_predicates;
5109 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5110 if (d->code == fcode)
5111 return spe_expand_predicate_builtin (d->icode, arglist, target);
5113 d = (struct builtin_description *) bdesc_spe_evsel;
5114 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5115 if (d->code == fcode)
5116 return spe_expand_evsel_builtin (d->icode, arglist, target);
5120 case SPE_BUILTIN_EVSTDDX:
5121 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5122 case SPE_BUILTIN_EVSTDHX:
5123 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5124 case SPE_BUILTIN_EVSTDWX:
5125 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5126 case SPE_BUILTIN_EVSTWHEX:
5127 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5128 case SPE_BUILTIN_EVSTWHOX:
5129 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5130 case SPE_BUILTIN_EVSTWWEX:
5131 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5132 case SPE_BUILTIN_EVSTWWOX:
5133 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5134 case SPE_BUILTIN_EVSTDD:
5135 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5136 case SPE_BUILTIN_EVSTDH:
5137 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5138 case SPE_BUILTIN_EVSTDW:
5139 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5140 case SPE_BUILTIN_EVSTWHE:
5141 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5142 case SPE_BUILTIN_EVSTWHO:
5143 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5144 case SPE_BUILTIN_EVSTWWE:
5145 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5146 case SPE_BUILTIN_EVSTWWO:
5147 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5148 case SPE_BUILTIN_MFSPEFSCR:
5149 icode = CODE_FOR_spe_mfspefscr;
5150 tmode = insn_data[icode].operand[0].mode;
5153 || GET_MODE (target) != tmode
5154 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5155 target = gen_reg_rtx (tmode);
5157 pat = GEN_FCN (icode) (target);
5162 case SPE_BUILTIN_MTSPEFSCR:
5163 icode = CODE_FOR_spe_mtspefscr;
5164 arg0 = TREE_VALUE (arglist);
5165 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5166 mode0 = insn_data[icode].operand[0].mode;
5168 if (arg0 == error_mark_node)
5171 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5172 op0 = copy_to_mode_reg (mode0, op0);
5174 pat = GEN_FCN (icode) (op0);
5187 spe_expand_predicate_builtin (icode, arglist, target)
5188 enum insn_code icode;
5192 rtx pat, scratch, tmp;
5193 tree form = TREE_VALUE (arglist);
5194 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5195 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5196 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5197 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5198 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5199 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5203 if (TREE_CODE (form) != INTEGER_CST)
5205 error ("argument 1 of __builtin_spe_predicate must be a constant");
5209 form_int = TREE_INT_CST_LOW (form);
5214 if (arg0 == error_mark_node || arg1 == error_mark_node)
5218 || GET_MODE (target) != SImode
5219 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5220 target = gen_reg_rtx (SImode);
5222 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5223 op0 = copy_to_mode_reg (mode0, op0);
5224 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5225 op1 = copy_to_mode_reg (mode1, op1);
5227 scratch = gen_reg_rtx (CCmode);
5229 pat = GEN_FCN (icode) (scratch, op0, op1);
5234 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5235 _lower_. We use one compare, but look in different bits of the
5236 CR for each variant.
5238 There are 2 elements in each SPE simd type (upper/lower). The CR
5239 bits are set as follows:
5241 BIT0 | BIT 1 | BIT 2 | BIT 3
5242 U | L | (U | L) | (U & L)
5244 So, for an "all" relationship, BIT 3 would be set.
5245 For an "any" relationship, BIT 2 would be set. Etc.
5247 Following traditional nomenclature, these bits map to:
5249 BIT0 | BIT 1 | BIT 2 | BIT 3
5252 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5257 /* All variant. OV bit. */
5259 /* We need to get to the OV bit, which is the ORDERED bit. We
5260 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5261 that's ugly and will trigger a validate_condition_mode abort.
5262 So let's just use another pattern. */
5263 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5265 /* Any variant. EQ bit. */
5269 /* Upper variant. LT bit. */
5273 /* Lower variant. GT bit. */
5278 error ("argument 1 of __builtin_spe_predicate is out of range");
5282 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5283 emit_move_insn (target, tmp);
5288 /* The evsel builtins look like this:
5290 e = __builtin_spe_evsel_OP (a, b, c, d);
5294 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5295 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5299 spe_expand_evsel_builtin (icode, arglist, target)
5300 enum insn_code icode;
5305 tree arg0 = TREE_VALUE (arglist);
5306 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5307 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5308 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5309 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5310 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5311 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5312 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5313 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5314 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5319 if (arg0 == error_mark_node || arg1 == error_mark_node
5320 || arg2 == error_mark_node || arg3 == error_mark_node)
5324 || GET_MODE (target) != mode0
5325 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5326 target = gen_reg_rtx (mode0);
5328 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5329 op0 = copy_to_mode_reg (mode0, op0);
5330 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5331 op1 = copy_to_mode_reg (mode0, op1);
5332 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5333 op2 = copy_to_mode_reg (mode0, op2);
5334 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5335 op3 = copy_to_mode_reg (mode0, op3);
5337 /* Generate the compare. */
5338 scratch = gen_reg_rtx (CCmode);
5339 pat = GEN_FCN (icode) (scratch, op0, op1);
5344 if (mode0 == V2SImode)
5345 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5347 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5352 /* Expand an expression EXP that calls a built-in function,
5353 with result going to TARGET if that's convenient
5354 (and in mode MODE if that's convenient).
5355 SUBTARGET may be used as the target for computing one of EXP's operands.
5356 IGNORE is nonzero if the value is to be ignored. */
5359 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5362 rtx subtarget ATTRIBUTE_UNUSED;
5363 enum machine_mode mode ATTRIBUTE_UNUSED;
5364 int ignore ATTRIBUTE_UNUSED;
5366 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5367 tree arglist = TREE_OPERAND (exp, 1);
5368 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5369 struct builtin_description *d;
5376 ret = altivec_expand_builtin (exp, target, &success);
5383 ret = spe_expand_builtin (exp, target, &success);
5389 if (TARGET_ALTIVEC || TARGET_SPE)
5391 /* Handle simple unary operations. */
5392 d = (struct builtin_description *) bdesc_1arg;
5393 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5394 if (d->code == fcode)
5395 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5397 /* Handle simple binary operations. */
5398 d = (struct builtin_description *) bdesc_2arg;
5399 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5400 if (d->code == fcode)
5401 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5403 /* Handle simple ternary operations. */
5404 d = (struct builtin_description *) bdesc_3arg;
5405 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5406 if (d->code == fcode)
5407 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5415 rs6000_init_builtins ()
5418 spe_init_builtins ();
5420 altivec_init_builtins ();
5421 if (TARGET_ALTIVEC || TARGET_SPE)
5422 rs6000_common_init_builtins ();
5425 /* Search through a set of builtins and enable the mask bits.
5426 DESC is an array of builtins.
5427 SIZE is the total number of builtins.
5428 START is the builtin enum at which to start.
5429 END is the builtin enum at which to end. */
5431 enable_mask_for_builtins (desc, size, start, end)
5432 struct builtin_description *desc;
5434 enum rs6000_builtins start, end;
5438 for (i = 0; i < size; ++i)
5439 if (desc[i].code == start)
5445 for (; i < size; ++i)
5447 /* Flip all the bits on. */
5448 desc[i].mask = target_flags;
5449 if (desc[i].code == end)
5455 spe_init_builtins ()
5457 tree endlink = void_list_node;
5458 tree puint_type_node = build_pointer_type (unsigned_type_node);
5459 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5460 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5461 struct builtin_description *d;
5464 tree v2si_ftype_4_v2si
5465 = build_function_type
5467 tree_cons (NULL_TREE, V2SI_type_node,
5468 tree_cons (NULL_TREE, V2SI_type_node,
5469 tree_cons (NULL_TREE, V2SI_type_node,
5470 tree_cons (NULL_TREE, V2SI_type_node,
5473 tree v2sf_ftype_4_v2sf
5474 = build_function_type
5476 tree_cons (NULL_TREE, V2SF_type_node,
5477 tree_cons (NULL_TREE, V2SF_type_node,
5478 tree_cons (NULL_TREE, V2SF_type_node,
5479 tree_cons (NULL_TREE, V2SF_type_node,
5482 tree int_ftype_int_v2si_v2si
5483 = build_function_type
5485 tree_cons (NULL_TREE, integer_type_node,
5486 tree_cons (NULL_TREE, V2SI_type_node,
5487 tree_cons (NULL_TREE, V2SI_type_node,
5490 tree int_ftype_int_v2sf_v2sf
5491 = build_function_type
5493 tree_cons (NULL_TREE, integer_type_node,
5494 tree_cons (NULL_TREE, V2SF_type_node,
5495 tree_cons (NULL_TREE, V2SF_type_node,
5498 tree void_ftype_v2si_puint_int
5499 = build_function_type (void_type_node,
5500 tree_cons (NULL_TREE, V2SI_type_node,
5501 tree_cons (NULL_TREE, puint_type_node,
5502 tree_cons (NULL_TREE,
5506 tree void_ftype_v2si_puint_char
5507 = build_function_type (void_type_node,
5508 tree_cons (NULL_TREE, V2SI_type_node,
5509 tree_cons (NULL_TREE, puint_type_node,
5510 tree_cons (NULL_TREE,
5514 tree void_ftype_v2si_pv2si_int
5515 = build_function_type (void_type_node,
5516 tree_cons (NULL_TREE, V2SI_type_node,
5517 tree_cons (NULL_TREE, pv2si_type_node,
5518 tree_cons (NULL_TREE,
5522 tree void_ftype_v2si_pv2si_char
5523 = build_function_type (void_type_node,
5524 tree_cons (NULL_TREE, V2SI_type_node,
5525 tree_cons (NULL_TREE, pv2si_type_node,
5526 tree_cons (NULL_TREE,
5531 = build_function_type (void_type_node,
5532 tree_cons (NULL_TREE, integer_type_node, endlink));
5535 = build_function_type (integer_type_node,
5536 tree_cons (NULL_TREE, void_type_node, endlink));
5538 tree v2si_ftype_pv2si_int
5539 = build_function_type (V2SI_type_node,
5540 tree_cons (NULL_TREE, pv2si_type_node,
5541 tree_cons (NULL_TREE, integer_type_node,
5544 tree v2si_ftype_puint_int
5545 = build_function_type (V2SI_type_node,
5546 tree_cons (NULL_TREE, puint_type_node,
5547 tree_cons (NULL_TREE, integer_type_node,
5550 tree v2si_ftype_pushort_int
5551 = build_function_type (V2SI_type_node,
5552 tree_cons (NULL_TREE, pushort_type_node,
5553 tree_cons (NULL_TREE, integer_type_node,
5556 /* The initialization of the simple binary and unary builtins is
5557 done in rs6000_common_init_builtins, but we have to enable the
5558 mask bits here manually because we have run out of `target_flags'
5559 bits. We really need to redesign this mask business. */
5561 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5562 ARRAY_SIZE (bdesc_2arg),
5565 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5566 ARRAY_SIZE (bdesc_1arg),
5568 SPE_BUILTIN_EVSUBFUSIAAW);
5569 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5570 ARRAY_SIZE (bdesc_spe_predicates),
5571 SPE_BUILTIN_EVCMPEQ,
5572 SPE_BUILTIN_EVFSTSTLT);
5573 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5574 ARRAY_SIZE (bdesc_spe_evsel),
5575 SPE_BUILTIN_EVSEL_CMPGTS,
5576 SPE_BUILTIN_EVSEL_FSTSTEQ);
5578 /* Initialize irregular SPE builtins. */
5580 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5581 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5582 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5583 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5584 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5585 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5586 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5587 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5588 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5589 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5590 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5591 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5592 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5593 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5594 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5595 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5598 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5599 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5600 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5601 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5602 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5603 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5604 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5605 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5606 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5607 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5608 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5609 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5610 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5611 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5612 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5613 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5614 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5615 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5616 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5617 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5618 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5619 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5622 d = (struct builtin_description *) bdesc_spe_predicates;
5623 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5627 switch (insn_data[d->icode].operand[1].mode)
5630 type = int_ftype_int_v2si_v2si;
5633 type = int_ftype_int_v2sf_v2sf;
5639 def_builtin (d->mask, d->name, type, d->code);
5642 /* Evsel predicates. */
5643 d = (struct builtin_description *) bdesc_spe_evsel;
5644 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5648 switch (insn_data[d->icode].operand[1].mode)
5651 type = v2si_ftype_4_v2si;
5654 type = v2sf_ftype_4_v2sf;
5660 def_builtin (d->mask, d->name, type, d->code);
5665 altivec_init_builtins ()
5667 struct builtin_description *d;
5668 struct builtin_description_predicates *dp;
5670 tree pfloat_type_node = build_pointer_type (float_type_node);
5671 tree pint_type_node = build_pointer_type (integer_type_node);
5672 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5673 tree pchar_type_node = build_pointer_type (char_type_node);
5675 tree pvoid_type_node = build_pointer_type (void_type_node);
5677 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
5678 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
5679 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
5680 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
5682 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
5684 tree int_ftype_int_v4si_v4si
5685 = build_function_type_list (integer_type_node,
5686 integer_type_node, V4SI_type_node,
5687 V4SI_type_node, NULL_TREE);
5688 tree v4sf_ftype_pcfloat
5689 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
5690 tree void_ftype_pfloat_v4sf
5691 = build_function_type_list (void_type_node,
5692 pfloat_type_node, V4SF_type_node, NULL_TREE);
5693 tree v4si_ftype_pcint
5694 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
5695 tree void_ftype_pint_v4si
5696 = build_function_type_list (void_type_node,
5697 pint_type_node, V4SI_type_node, NULL_TREE);
5698 tree v8hi_ftype_pcshort
5699 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
5700 tree void_ftype_pshort_v8hi
5701 = build_function_type_list (void_type_node,
5702 pshort_type_node, V8HI_type_node, NULL_TREE);
5703 tree v16qi_ftype_pcchar
5704 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
5705 tree void_ftype_pchar_v16qi
5706 = build_function_type_list (void_type_node,
5707 pchar_type_node, V16QI_type_node, NULL_TREE);
5708 tree void_ftype_v4si
5709 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5710 tree v8hi_ftype_void
5711 = build_function_type (V8HI_type_node, void_list_node);
5712 tree void_ftype_void
5713 = build_function_type (void_type_node, void_list_node);
5715 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5717 tree v16qi_ftype_int_pcvoid
5718 = build_function_type_list (V16QI_type_node,
5719 integer_type_node, pcvoid_type_node, NULL_TREE);
5720 tree v8hi_ftype_int_pcvoid
5721 = build_function_type_list (V8HI_type_node,
5722 integer_type_node, pcvoid_type_node, NULL_TREE);
5723 tree v4si_ftype_int_pcvoid
5724 = build_function_type_list (V4SI_type_node,
5725 integer_type_node, pcvoid_type_node, NULL_TREE);
5727 tree void_ftype_v4si_int_pvoid
5728 = build_function_type_list (void_type_node,
5729 V4SI_type_node, integer_type_node,
5730 pvoid_type_node, NULL_TREE);
5731 tree void_ftype_v16qi_int_pvoid
5732 = build_function_type_list (void_type_node,
5733 V16QI_type_node, integer_type_node,
5734 pvoid_type_node, NULL_TREE);
5735 tree void_ftype_v8hi_int_pvoid
5736 = build_function_type_list (void_type_node,
5737 V8HI_type_node, integer_type_node,
5738 pvoid_type_node, NULL_TREE);
5739 tree int_ftype_int_v8hi_v8hi
5740 = build_function_type_list (integer_type_node,
5741 integer_type_node, V8HI_type_node,
5742 V8HI_type_node, NULL_TREE);
5743 tree int_ftype_int_v16qi_v16qi
5744 = build_function_type_list (integer_type_node,
5745 integer_type_node, V16QI_type_node,
5746 V16QI_type_node, NULL_TREE);
5747 tree int_ftype_int_v4sf_v4sf
5748 = build_function_type_list (integer_type_node,
5749 integer_type_node, V4SF_type_node,
5750 V4SF_type_node, NULL_TREE);
5751 tree v4si_ftype_v4si
5752 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5753 tree v8hi_ftype_v8hi
5754 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5755 tree v16qi_ftype_v16qi
5756 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5757 tree v4sf_ftype_v4sf
5758 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5759 tree void_ftype_pcvoid_int_char
5760 = build_function_type_list (void_type_node,
5761 pcvoid_type_node, integer_type_node,
5762 char_type_node, NULL_TREE);
5764 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
5765 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5766 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
5767 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5768 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
5769 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5770 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
5771 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5772 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
5773 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5774 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
5775 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5776 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
5777 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5778 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
5779 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5780 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5781 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5782 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5783 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5784 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
5785 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
5786 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
5787 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
5788 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
5789 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
5790 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
5791 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5792 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5793 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5794 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5795 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5797 /* Add the DST variants. */
5798 d = (struct builtin_description *) bdesc_dst;
5799 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5800 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
5802 /* Initialize the predicates. */
5803 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5804 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5806 enum machine_mode mode1;
5809 mode1 = insn_data[dp->icode].operand[1].mode;
5814 type = int_ftype_int_v4si_v4si;
5817 type = int_ftype_int_v8hi_v8hi;
5820 type = int_ftype_int_v16qi_v16qi;
5823 type = int_ftype_int_v4sf_v4sf;
5829 def_builtin (dp->mask, dp->name, type, dp->code);
5832 /* Initialize the abs* operators. */
5833 d = (struct builtin_description *) bdesc_abs;
5834 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5836 enum machine_mode mode0;
5839 mode0 = insn_data[d->icode].operand[0].mode;
5844 type = v4si_ftype_v4si;
5847 type = v8hi_ftype_v8hi;
5850 type = v16qi_ftype_v16qi;
5853 type = v4sf_ftype_v4sf;
5859 def_builtin (d->mask, d->name, type, d->code);
5864 rs6000_common_init_builtins ()
5866 struct builtin_description *d;
5869 tree v4sf_ftype_v4sf_v4sf_v16qi
5870 = build_function_type_list (V4SF_type_node,
5871 V4SF_type_node, V4SF_type_node,
5872 V16QI_type_node, NULL_TREE);
5873 tree v4si_ftype_v4si_v4si_v16qi
5874 = build_function_type_list (V4SI_type_node,
5875 V4SI_type_node, V4SI_type_node,
5876 V16QI_type_node, NULL_TREE);
5877 tree v8hi_ftype_v8hi_v8hi_v16qi
5878 = build_function_type_list (V8HI_type_node,
5879 V8HI_type_node, V8HI_type_node,
5880 V16QI_type_node, NULL_TREE);
5881 tree v16qi_ftype_v16qi_v16qi_v16qi
5882 = build_function_type_list (V16QI_type_node,
5883 V16QI_type_node, V16QI_type_node,
5884 V16QI_type_node, NULL_TREE);
5885 tree v4si_ftype_char
5886 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5887 tree v8hi_ftype_char
5888 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5889 tree v16qi_ftype_char
5890 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5891 tree v8hi_ftype_v16qi
5892 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5893 tree v4sf_ftype_v4sf
5894 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5896 tree v2si_ftype_v2si_v2si
5897 = build_function_type_list (V2SI_type_node,
5898 V2SI_type_node, V2SI_type_node, NULL_TREE);
5900 tree v2sf_ftype_v2sf_v2sf
5901 = build_function_type_list (V2SF_type_node,
5902 V2SF_type_node, V2SF_type_node, NULL_TREE);
5904 tree v2si_ftype_int_int
5905 = build_function_type_list (V2SI_type_node,
5906 integer_type_node, integer_type_node,
5909 tree v2si_ftype_v2si
5910 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5912 tree v2sf_ftype_v2sf
5913 = build_function_type_list (V2SF_type_node,
5914 V2SF_type_node, NULL_TREE);
5916 tree v2sf_ftype_v2si
5917 = build_function_type_list (V2SF_type_node,
5918 V2SI_type_node, NULL_TREE);
5920 tree v2si_ftype_v2sf
5921 = build_function_type_list (V2SI_type_node,
5922 V2SF_type_node, NULL_TREE);
5924 tree v2si_ftype_v2si_char
5925 = build_function_type_list (V2SI_type_node,
5926 V2SI_type_node, char_type_node, NULL_TREE);
5928 tree v2si_ftype_int_char
5929 = build_function_type_list (V2SI_type_node,
5930 integer_type_node, char_type_node, NULL_TREE);
5932 tree v2si_ftype_char
5933 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5935 tree int_ftype_int_int
5936 = build_function_type_list (integer_type_node,
5937 integer_type_node, integer_type_node,
5940 tree v4si_ftype_v4si_v4si
5941 = build_function_type_list (V4SI_type_node,
5942 V4SI_type_node, V4SI_type_node, NULL_TREE);
5943 tree v4sf_ftype_v4si_char
5944 = build_function_type_list (V4SF_type_node,
5945 V4SI_type_node, char_type_node, NULL_TREE);
5946 tree v4si_ftype_v4sf_char
5947 = build_function_type_list (V4SI_type_node,
5948 V4SF_type_node, char_type_node, NULL_TREE);
5949 tree v4si_ftype_v4si_char
5950 = build_function_type_list (V4SI_type_node,
5951 V4SI_type_node, char_type_node, NULL_TREE);
5952 tree v8hi_ftype_v8hi_char
5953 = build_function_type_list (V8HI_type_node,
5954 V8HI_type_node, char_type_node, NULL_TREE);
5955 tree v16qi_ftype_v16qi_char
5956 = build_function_type_list (V16QI_type_node,
5957 V16QI_type_node, char_type_node, NULL_TREE);
5958 tree v16qi_ftype_v16qi_v16qi_char
5959 = build_function_type_list (V16QI_type_node,
5960 V16QI_type_node, V16QI_type_node,
5961 char_type_node, NULL_TREE);
5962 tree v8hi_ftype_v8hi_v8hi_char
5963 = build_function_type_list (V8HI_type_node,
5964 V8HI_type_node, V8HI_type_node,
5965 char_type_node, NULL_TREE);
5966 tree v4si_ftype_v4si_v4si_char
5967 = build_function_type_list (V4SI_type_node,
5968 V4SI_type_node, V4SI_type_node,
5969 char_type_node, NULL_TREE);
5970 tree v4sf_ftype_v4sf_v4sf_char
5971 = build_function_type_list (V4SF_type_node,
5972 V4SF_type_node, V4SF_type_node,
5973 char_type_node, NULL_TREE);
5974 tree v4sf_ftype_v4sf_v4sf
5975 = build_function_type_list (V4SF_type_node,
5976 V4SF_type_node, V4SF_type_node, NULL_TREE);
5977 tree v4sf_ftype_v4sf_v4sf_v4si
5978 = build_function_type_list (V4SF_type_node,
5979 V4SF_type_node, V4SF_type_node,
5980 V4SI_type_node, NULL_TREE);
5981 tree v4sf_ftype_v4sf_v4sf_v4sf
5982 = build_function_type_list (V4SF_type_node,
5983 V4SF_type_node, V4SF_type_node,
5984 V4SF_type_node, NULL_TREE);
5985 tree v4si_ftype_v4si_v4si_v4si
5986 = build_function_type_list (V4SI_type_node,
5987 V4SI_type_node, V4SI_type_node,
5988 V4SI_type_node, NULL_TREE);
5989 tree v8hi_ftype_v8hi_v8hi
5990 = build_function_type_list (V8HI_type_node,
5991 V8HI_type_node, V8HI_type_node, NULL_TREE);
5992 tree v8hi_ftype_v8hi_v8hi_v8hi
5993 = build_function_type_list (V8HI_type_node,
5994 V8HI_type_node, V8HI_type_node,
5995 V8HI_type_node, NULL_TREE);
5996 tree v4si_ftype_v8hi_v8hi_v4si
5997 = build_function_type_list (V4SI_type_node,
5998 V8HI_type_node, V8HI_type_node,
5999 V4SI_type_node, NULL_TREE);
6000 tree v4si_ftype_v16qi_v16qi_v4si
6001 = build_function_type_list (V4SI_type_node,
6002 V16QI_type_node, V16QI_type_node,
6003 V4SI_type_node, NULL_TREE);
6004 tree v16qi_ftype_v16qi_v16qi
6005 = build_function_type_list (V16QI_type_node,
6006 V16QI_type_node, V16QI_type_node, NULL_TREE);
6007 tree v4si_ftype_v4sf_v4sf
6008 = build_function_type_list (V4SI_type_node,
6009 V4SF_type_node, V4SF_type_node, NULL_TREE);
6010 tree v8hi_ftype_v16qi_v16qi
6011 = build_function_type_list (V8HI_type_node,
6012 V16QI_type_node, V16QI_type_node, NULL_TREE);
6013 tree v4si_ftype_v8hi_v8hi
6014 = build_function_type_list (V4SI_type_node,
6015 V8HI_type_node, V8HI_type_node, NULL_TREE);
6016 tree v8hi_ftype_v4si_v4si
6017 = build_function_type_list (V8HI_type_node,
6018 V4SI_type_node, V4SI_type_node, NULL_TREE);
6019 tree v16qi_ftype_v8hi_v8hi
6020 = build_function_type_list (V16QI_type_node,
6021 V8HI_type_node, V8HI_type_node, NULL_TREE);
6022 tree v4si_ftype_v16qi_v4si
6023 = build_function_type_list (V4SI_type_node,
6024 V16QI_type_node, V4SI_type_node, NULL_TREE);
6025 tree v4si_ftype_v16qi_v16qi
6026 = build_function_type_list (V4SI_type_node,
6027 V16QI_type_node, V16QI_type_node, NULL_TREE);
6028 tree v4si_ftype_v8hi_v4si
6029 = build_function_type_list (V4SI_type_node,
6030 V8HI_type_node, V4SI_type_node, NULL_TREE);
6031 tree v4si_ftype_v8hi
6032 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6033 tree int_ftype_v4si_v4si
6034 = build_function_type_list (integer_type_node,
6035 V4SI_type_node, V4SI_type_node, NULL_TREE);
6036 tree int_ftype_v4sf_v4sf
6037 = build_function_type_list (integer_type_node,
6038 V4SF_type_node, V4SF_type_node, NULL_TREE);
6039 tree int_ftype_v16qi_v16qi
6040 = build_function_type_list (integer_type_node,
6041 V16QI_type_node, V16QI_type_node, NULL_TREE);
6042 tree int_ftype_v8hi_v8hi
6043 = build_function_type_list (integer_type_node,
6044 V8HI_type_node, V8HI_type_node, NULL_TREE);
6046 /* Add the simple ternary operators. */
6047 d = (struct builtin_description *) bdesc_3arg;
6048 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6051 enum machine_mode mode0, mode1, mode2, mode3;
6054 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6057 mode0 = insn_data[d->icode].operand[0].mode;
6058 mode1 = insn_data[d->icode].operand[1].mode;
6059 mode2 = insn_data[d->icode].operand[2].mode;
6060 mode3 = insn_data[d->icode].operand[3].mode;
6062 /* When all four are of the same mode. */
6063 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6068 type = v4si_ftype_v4si_v4si_v4si;
6071 type = v4sf_ftype_v4sf_v4sf_v4sf;
6074 type = v8hi_ftype_v8hi_v8hi_v8hi;
6077 type = v16qi_ftype_v16qi_v16qi_v16qi;
6083 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6088 type = v4si_ftype_v4si_v4si_v16qi;
6091 type = v4sf_ftype_v4sf_v4sf_v16qi;
6094 type = v8hi_ftype_v8hi_v8hi_v16qi;
6097 type = v16qi_ftype_v16qi_v16qi_v16qi;
6103 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6104 && mode3 == V4SImode)
6105 type = v4si_ftype_v16qi_v16qi_v4si;
6106 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6107 && mode3 == V4SImode)
6108 type = v4si_ftype_v8hi_v8hi_v4si;
6109 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6110 && mode3 == V4SImode)
6111 type = v4sf_ftype_v4sf_v4sf_v4si;
6113 /* vchar, vchar, vchar, 4 bit literal. */
6114 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6116 type = v16qi_ftype_v16qi_v16qi_char;
6118 /* vshort, vshort, vshort, 4 bit literal. */
6119 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6121 type = v8hi_ftype_v8hi_v8hi_char;
6123 /* vint, vint, vint, 4 bit literal. */
6124 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6126 type = v4si_ftype_v4si_v4si_char;
6128 /* vfloat, vfloat, vfloat, 4 bit literal. */
6129 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6131 type = v4sf_ftype_v4sf_v4sf_char;
6136 def_builtin (d->mask, d->name, type, d->code);
6139 /* Add the simple binary operators. */
6140 d = (struct builtin_description *) bdesc_2arg;
6141 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6143 enum machine_mode mode0, mode1, mode2;
6146 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6149 mode0 = insn_data[d->icode].operand[0].mode;
6150 mode1 = insn_data[d->icode].operand[1].mode;
6151 mode2 = insn_data[d->icode].operand[2].mode;
6153 /* When all three operands are of the same mode. */
6154 if (mode0 == mode1 && mode1 == mode2)
6159 type = v4sf_ftype_v4sf_v4sf;
6162 type = v4si_ftype_v4si_v4si;
6165 type = v16qi_ftype_v16qi_v16qi;
6168 type = v8hi_ftype_v8hi_v8hi;
6171 type = v2si_ftype_v2si_v2si;
6174 type = v2sf_ftype_v2sf_v2sf;
6177 type = int_ftype_int_int;
6184 /* A few other combos we really don't want to do manually. */
6186 /* vint, vfloat, vfloat. */
6187 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6188 type = v4si_ftype_v4sf_v4sf;
6190 /* vshort, vchar, vchar. */
6191 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6192 type = v8hi_ftype_v16qi_v16qi;
6194 /* vint, vshort, vshort. */
6195 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6196 type = v4si_ftype_v8hi_v8hi;
6198 /* vshort, vint, vint. */
6199 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6200 type = v8hi_ftype_v4si_v4si;
6202 /* vchar, vshort, vshort. */
6203 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6204 type = v16qi_ftype_v8hi_v8hi;
6206 /* vint, vchar, vint. */
6207 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6208 type = v4si_ftype_v16qi_v4si;
6210 /* vint, vchar, vchar. */
6211 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6212 type = v4si_ftype_v16qi_v16qi;
6214 /* vint, vshort, vint. */
6215 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6216 type = v4si_ftype_v8hi_v4si;
6218 /* vint, vint, 5 bit literal. */
6219 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6220 type = v4si_ftype_v4si_char;
6222 /* vshort, vshort, 5 bit literal. */
6223 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6224 type = v8hi_ftype_v8hi_char;
6226 /* vchar, vchar, 5 bit literal. */
6227 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6228 type = v16qi_ftype_v16qi_char;
6230 /* vfloat, vint, 5 bit literal. */
6231 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6232 type = v4sf_ftype_v4si_char;
6234 /* vint, vfloat, 5 bit literal. */
6235 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6236 type = v4si_ftype_v4sf_char;
6238 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6239 type = v2si_ftype_int_int;
6241 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6242 type = v2si_ftype_v2si_char;
6244 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6245 type = v2si_ftype_int_char;
6248 else if (mode0 == SImode)
6253 type = int_ftype_v4si_v4si;
6256 type = int_ftype_v4sf_v4sf;
6259 type = int_ftype_v16qi_v16qi;
6262 type = int_ftype_v8hi_v8hi;
6272 def_builtin (d->mask, d->name, type, d->code);
6275 /* Add the simple unary operators. */
6276 d = (struct builtin_description *) bdesc_1arg;
6277 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6279 enum machine_mode mode0, mode1;
6282 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6285 mode0 = insn_data[d->icode].operand[0].mode;
6286 mode1 = insn_data[d->icode].operand[1].mode;
6288 if (mode0 == V4SImode && mode1 == QImode)
6289 type = v4si_ftype_char;
6290 else if (mode0 == V8HImode && mode1 == QImode)
6291 type = v8hi_ftype_char;
6292 else if (mode0 == V16QImode && mode1 == QImode)
6293 type = v16qi_ftype_char;
6294 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6295 type = v4sf_ftype_v4sf;
6296 else if (mode0 == V8HImode && mode1 == V16QImode)
6297 type = v8hi_ftype_v16qi;
6298 else if (mode0 == V4SImode && mode1 == V8HImode)
6299 type = v4si_ftype_v8hi;
6300 else if (mode0 == V2SImode && mode1 == V2SImode)
6301 type = v2si_ftype_v2si;
6302 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6303 type = v2sf_ftype_v2sf;
6304 else if (mode0 == V2SFmode && mode1 == V2SImode)
6305 type = v2sf_ftype_v2si;
6306 else if (mode0 == V2SImode && mode1 == V2SFmode)
6307 type = v2si_ftype_v2sf;
6308 else if (mode0 == V2SImode && mode1 == QImode)
6309 type = v2si_ftype_char;
6313 def_builtin (d->mask, d->name, type, d->code);
6318 /* Expand a block move operation, and return 1 if successful. Return 0
6319 if we should let the compiler generate normal code.
6321 operands[0] is the destination
6322 operands[1] is the source
6323 operands[2] is the length
6324 operands[3] is the alignment */
6326 #define MAX_MOVE_REG 4
6329 expand_block_move (operands)
6332 rtx orig_dest = operands[0];
6333 rtx orig_src = operands[1];
6334 rtx bytes_rtx = operands[2];
6335 rtx align_rtx = operands[3];
6336 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6342 /* If this is not a fixed size move, just call memcpy */
6346 /* If this is not a fixed size alignment, abort */
6347 if (GET_CODE (align_rtx) != CONST_INT)
6349 align = INTVAL (align_rtx);
6351 /* Anything to move? */
6352 bytes = INTVAL (bytes_rtx);
6356 /* store_one_arg depends on expand_block_move to handle at least the size of
6357 reg_parm_stack_space. */
6358 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6361 if (TARGET_STRING) /* string instructions are available */
6363 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6366 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6367 rtx (*mov) PARAMS ((rtx, rtx));
6369 enum machine_mode mode = BLKmode;
6372 if (bytes > 24 /* move up to 32 bytes at a time */
6380 && ! fixed_regs[12])
6382 move_bytes = (bytes > 32) ? 32 : bytes;
6383 gen_func.movstrsi = gen_movstrsi_8reg;
6385 else if (bytes > 16 /* move up to 24 bytes at a time */
6391 && ! fixed_regs[10])
6393 move_bytes = (bytes > 24) ? 24 : bytes;
6394 gen_func.movstrsi = gen_movstrsi_6reg;
6396 else if (bytes > 8 /* move up to 16 bytes at a time */
6402 move_bytes = (bytes > 16) ? 16 : bytes;
6403 gen_func.movstrsi = gen_movstrsi_4reg;
6405 else if (bytes >= 8 && TARGET_POWERPC64
6406 /* 64-bit loads and stores require word-aligned
6408 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6412 gen_func.mov = gen_movdi;
6414 else if (bytes > 4 && !TARGET_POWERPC64)
6415 { /* move up to 8 bytes at a time */
6416 move_bytes = (bytes > 8) ? 8 : bytes;
6417 gen_func.movstrsi = gen_movstrsi_2reg;
6419 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6420 { /* move 4 bytes */
6423 gen_func.mov = gen_movsi;
6425 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6426 { /* move 2 bytes */
6429 gen_func.mov = gen_movhi;
6431 else if (bytes == 1) /* move 1 byte */
6435 gen_func.mov = gen_movqi;
6438 { /* move up to 4 bytes at a time */
6439 move_bytes = (bytes > 4) ? 4 : bytes;
6440 gen_func.movstrsi = gen_movstrsi_1reg;
6443 src = adjust_address (orig_src, mode, offset);
6444 dest = adjust_address (orig_dest, mode, offset);
6446 if (mode == BLKmode)
6448 /* Move the address into scratch registers. The movstrsi
6449 patterns require zero offset. */
6450 if (!REG_P (XEXP (src, 0)))
6452 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6453 src = replace_equiv_address (src, src_reg);
6455 set_mem_size (src, GEN_INT (move_bytes));
6457 if (!REG_P (XEXP (dest, 0)))
6459 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6460 dest = replace_equiv_address (dest, dest_reg);
6462 set_mem_size (dest, GEN_INT (move_bytes));
6464 emit_insn ((*gen_func.movstrsi) (dest, src,
6465 GEN_INT (move_bytes & 31),
6470 rtx tmp_reg = gen_reg_rtx (mode);
6472 emit_insn ((*gen_func.mov) (tmp_reg, src));
6473 emit_insn ((*gen_func.mov) (dest, tmp_reg));
6478 else /* string instructions not available */
6480 rtx stores[MAX_MOVE_REG];
6484 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6486 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
6487 enum machine_mode mode;
6488 rtx src, dest, tmp_reg;
6490 /* Generate the appropriate load and store, saving the stores
6492 if (bytes >= 8 && TARGET_POWERPC64
6493 /* 64-bit loads and stores require word-aligned
6495 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6499 gen_mov_func = gen_movdi;
6501 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6505 gen_mov_func = gen_movsi;
6507 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6511 gen_mov_func = gen_movhi;
6517 gen_mov_func = gen_movqi;
6520 src = adjust_address (orig_src, mode, offset);
6521 dest = adjust_address (orig_dest, mode, offset);
6522 tmp_reg = gen_reg_rtx (mode);
6524 emit_insn ((*gen_mov_func) (tmp_reg, src));
6525 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
6527 if (num_reg >= MAX_MOVE_REG)
6529 for (i = 0; i < num_reg; i++)
6530 emit_insn (stores[i]);
6535 for (i = 0; i < num_reg; i++)
6536 emit_insn (stores[i]);
6543 /* Return 1 if OP is a load multiple operation. It is known to be a
6544 PARALLEL and the first section will be tested. */
6547 load_multiple_operation (op, mode)
6549 enum machine_mode mode ATTRIBUTE_UNUSED;
6551 int count = XVECLEN (op, 0);
6552 unsigned int dest_regno;
6556 /* Perform a quick check so we don't blow up below. */
6558 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6559 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6560 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6563 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6564 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6566 for (i = 1; i < count; i++)
6568 rtx elt = XVECEXP (op, 0, i);
6570 if (GET_CODE (elt) != SET
6571 || GET_CODE (SET_DEST (elt)) != REG
6572 || GET_MODE (SET_DEST (elt)) != SImode
6573 || REGNO (SET_DEST (elt)) != dest_regno + i
6574 || GET_CODE (SET_SRC (elt)) != MEM
6575 || GET_MODE (SET_SRC (elt)) != SImode
6576 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6577 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6578 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6579 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6586 /* Similar, but tests for store multiple. Here, the second vector element
6587 is a CLOBBER. It will be tested later. */
6590 store_multiple_operation (op, mode)
6592 enum machine_mode mode ATTRIBUTE_UNUSED;
6594 int count = XVECLEN (op, 0) - 1;
6595 unsigned int src_regno;
6599 /* Perform a quick check so we don't blow up below. */
6601 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6602 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6603 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6606 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6607 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6609 for (i = 1; i < count; i++)
6611 rtx elt = XVECEXP (op, 0, i + 1);
6613 if (GET_CODE (elt) != SET
6614 || GET_CODE (SET_SRC (elt)) != REG
6615 || GET_MODE (SET_SRC (elt)) != SImode
6616 || REGNO (SET_SRC (elt)) != src_regno + i
6617 || GET_CODE (SET_DEST (elt)) != MEM
6618 || GET_MODE (SET_DEST (elt)) != SImode
6619 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6620 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6621 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6622 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6629 /* Return a string to perform a load_multiple operation.
6630 operands[0] is the vector.
6631 operands[1] is the source address.
6632 operands[2] is the first destination register. */
6635 rs6000_output_load_multiple (operands)
6638 /* We have to handle the case where the pseudo used to contain the address
6639 is assigned to one of the output registers. */
6641 int words = XVECLEN (operands[0], 0);
6644 if (XVECLEN (operands[0], 0) == 1)
6645 return "{l|lwz} %2,0(%1)";
6647 for (i = 0; i < words; i++)
6648 if (refers_to_regno_p (REGNO (operands[2]) + i,
6649 REGNO (operands[2]) + i + 1, operands[1], 0))
6653 xop[0] = GEN_INT (4 * (words-1));
6654 xop[1] = operands[1];
6655 xop[2] = operands[2];
6656 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
6661 xop[0] = GEN_INT (4 * (words-1));
6662 xop[1] = operands[1];
6663 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
6664 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
6669 for (j = 0; j < words; j++)
6672 xop[0] = GEN_INT (j * 4);
6673 xop[1] = operands[1];
6674 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
6675 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
6677 xop[0] = GEN_INT (i * 4);
6678 xop[1] = operands[1];
6679 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
6684 return "{lsi|lswi} %2,%1,%N0";
6687 /* Return 1 for a parallel vrsave operation. */
6690 vrsave_operation (op, mode)
6692 enum machine_mode mode ATTRIBUTE_UNUSED;
6694 int count = XVECLEN (op, 0);
6695 unsigned int dest_regno, src_regno;
6699 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6700 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6701 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6704 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6705 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6707 if (dest_regno != VRSAVE_REGNO
6708 && src_regno != VRSAVE_REGNO)
6711 for (i = 1; i < count; i++)
6713 rtx elt = XVECEXP (op, 0, i);
6715 if (GET_CODE (elt) != CLOBBER
6716 && GET_CODE (elt) != SET)
6723 /* Return 1 for an PARALLEL suitable for mtcrf. */
6726 mtcrf_operation (op, mode)
6728 enum machine_mode mode ATTRIBUTE_UNUSED;
6730 int count = XVECLEN (op, 0);
6734 /* Perform a quick check so we don't blow up below. */
6736 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6737 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6738 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6740 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6742 if (GET_CODE (src_reg) != REG
6743 || GET_MODE (src_reg) != SImode
6744 || ! INT_REGNO_P (REGNO (src_reg)))
6747 for (i = 0; i < count; i++)
6749 rtx exp = XVECEXP (op, 0, i);
6753 if (GET_CODE (exp) != SET
6754 || GET_CODE (SET_DEST (exp)) != REG
6755 || GET_MODE (SET_DEST (exp)) != CCmode
6756 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6758 unspec = SET_SRC (exp);
6759 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6761 if (GET_CODE (unspec) != UNSPEC
6762 || XINT (unspec, 1) != 20
6763 || XVECLEN (unspec, 0) != 2
6764 || XVECEXP (unspec, 0, 0) != src_reg
6765 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6766 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6772 /* Return 1 for an PARALLEL suitable for lmw. */
6775 lmw_operation (op, mode)
6777 enum machine_mode mode ATTRIBUTE_UNUSED;
6779 int count = XVECLEN (op, 0);
6780 unsigned int dest_regno;
6782 unsigned int base_regno;
6783 HOST_WIDE_INT offset;
6786 /* Perform a quick check so we don't blow up below. */
6788 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6789 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6790 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6793 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6794 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6797 || count != 32 - (int) dest_regno)
6800 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6803 base_regno = REGNO (src_addr);
6804 if (base_regno == 0)
6807 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6809 offset = INTVAL (XEXP (src_addr, 1));
6810 base_regno = REGNO (XEXP (src_addr, 0));
6815 for (i = 0; i < count; i++)
6817 rtx elt = XVECEXP (op, 0, i);
6820 HOST_WIDE_INT newoffset;
6822 if (GET_CODE (elt) != SET
6823 || GET_CODE (SET_DEST (elt)) != REG
6824 || GET_MODE (SET_DEST (elt)) != SImode
6825 || REGNO (SET_DEST (elt)) != dest_regno + i
6826 || GET_CODE (SET_SRC (elt)) != MEM
6827 || GET_MODE (SET_SRC (elt)) != SImode)
6829 newaddr = XEXP (SET_SRC (elt), 0);
6830 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6835 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6837 addr_reg = XEXP (newaddr, 0);
6838 newoffset = INTVAL (XEXP (newaddr, 1));
6842 if (REGNO (addr_reg) != base_regno
6843 || newoffset != offset + 4 * i)
6850 /* Return 1 for an PARALLEL suitable for stmw. */
6853 stmw_operation (op, mode)
6855 enum machine_mode mode ATTRIBUTE_UNUSED;
6857 int count = XVECLEN (op, 0);
6858 unsigned int src_regno;
6860 unsigned int base_regno;
6861 HOST_WIDE_INT offset;
6864 /* Perform a quick check so we don't blow up below. */
6866 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6867 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6868 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6871 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6872 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6875 || count != 32 - (int) src_regno)
6878 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6881 base_regno = REGNO (dest_addr);
6882 if (base_regno == 0)
6885 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6887 offset = INTVAL (XEXP (dest_addr, 1));
6888 base_regno = REGNO (XEXP (dest_addr, 0));
6893 for (i = 0; i < count; i++)
6895 rtx elt = XVECEXP (op, 0, i);
6898 HOST_WIDE_INT newoffset;
6900 if (GET_CODE (elt) != SET
6901 || GET_CODE (SET_SRC (elt)) != REG
6902 || GET_MODE (SET_SRC (elt)) != SImode
6903 || REGNO (SET_SRC (elt)) != src_regno + i
6904 || GET_CODE (SET_DEST (elt)) != MEM
6905 || GET_MODE (SET_DEST (elt)) != SImode)
6907 newaddr = XEXP (SET_DEST (elt), 0);
6908 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6913 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6915 addr_reg = XEXP (newaddr, 0);
6916 newoffset = INTVAL (XEXP (newaddr, 1));
6920 if (REGNO (addr_reg) != base_regno
6921 || newoffset != offset + 4 * i)
6928 /* A validation routine: say whether CODE, a condition code, and MODE
6929 match. The other alternatives either don't make sense or should
6930 never be generated. */
6933 validate_condition_mode (code, mode)
6935 enum machine_mode mode;
6937 if (GET_RTX_CLASS (code) != '<'
6938 || GET_MODE_CLASS (mode) != MODE_CC)
6941 /* These don't make sense. */
6942 if ((code == GT || code == LT || code == GE || code == LE)
6943 && mode == CCUNSmode)
6946 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6947 && mode != CCUNSmode)
6950 if (mode != CCFPmode
6951 && (code == ORDERED || code == UNORDERED
6952 || code == UNEQ || code == LTGT
6953 || code == UNGT || code == UNLT
6954 || code == UNGE || code == UNLE))
6957 /* These should never be generated except for
6958 flag_unsafe_math_optimizations and flag_finite_math_only. */
6959 if (mode == CCFPmode
6960 && ! flag_unsafe_math_optimizations
6961 && ! flag_finite_math_only
6962 && (code == LE || code == GE
6963 || code == UNEQ || code == LTGT
6964 || code == UNGT || code == UNLT))
6967 /* These are invalid; the information is not there. */
6968 if (mode == CCEQmode
6969 && code != EQ && code != NE)
6973 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6974 We only check the opcode against the mode of the CC value here. */
6977 branch_comparison_operator (op, mode)
6979 enum machine_mode mode ATTRIBUTE_UNUSED;
6981 enum rtx_code code = GET_CODE (op);
6982 enum machine_mode cc_mode;
6984 if (GET_RTX_CLASS (code) != '<')
6987 cc_mode = GET_MODE (XEXP (op, 0));
6988 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6991 validate_condition_mode (code, cc_mode);
6996 /* Return 1 if OP is a comparison operation that is valid for a branch
6997 insn and which is true if the corresponding bit in the CC register
7001 branch_positive_comparison_operator (op, mode)
7003 enum machine_mode mode;
7007 if (! branch_comparison_operator (op, mode))
7010 code = GET_CODE (op);
7011 return (code == EQ || code == LT || code == GT
7012 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7013 || code == LTU || code == GTU
7014 || code == UNORDERED);
7017 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
7018 We check the opcode against the mode of the CC value and disallow EQ or
7019 NE comparisons for integers. */
7022 scc_comparison_operator (op, mode)
7024 enum machine_mode mode;
7026 enum rtx_code code = GET_CODE (op);
7027 enum machine_mode cc_mode;
7029 if (GET_MODE (op) != mode && mode != VOIDmode)
7032 if (GET_RTX_CLASS (code) != '<')
7035 cc_mode = GET_MODE (XEXP (op, 0));
7036 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7039 validate_condition_mode (code, cc_mode);
7041 if (code == NE && cc_mode != CCFPmode)
7048 trap_comparison_operator (op, mode)
7050 enum machine_mode mode;
7052 if (mode != VOIDmode && mode != GET_MODE (op))
7054 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7058 boolean_operator (op, mode)
7060 enum machine_mode mode ATTRIBUTE_UNUSED;
7062 enum rtx_code code = GET_CODE (op);
7063 return (code == AND || code == IOR || code == XOR);
7067 boolean_or_operator (op, mode)
7069 enum machine_mode mode ATTRIBUTE_UNUSED;
7071 enum rtx_code code = GET_CODE (op);
7072 return (code == IOR || code == XOR);
7076 min_max_operator (op, mode)
7078 enum machine_mode mode ATTRIBUTE_UNUSED;
7080 enum rtx_code code = GET_CODE (op);
7081 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7084 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7085 mask required to convert the result of a rotate insn into a shift
7086 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7089 includes_lshift_p (shiftop, andop)
7093 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7095 shift_mask <<= INTVAL (shiftop);
7097 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7100 /* Similar, but for right shift. */
7103 includes_rshift_p (shiftop, andop)
7107 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7109 shift_mask >>= INTVAL (shiftop);
7111 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7114 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7115 to perform a left shift. It must have exactly SHIFTOP least
7116 significant 0's, then one or more 1's, then zero or more 0's. */
7119 includes_rldic_lshift_p (shiftop, andop)
7123 if (GET_CODE (andop) == CONST_INT)
7125 HOST_WIDE_INT c, lsb, shift_mask;
7128 if (c == 0 || c == ~0)
7132 shift_mask <<= INTVAL (shiftop);
7134 /* Find the least significant one bit. */
7137 /* It must coincide with the LSB of the shift mask. */
7138 if (-lsb != shift_mask)
7141 /* Invert to look for the next transition (if any). */
7144 /* Remove the low group of ones (originally low group of zeros). */
7147 /* Again find the lsb, and check we have all 1's above. */
7151 else if (GET_CODE (andop) == CONST_DOUBLE
7152 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7154 HOST_WIDE_INT low, high, lsb;
7155 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7157 low = CONST_DOUBLE_LOW (andop);
7158 if (HOST_BITS_PER_WIDE_INT < 64)
7159 high = CONST_DOUBLE_HIGH (andop);
7161 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7162 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7165 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7167 shift_mask_high = ~0;
7168 if (INTVAL (shiftop) > 32)
7169 shift_mask_high <<= INTVAL (shiftop) - 32;
7173 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7180 return high == -lsb;
7183 shift_mask_low = ~0;
7184 shift_mask_low <<= INTVAL (shiftop);
7188 if (-lsb != shift_mask_low)
7191 if (HOST_BITS_PER_WIDE_INT < 64)
7196 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7199 return high == -lsb;
7203 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7209 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7210 to perform a left shift. It must have SHIFTOP or more least
7211 signifigant 0's, with the remainder of the word 1's. */
7214 includes_rldicr_lshift_p (shiftop, andop)
7218 if (GET_CODE (andop) == CONST_INT)
7220 HOST_WIDE_INT c, lsb, shift_mask;
7223 shift_mask <<= INTVAL (shiftop);
7226 /* Find the least signifigant one bit. */
7229 /* It must be covered by the shift mask.
7230 This test also rejects c == 0. */
7231 if ((lsb & shift_mask) == 0)
7234 /* Check we have all 1's above the transition, and reject all 1's. */
7235 return c == -lsb && lsb != 1;
7237 else if (GET_CODE (andop) == CONST_DOUBLE
7238 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7240 HOST_WIDE_INT low, lsb, shift_mask_low;
7242 low = CONST_DOUBLE_LOW (andop);
7244 if (HOST_BITS_PER_WIDE_INT < 64)
7246 HOST_WIDE_INT high, shift_mask_high;
7248 high = CONST_DOUBLE_HIGH (andop);
7252 shift_mask_high = ~0;
7253 if (INTVAL (shiftop) > 32)
7254 shift_mask_high <<= INTVAL (shiftop) - 32;
7258 if ((lsb & shift_mask_high) == 0)
7261 return high == -lsb;
7267 shift_mask_low = ~0;
7268 shift_mask_low <<= INTVAL (shiftop);
7272 if ((lsb & shift_mask_low) == 0)
7275 return low == -lsb && lsb != 1;
7281 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7282 for lfq and stfq insns.
7284 Note reg1 and reg2 *must* be hard registers. To be sure we will
7285 abort if we are passed pseudo registers. */
7288 registers_ok_for_quad_peep (reg1, reg2)
7291 /* We might have been passed a SUBREG. */
7292 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7295 return (REGNO (reg1) == REGNO (reg2) - 1);
7298 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7299 addr1 and addr2 must be in consecutive memory locations
7300 (addr2 == addr1 + 8). */
7303 addrs_ok_for_quad_peep (addr1, addr2)
7310 /* Extract an offset (if used) from the first addr. */
7311 if (GET_CODE (addr1) == PLUS)
7313 /* If not a REG, return zero. */
7314 if (GET_CODE (XEXP (addr1, 0)) != REG)
7318 reg1 = REGNO (XEXP (addr1, 0));
7319 /* The offset must be constant! */
7320 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7322 offset1 = INTVAL (XEXP (addr1, 1));
7325 else if (GET_CODE (addr1) != REG)
7329 reg1 = REGNO (addr1);
7330 /* This was a simple (mem (reg)) expression. Offset is 0. */
7334 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7335 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7336 register as addr1. */
7337 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7339 if (GET_CODE (addr2) != PLUS)
7342 if (GET_CODE (XEXP (addr2, 0)) != REG
7343 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7346 if (reg1 != REGNO (XEXP (addr2, 0)))
7349 /* The offset for the second addr must be 8 more than the first addr. */
7350 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7353 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7358 /* Return the register class of a scratch register needed to copy IN into
7359 or out of a register in CLASS in MODE. If it can be done directly,
7360 NO_REGS is returned. */
7363 secondary_reload_class (class, mode, in)
7364 enum reg_class class;
7365 enum machine_mode mode ATTRIBUTE_UNUSED;
7370 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7372 && MACHOPIC_INDIRECT
7376 /* We cannot copy a symbolic operand directly into anything
7377 other than BASE_REGS for TARGET_ELF. So indicate that a
7378 register from BASE_REGS is needed as an intermediate
7381 On Darwin, pic addresses require a load from memory, which
7382 needs a base register. */
7383 if (class != BASE_REGS
7384 && (GET_CODE (in) == SYMBOL_REF
7385 || GET_CODE (in) == HIGH
7386 || GET_CODE (in) == LABEL_REF
7387 || GET_CODE (in) == CONST))
7391 if (GET_CODE (in) == REG)
7394 if (regno >= FIRST_PSEUDO_REGISTER)
7396 regno = true_regnum (in);
7397 if (regno >= FIRST_PSEUDO_REGISTER)
7401 else if (GET_CODE (in) == SUBREG)
7403 regno = true_regnum (in);
7404 if (regno >= FIRST_PSEUDO_REGISTER)
7410 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7412 if (class == GENERAL_REGS || class == BASE_REGS
7413 || (regno >= 0 && INT_REGNO_P (regno)))
7416 /* Constants, memory, and FP registers can go into FP registers. */
7417 if ((regno == -1 || FP_REGNO_P (regno))
7418 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7421 /* Memory, and AltiVec registers can go into AltiVec registers. */
7422 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7423 && class == ALTIVEC_REGS)
7426 /* We can copy among the CR registers. */
7427 if ((class == CR_REGS || class == CR0_REGS)
7428 && regno >= 0 && CR_REGNO_P (regno))
7431 /* Otherwise, we need GENERAL_REGS. */
7432 return GENERAL_REGS;
7435 /* Given a comparison operation, return the bit number in CCR to test. We
7436 know this is a valid comparison.
7438 SCC_P is 1 if this is for an scc. That means that %D will have been
7439 used instead of %C, so the bits will be in different places.
7441 Return -1 if OP isn't a valid comparison for some reason. */
7448 enum rtx_code code = GET_CODE (op);
7449 enum machine_mode cc_mode;
7454 if (GET_RTX_CLASS (code) != '<')
7459 if (GET_CODE (reg) != REG
7460 || ! CR_REGNO_P (REGNO (reg)))
7463 cc_mode = GET_MODE (reg);
7464 cc_regnum = REGNO (reg);
7465 base_bit = 4 * (cc_regnum - CR0_REGNO);
7467 validate_condition_mode (code, cc_mode);
7472 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7473 return base_bit + 1;
7474 return scc_p ? base_bit + 3 : base_bit + 2;
7476 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7477 return base_bit + 1;
7478 return base_bit + 2;
7479 case GT: case GTU: case UNLE:
7480 return base_bit + 1;
7481 case LT: case LTU: case UNGE:
7483 case ORDERED: case UNORDERED:
7484 return base_bit + 3;
7487 /* If scc, we will have done a cror to put the bit in the
7488 unordered position. So test that bit. For integer, this is ! LT
7489 unless this is an scc insn. */
7490 return scc_p ? base_bit + 3 : base_bit;
7493 return scc_p ? base_bit + 3 : base_bit + 1;
7500 /* Return the GOT register. */
7503 rs6000_got_register (value)
7504 rtx value ATTRIBUTE_UNUSED;
7506 /* The second flow pass currently (June 1999) can't update
7507 regs_ever_live without disturbing other parts of the compiler, so
7508 update it here to make the prolog/epilogue code happy. */
7509 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7510 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7512 current_function_uses_pic_offset_table = 1;
7514 return pic_offset_table_rtx;
7517 /* Function to init struct machine_function.
7518 This will be called, via a pointer variable,
7519 from push_function_context. */
7521 static struct machine_function *
7522 rs6000_init_machine_status ()
7524 return ggc_alloc_cleared (sizeof (machine_function));
7527 /* These macros test for integers and extract the low-order bits. */
7529 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7530 && GET_MODE (X) == VOIDmode)
7532 #define INT_LOWPART(X) \
7533 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7540 unsigned long val = INT_LOWPART (op);
7542 /* If the high bit is zero, the value is the first 1 bit we find
7544 if ((val & 0x80000000) == 0)
7546 if ((val & 0xffffffff) == 0)
7550 while (((val <<= 1) & 0x80000000) == 0)
7555 /* If the high bit is set and the low bit is not, or the mask is all
7556 1's, the value is zero. */
7557 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7560 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7563 while (((val >>= 1) & 1) != 0)
7574 unsigned long val = INT_LOWPART (op);
7576 /* If the low bit is zero, the value is the first 1 bit we find from
7580 if ((val & 0xffffffff) == 0)
7584 while (((val >>= 1) & 1) == 0)
7590 /* If the low bit is set and the high bit is not, or the mask is all
7591 1's, the value is 31. */
7592 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7595 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7598 while (((val <<= 1) & 0x80000000) != 0)
7604 /* Print an operand. Recognize special options, documented below. */
7607 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7608 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7610 #define SMALL_DATA_RELOC "sda21"
7611 #define SMALL_DATA_REG 0
7615 print_operand (file, x, code)
7622 unsigned HOST_WIDE_INT uval;
7627 /* Write out an instruction after the call which may be replaced
7628 with glue code by the loader. This depends on the AIX version. */
7629 asm_fprintf (file, RS6000_CALL_GLUE);
7632 /* %a is output_address. */
7635 /* If X is a constant integer whose low-order 5 bits are zero,
7636 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7637 in the AIX assembler where "sri" with a zero shift count
7638 writes a trash instruction. */
7639 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7646 /* If constant, low-order 16 bits of constant, unsigned.
7647 Otherwise, write normally. */
7649 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7651 print_operand (file, x, 0);
7655 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7656 for 64-bit mask direction. */
7657 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7660 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7664 /* There used to be a comment for 'C' reading "This is an
7665 optional cror needed for certain floating-point
7666 comparisons. Otherwise write nothing." */
7668 /* Similar, except that this is for an scc, so we must be able to
7669 encode the test in a single bit that is one. We do the above
7670 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7671 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7672 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7674 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7676 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7678 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7681 else if (GET_CODE (x) == NE)
7683 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7685 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7686 base_bit + 2, base_bit + 2);
7688 else if (TARGET_SPE && TARGET_HARD_FLOAT
7689 && GET_CODE (x) == EQ
7690 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7692 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7694 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7695 base_bit + 1, base_bit + 1);
7700 /* X is a CR register. Print the number of the EQ bit of the CR */
7701 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7702 output_operand_lossage ("invalid %%E value");
7704 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7708 /* X is a CR register. Print the shift count needed to move it
7709 to the high-order four bits. */
7710 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7711 output_operand_lossage ("invalid %%f value");
7713 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7717 /* Similar, but print the count for the rotate in the opposite
7719 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7720 output_operand_lossage ("invalid %%F value");
7722 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7726 /* X is a constant integer. If it is negative, print "m",
7727 otherwise print "z". This is to make an aze or ame insn. */
7728 if (GET_CODE (x) != CONST_INT)
7729 output_operand_lossage ("invalid %%G value");
7730 else if (INTVAL (x) >= 0)
7737 /* If constant, output low-order five bits. Otherwise, write
7740 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7742 print_operand (file, x, 0);
7746 /* If constant, output low-order six bits. Otherwise, write
7749 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7751 print_operand (file, x, 0);
7755 /* Print `i' if this is a constant, else nothing. */
7761 /* Write the bit number in CCR for jump. */
7764 output_operand_lossage ("invalid %%j code");
7766 fprintf (file, "%d", i);
7770 /* Similar, but add one for shift count in rlinm for scc and pass
7771 scc flag to `ccr_bit'. */
7774 output_operand_lossage ("invalid %%J code");
7776 /* If we want bit 31, write a shift count of zero, not 32. */
7777 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7781 /* X must be a constant. Write the 1's complement of the
7784 output_operand_lossage ("invalid %%k value");
7786 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7790 /* X must be a symbolic constant on ELF. Write an
7791 expression suitable for an 'addi' that adds in the low 16
7793 if (GET_CODE (x) != CONST)
7795 print_operand_address (file, x);
7800 if (GET_CODE (XEXP (x, 0)) != PLUS
7801 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7802 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7803 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7804 output_operand_lossage ("invalid %%K value");
7805 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7807 /* For GNU as, there must be a non-alphanumeric character
7808 between 'l' and the number. The '-' is added by
7809 print_operand() already. */
7810 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7812 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7816 /* %l is output_asm_label. */
7819 /* Write second word of DImode or DFmode reference. Works on register
7820 or non-indexed memory only. */
7821 if (GET_CODE (x) == REG)
7822 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7823 else if (GET_CODE (x) == MEM)
7825 /* Handle possible auto-increment. Since it is pre-increment and
7826 we have already done it, we can just use an offset of word. */
7827 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7828 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7829 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7832 output_address (XEXP (adjust_address_nv (x, SImode,
7836 if (small_data_operand (x, GET_MODE (x)))
7837 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7838 reg_names[SMALL_DATA_REG]);
7843 /* MB value for a mask operand. */
7844 if (! mask_operand (x, SImode))
7845 output_operand_lossage ("invalid %%m value");
7847 fprintf (file, "%d", extract_MB (x));
7851 /* ME value for a mask operand. */
7852 if (! mask_operand (x, SImode))
7853 output_operand_lossage ("invalid %%M value");
7855 fprintf (file, "%d", extract_ME (x));
7858 /* %n outputs the negative of its operand. */
7861 /* Write the number of elements in the vector times 4. */
7862 if (GET_CODE (x) != PARALLEL)
7863 output_operand_lossage ("invalid %%N value");
7865 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7869 /* Similar, but subtract 1 first. */
7870 if (GET_CODE (x) != PARALLEL)
7871 output_operand_lossage ("invalid %%O value");
7873 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7877 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7879 || INT_LOWPART (x) < 0
7880 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7881 output_operand_lossage ("invalid %%p value");
7883 fprintf (file, "%d", i);
7887 /* The operand must be an indirect memory reference. The result
7888 is the register number. */
7889 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7890 || REGNO (XEXP (x, 0)) >= 32)
7891 output_operand_lossage ("invalid %%P value");
7893 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7897 /* This outputs the logical code corresponding to a boolean
7898 expression. The expression may have one or both operands
7899 negated (if one, only the first one). For condition register
7900 logical operations, it will also treat the negated
7901 CR codes as NOTs, but not handle NOTs of them. */
7903 const char *const *t = 0;
7905 enum rtx_code code = GET_CODE (x);
7906 static const char * const tbl[3][3] = {
7907 { "and", "andc", "nor" },
7908 { "or", "orc", "nand" },
7909 { "xor", "eqv", "xor" } };
7913 else if (code == IOR)
7915 else if (code == XOR)
7918 output_operand_lossage ("invalid %%q value");
7920 if (GET_CODE (XEXP (x, 0)) != NOT)
7924 if (GET_CODE (XEXP (x, 1)) == NOT)
7935 /* X is a CR register. Print the mask for `mtcrf'. */
7936 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7937 output_operand_lossage ("invalid %%R value");
7939 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7943 /* Low 5 bits of 32 - value */
7945 output_operand_lossage ("invalid %%s value");
7947 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7951 /* PowerPC64 mask position. All 0's is excluded.
7952 CONST_INT 32-bit mask is considered sign-extended so any
7953 transition must occur within the CONST_INT, not on the boundary. */
7954 if (! mask64_operand (x, DImode))
7955 output_operand_lossage ("invalid %%S value");
7957 uval = INT_LOWPART (x);
7959 if (uval & 1) /* Clear Left */
7961 #if HOST_BITS_PER_WIDE_INT > 64
7962 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7966 else /* Clear Right */
7969 #if HOST_BITS_PER_WIDE_INT > 64
7970 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
7978 fprintf (file, "%d", i);
7982 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7983 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7986 /* Bit 3 is OV bit. */
7987 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7989 /* If we want bit 31, write a shift count of zero, not 32. */
7990 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7994 /* Print the symbolic name of a branch target register. */
7995 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7996 && REGNO (x) != COUNT_REGISTER_REGNUM))
7997 output_operand_lossage ("invalid %%T value");
7998 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7999 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8001 fputs ("ctr", file);
8005 /* High-order 16 bits of constant for use in unsigned operand. */
8007 output_operand_lossage ("invalid %%u value");
8009 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8010 (INT_LOWPART (x) >> 16) & 0xffff);
8014 /* High-order 16 bits of constant for use in signed operand. */
8016 output_operand_lossage ("invalid %%v value");
8018 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8019 (INT_LOWPART (x) >> 16) & 0xffff);
8023 /* Print `u' if this has an auto-increment or auto-decrement. */
8024 if (GET_CODE (x) == MEM
8025 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8026 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8031 /* Print the trap code for this operand. */
8032 switch (GET_CODE (x))
8035 fputs ("eq", file); /* 4 */
8038 fputs ("ne", file); /* 24 */
8041 fputs ("lt", file); /* 16 */
8044 fputs ("le", file); /* 20 */
8047 fputs ("gt", file); /* 8 */
8050 fputs ("ge", file); /* 12 */
8053 fputs ("llt", file); /* 2 */
8056 fputs ("lle", file); /* 6 */
8059 fputs ("lgt", file); /* 1 */
8062 fputs ("lge", file); /* 5 */
8070 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8073 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8074 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8076 print_operand (file, x, 0);
8080 /* MB value for a PowerPC64 rldic operand. */
8081 val = (GET_CODE (x) == CONST_INT
8082 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8087 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8088 if ((val <<= 1) < 0)
8091 #if HOST_BITS_PER_WIDE_INT == 32
8092 if (GET_CODE (x) == CONST_INT && i >= 0)
8093 i += 32; /* zero-extend high-part was all 0's */
8094 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8096 val = CONST_DOUBLE_LOW (x);
8103 for ( ; i < 64; i++)
8104 if ((val <<= 1) < 0)
8109 fprintf (file, "%d", i + 1);
8113 if (GET_CODE (x) == MEM
8114 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
8119 /* Like 'L', for third word of TImode */
8120 if (GET_CODE (x) == REG)
8121 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8122 else if (GET_CODE (x) == MEM)
8124 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8125 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8126 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8128 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8129 if (small_data_operand (x, GET_MODE (x)))
8130 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8131 reg_names[SMALL_DATA_REG]);
8136 /* X is a SYMBOL_REF. Write out the name preceded by a
8137 period and without any trailing data in brackets. Used for function
8138 names. If we are configured for System V (or the embedded ABI) on
8139 the PowerPC, do not emit the period, since those systems do not use
8140 TOCs and the like. */
8141 if (GET_CODE (x) != SYMBOL_REF)
8144 if (XSTR (x, 0)[0] != '.')
8146 switch (DEFAULT_ABI)
8156 case ABI_AIX_NODESC:
8162 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8164 assemble_name (file, XSTR (x, 0));
8169 /* Like 'L', for last word of TImode. */
8170 if (GET_CODE (x) == REG)
8171 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8172 else if (GET_CODE (x) == MEM)
8174 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8175 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8176 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8178 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8179 if (small_data_operand (x, GET_MODE (x)))
8180 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8181 reg_names[SMALL_DATA_REG]);
8185 /* Print AltiVec or SPE memory operand. */
8190 if (GET_CODE (x) != MEM)
8198 if (GET_CODE (tmp) == REG)
8200 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8203 /* Handle [reg+UIMM]. */
8204 else if (GET_CODE (tmp) == PLUS &&
8205 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8209 if (GET_CODE (XEXP (tmp, 0)) != REG)
8212 x = INTVAL (XEXP (tmp, 1));
8213 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8217 /* Fall through. Must be [reg+reg]. */
8219 if (GET_CODE (tmp) == REG)
8220 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8221 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8223 if (REGNO (XEXP (tmp, 0)) == 0)
8224 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8225 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8227 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8228 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8236 if (GET_CODE (x) == REG)
8237 fprintf (file, "%s", reg_names[REGNO (x)]);
8238 else if (GET_CODE (x) == MEM)
8240 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8241 know the width from the mode. */
8242 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8243 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8244 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8245 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8246 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8247 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8249 output_address (XEXP (x, 0));
8252 output_addr_const (file, x);
8256 output_operand_lossage ("invalid %%xn code");
8260 /* Print the address of an operand. */
8263 print_operand_address (file, x)
8267 if (GET_CODE (x) == REG)
8268 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8269 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8270 || GET_CODE (x) == LABEL_REF)
8272 output_addr_const (file, x);
8273 if (small_data_operand (x, GET_MODE (x)))
8274 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8275 reg_names[SMALL_DATA_REG]);
8276 else if (TARGET_TOC)
8279 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8281 if (REGNO (XEXP (x, 0)) == 0)
8282 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8283 reg_names[ REGNO (XEXP (x, 0)) ]);
8285 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8286 reg_names[ REGNO (XEXP (x, 1)) ]);
8288 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8290 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8291 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8294 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8295 && CONSTANT_P (XEXP (x, 1)))
8297 output_addr_const (file, XEXP (x, 1));
8298 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8302 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8303 && CONSTANT_P (XEXP (x, 1)))
8305 fprintf (file, "lo16(");
8306 output_addr_const (file, XEXP (x, 1));
8307 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8310 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8312 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8314 rtx contains_minus = XEXP (x, 1);
8318 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8319 turn it into (sym) for output_addr_const. */
8320 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8321 contains_minus = XEXP (contains_minus, 0);
8323 minus = XEXP (contains_minus, 0);
8324 symref = XEXP (minus, 0);
8325 XEXP (contains_minus, 0) = symref;
8330 name = XSTR (symref, 0);
8331 newname = alloca (strlen (name) + sizeof ("@toc"));
8332 strcpy (newname, name);
8333 strcat (newname, "@toc");
8334 XSTR (symref, 0) = newname;
8336 output_addr_const (file, XEXP (x, 1));
8338 XSTR (symref, 0) = name;
8339 XEXP (contains_minus, 0) = minus;
8342 output_addr_const (file, XEXP (x, 1));
8344 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8350 /* Target hook for assembling integer objects. The PowerPC version has
8351 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8352 is defined. It also needs to handle DI-mode objects on 64-bit
8356 rs6000_assemble_integer (x, size, aligned_p)
8361 #ifdef RELOCATABLE_NEEDS_FIXUP
8362 /* Special handling for SI values. */
8363 if (size == 4 && aligned_p)
8365 extern int in_toc_section PARAMS ((void));
8366 static int recurse = 0;
8368 /* For -mrelocatable, we mark all addresses that need to be fixed up
8369 in the .fixup section. */
8370 if (TARGET_RELOCATABLE
8371 && !in_toc_section ()
8372 && !in_text_section ()
8374 && GET_CODE (x) != CONST_INT
8375 && GET_CODE (x) != CONST_DOUBLE
8381 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8383 ASM_OUTPUT_LABEL (asm_out_file, buf);
8384 fprintf (asm_out_file, "\t.long\t(");
8385 output_addr_const (asm_out_file, x);
8386 fprintf (asm_out_file, ")@fixup\n");
8387 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8388 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8389 fprintf (asm_out_file, "\t.long\t");
8390 assemble_name (asm_out_file, buf);
8391 fprintf (asm_out_file, "\n\t.previous\n");
8395 /* Remove initial .'s to turn a -mcall-aixdesc function
8396 address into the address of the descriptor, not the function
8398 else if (GET_CODE (x) == SYMBOL_REF
8399 && XSTR (x, 0)[0] == '.'
8400 && DEFAULT_ABI == ABI_AIX)
8402 const char *name = XSTR (x, 0);
8403 while (*name == '.')
8406 fprintf (asm_out_file, "\t.long\t%s\n", name);
8410 #endif /* RELOCATABLE_NEEDS_FIXUP */
8411 return default_assemble_integer (x, size, aligned_p);
8414 #ifdef HAVE_GAS_HIDDEN
8415 /* Emit an assembler directive to set symbol visibility for DECL to
8419 rs6000_assemble_visibility (decl, vis)
8423 /* Functions need to have their entry point symbol visibility set as
8424 well as their descriptor symbol visibility. */
8425 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8427 static const char * const visibility_types[] = {
8428 NULL, "internal", "hidden", "protected"
8431 const char *name, *type;
8433 name = ((* targetm.strip_name_encoding)
8434 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8435 type = visibility_types[vis];
8437 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8438 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8441 default_assemble_visibility (decl, vis);
8446 rs6000_reverse_condition (mode, code)
8447 enum machine_mode mode;
8450 /* Reversal of FP compares takes care -- an ordered compare
8451 becomes an unordered compare and vice versa. */
8452 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8453 return reverse_condition_maybe_unordered (code);
8455 return reverse_condition (code);
8458 /* Generate a compare for CODE. Return a brand-new rtx that
8459 represents the result of the compare. */
8462 rs6000_generate_compare (code)
8465 enum machine_mode comp_mode;
8468 if (rs6000_compare_fp_p)
8469 comp_mode = CCFPmode;
8470 else if (code == GTU || code == LTU
8471 || code == GEU || code == LEU)
8472 comp_mode = CCUNSmode;
8476 /* First, the compare. */
8477 compare_result = gen_reg_rtx (comp_mode);
8479 /* SPE FP compare instructions on the GPRs. Yuck! */
8480 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8482 rtx cmp, or1, or2, or_result, compare_result2;
8490 cmp = flag_unsafe_math_optimizations
8491 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8493 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8494 rs6000_compare_op1);
8502 cmp = flag_unsafe_math_optimizations
8503 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8505 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8506 rs6000_compare_op1);
8514 cmp = flag_unsafe_math_optimizations
8515 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8517 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8518 rs6000_compare_op1);
8524 /* Synthesize LE and GE from LT/GT || EQ. */
8525 if (code == LE || code == GE || code == LEU || code == GEU)
8527 /* Synthesize GE/LE frome GT/LT || EQ. */
8533 case LE: code = LT; break;
8534 case GE: code = GT; break;
8535 case LEU: code = LT; break;
8536 case GEU: code = GT; break;
8540 or1 = gen_reg_rtx (SImode);
8541 or2 = gen_reg_rtx (SImode);
8542 or_result = gen_reg_rtx (CCEQmode);
8543 compare_result2 = gen_reg_rtx (CCFPmode);
8546 cmp = flag_unsafe_math_optimizations
8547 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8549 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8550 rs6000_compare_op1);
8553 /* The MC8540 FP compare instructions set the CR bits
8554 differently than other PPC compare instructions. For
8555 that matter, there is no generic test instruction, but a
8556 testgt, testlt, and testeq. For a true condition, bit 2
8557 is set (x1xx) in the CR. Following the traditional CR
8563 ... bit 2 would be a GT CR alias, so later on we
8564 look in the GT bits for the branch instructions.
8565 However, we must be careful to emit correct RTL in
8566 the meantime, so optimizations don't get confused. */
8568 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8569 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8571 /* OR them together. */
8572 cmp = gen_rtx_SET (VOIDmode, or_result,
8573 gen_rtx_COMPARE (CCEQmode,
8574 gen_rtx_IOR (SImode, or1, or2),
8576 compare_result = or_result;
8581 /* We only care about 1 bit (x1xx), so map everything to NE to
8582 maintain rtl sanity. We'll get to the right bit (x1xx) at
8583 code output time. */
8584 if (code == NE || code == LTGT)
8585 /* Do the inverse here because we have no cmpne
8586 instruction. We use the cmpeq instruction and expect
8587 to get a 0 instead. */
8596 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8597 gen_rtx_COMPARE (comp_mode,
8599 rs6000_compare_op1)));
8601 /* Some kinds of FP comparisons need an OR operation;
8602 except for flag_unsafe_math_optimizations we don't bother. */
8603 if (rs6000_compare_fp_p
8604 && ! flag_unsafe_math_optimizations
8605 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8606 && (code == LE || code == GE
8607 || code == UNEQ || code == LTGT
8608 || code == UNGT || code == UNLT))
8610 enum rtx_code or1, or2;
8611 rtx or1_rtx, or2_rtx, compare2_rtx;
8612 rtx or_result = gen_reg_rtx (CCEQmode);
8616 case LE: or1 = LT; or2 = EQ; break;
8617 case GE: or1 = GT; or2 = EQ; break;
8618 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8619 case LTGT: or1 = LT; or2 = GT; break;
8620 case UNGT: or1 = UNORDERED; or2 = GT; break;
8621 case UNLT: or1 = UNORDERED; or2 = LT; break;
8624 validate_condition_mode (or1, comp_mode);
8625 validate_condition_mode (or2, comp_mode);
8626 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8627 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8628 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8629 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8631 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8633 compare_result = or_result;
8637 validate_condition_mode (code, GET_MODE (compare_result));
8639 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8643 /* Emit the RTL for an sCOND pattern. */
8646 rs6000_emit_sCOND (code, result)
8651 enum machine_mode op_mode;
8653 condition_rtx = rs6000_generate_compare (code);
8655 op_mode = GET_MODE (rs6000_compare_op0);
8656 if (op_mode == VOIDmode)
8657 op_mode = GET_MODE (rs6000_compare_op1);
8659 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8661 PUT_MODE (condition_rtx, DImode);
8662 convert_move (result, condition_rtx, 0);
8666 PUT_MODE (condition_rtx, SImode);
8667 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8671 /* Emit a branch of kind CODE to location LOC. */
8674 rs6000_emit_cbranch (code, loc)
8678 rtx condition_rtx, loc_ref;
8680 condition_rtx = rs6000_generate_compare (code);
8681 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8682 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8683 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8687 /* Return the string to output a conditional branch to LABEL, which is
8688 the operand number of the label, or -1 if the branch is really a
8691 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8692 condition code register and its mode specifies what kind of
8695 REVERSED is nonzero if we should reverse the sense of the comparison.
8697 INSN is the insn. */
8700 output_cbranch (op, label, reversed, insn)
8706 static char string[64];
8707 enum rtx_code code = GET_CODE (op);
8708 rtx cc_reg = XEXP (op, 0);
8709 enum machine_mode mode = GET_MODE (cc_reg);
8710 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8711 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8712 int really_reversed = reversed ^ need_longbranch;
8718 validate_condition_mode (code, mode);
8720 /* Work out which way this really branches. We could use
8721 reverse_condition_maybe_unordered here always but this
8722 makes the resulting assembler clearer. */
8723 if (really_reversed)
8725 /* Reversal of FP compares takes care -- an ordered compare
8726 becomes an unordered compare and vice versa. */
8727 if (mode == CCFPmode)
8728 code = reverse_condition_maybe_unordered (code);
8730 code = reverse_condition (code);
8733 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8735 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8738 /* Opposite of GT. */
8740 else if (code == NE)
8748 /* Not all of these are actually distinct opcodes, but
8749 we distinguish them for clarity of the resulting assembler. */
8751 ccode = "ne"; break;
8753 ccode = "eq"; break;
8755 ccode = "ge"; break;
8756 case GT: case GTU: case UNGT:
8757 ccode = "gt"; break;
8759 ccode = "le"; break;
8760 case LT: case LTU: case UNLT:
8761 ccode = "lt"; break;
8762 case UNORDERED: ccode = "un"; break;
8763 case ORDERED: ccode = "nu"; break;
8764 case UNGE: ccode = "nl"; break;
8765 case UNLE: ccode = "ng"; break;
8770 /* Maybe we have a guess as to how likely the branch is.
8771 The old mnemonics don't have a way to specify this information. */
8773 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8774 if (note != NULL_RTX)
8776 /* PROB is the difference from 50%. */
8777 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8778 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8780 /* Only hint for highly probable/improbable branches on newer
8781 cpus as static prediction overrides processor dynamic
8782 prediction. For older cpus we may as well always hint, but
8783 assume not taken for branches that are very close to 50% as a
8784 mispredicted taken branch is more expensive than a
8785 mispredicted not-taken branch. */
8787 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8789 if (abs (prob) > REG_BR_PROB_BASE / 20
8790 && ((prob > 0) ^ need_longbranch))
8798 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8800 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8802 /* We need to escape any '%' characters in the reg_names string.
8803 Assume they'd only be the first character... */
8804 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8806 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8810 /* If the branch distance was too far, we may have to use an
8811 unconditional branch to go the distance. */
8812 if (need_longbranch)
8813 s += sprintf (s, ",$+8\n\tb %s", label);
8815 s += sprintf (s, ",%s", label);
8821 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8822 operands of the last comparison is nonzero/true, FALSE_COND if it
8823 is zero/false. Return 0 if the hardware has no such operation. */
8826 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8832 enum rtx_code code = GET_CODE (op);
8833 rtx op0 = rs6000_compare_op0;
8834 rtx op1 = rs6000_compare_op1;
8836 enum machine_mode compare_mode = GET_MODE (op0);
8837 enum machine_mode result_mode = GET_MODE (dest);
8840 /* These modes should always match. */
8841 if (GET_MODE (op1) != compare_mode
8842 /* In the isel case however, we can use a compare immediate, so
8843 op1 may be a small constant. */
8844 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8846 if (GET_MODE (true_cond) != result_mode)
8848 if (GET_MODE (false_cond) != result_mode)
8851 /* First, work out if the hardware can do this at all, or
8852 if it's too slow... */
8853 if (! rs6000_compare_fp_p)
8856 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8860 /* Eliminate half of the comparisons by switching operands, this
8861 makes the remaining code simpler. */
8862 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8863 || code == LTGT || code == LT)
8865 code = reverse_condition_maybe_unordered (code);
8867 true_cond = false_cond;
8871 /* UNEQ and LTGT take four instructions for a comparison with zero,
8872 it'll probably be faster to use a branch here too. */
8876 if (GET_CODE (op1) == CONST_DOUBLE)
8877 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8879 /* We're going to try to implement comparisons by performing
8880 a subtract, then comparing against zero. Unfortunately,
8881 Inf - Inf is NaN which is not zero, and so if we don't
8882 know that the operand is finite and the comparison
8883 would treat EQ different to UNORDERED, we can't do it. */
8884 if (! flag_unsafe_math_optimizations
8885 && code != GT && code != UNGE
8886 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8887 /* Constructs of the form (a OP b ? a : b) are safe. */
8888 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8889 || (! rtx_equal_p (op0, true_cond)
8890 && ! rtx_equal_p (op1, true_cond))))
8892 /* At this point we know we can use fsel. */
8894 /* Reduce the comparison to a comparison against zero. */
8895 temp = gen_reg_rtx (compare_mode);
8896 emit_insn (gen_rtx_SET (VOIDmode, temp,
8897 gen_rtx_MINUS (compare_mode, op0, op1)));
8899 op1 = CONST0_RTX (compare_mode);
8901 /* If we don't care about NaNs we can reduce some of the comparisons
8902 down to faster ones. */
8903 if (flag_unsafe_math_optimizations)
8909 true_cond = false_cond;
8922 /* Now, reduce everything down to a GE. */
8929 temp = gen_reg_rtx (compare_mode);
8930 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8935 temp = gen_reg_rtx (compare_mode);
8936 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8941 temp = gen_reg_rtx (compare_mode);
8942 emit_insn (gen_rtx_SET (VOIDmode, temp,
8943 gen_rtx_NEG (compare_mode,
8944 gen_rtx_ABS (compare_mode, op0))));
8949 temp = gen_reg_rtx (result_mode);
8950 emit_insn (gen_rtx_SET (VOIDmode, temp,
8951 gen_rtx_IF_THEN_ELSE (result_mode,
8952 gen_rtx_GE (VOIDmode,
8954 true_cond, false_cond)));
8956 true_cond = false_cond;
8958 temp = gen_reg_rtx (compare_mode);
8959 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8964 temp = gen_reg_rtx (result_mode);
8965 emit_insn (gen_rtx_SET (VOIDmode, temp,
8966 gen_rtx_IF_THEN_ELSE (result_mode,
8967 gen_rtx_GE (VOIDmode,
8969 true_cond, false_cond)));
8971 false_cond = true_cond;
8973 temp = gen_reg_rtx (compare_mode);
8974 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8982 emit_insn (gen_rtx_SET (VOIDmode, dest,
8983 gen_rtx_IF_THEN_ELSE (result_mode,
8984 gen_rtx_GE (VOIDmode,
8986 true_cond, false_cond)));
8990 /* Same as above, but for ints (isel). */
8993 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8999 rtx condition_rtx, cr;
9001 /* All isel implementations thus far are 32-bits. */
9002 if (GET_MODE (rs6000_compare_op0) != SImode)
9005 /* We still have to do the compare, because isel doesn't do a
9006 compare, it just looks at the CRx bits set by a previous compare
9008 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9009 cr = XEXP (condition_rtx, 0);
9011 if (GET_MODE (cr) == CCmode)
9012 emit_insn (gen_isel_signed (dest, condition_rtx,
9013 true_cond, false_cond, cr));
9015 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9016 true_cond, false_cond, cr));
9022 output_isel (operands)
9027 code = GET_CODE (operands[1]);
9028 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9030 PUT_CODE (operands[1], reverse_condition (code));
9031 return "isel %0,%3,%2,%j1";
9034 return "isel %0,%2,%3,%j1";
9038 rs6000_emit_minmax (dest, code, op0, op1)
9044 enum machine_mode mode = GET_MODE (op0);
9048 if (code == SMAX || code == SMIN)
9053 if (code == SMAX || code == UMAX)
9054 target = emit_conditional_move (dest, c, op0, op1, mode,
9057 target = emit_conditional_move (dest, c, op0, op1, mode,
9059 if (target == NULL_RTX)
9062 emit_move_insn (dest, target);
9065 /* This page contains routines that are used to determine what the
9066 function prologue and epilogue code will do and write them out. */
9068 /* Return the first fixed-point register that is required to be
9069 saved. 32 if none. */
9072 first_reg_to_save ()
9076 /* Find lowest numbered live register. */
9077 for (first_reg = 13; first_reg <= 31; first_reg++)
9078 if (regs_ever_live[first_reg]
9079 && (! call_used_regs[first_reg]
9080 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9081 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9082 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9087 && current_function_uses_pic_offset_table
9088 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9089 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9095 /* Similar, for FP regs. */
9098 first_fp_reg_to_save ()
9102 /* Find lowest numbered live register. */
9103 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9104 if (regs_ever_live[first_reg])
9110 /* Similar, for AltiVec regs. */
9113 first_altivec_reg_to_save ()
9117 /* Stack frame remains as is unless we are in AltiVec ABI. */
9118 if (! TARGET_ALTIVEC_ABI)
9119 return LAST_ALTIVEC_REGNO + 1;
9121 /* Find lowest numbered live register. */
9122 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9123 if (regs_ever_live[i])
9129 /* Return a 32-bit mask of the AltiVec registers we need to set in
9130 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9131 the 32-bit word is 0. */
9134 compute_vrsave_mask ()
9136 unsigned int i, mask = 0;
9138 /* First, find out if we use _any_ altivec registers. */
9139 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9140 if (regs_ever_live[i])
9141 mask |= ALTIVEC_REG_BIT (i);
9146 /* Next, remove the argument registers from the set. These must
9147 be in the VRSAVE mask set by the caller, so we don't need to add
9148 them in again. More importantly, the mask we compute here is
9149 used to generate CLOBBERs in the set_vrsave insn, and we do not
9150 wish the argument registers to die. */
9151 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9152 mask &= ~ALTIVEC_REG_BIT (i);
9154 /* Similarly, remove the return value from the set. */
9157 diddle_return_value (is_altivec_return_reg, &yes);
9159 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9166 is_altivec_return_reg (reg, xyes)
9170 bool *yes = (bool *) xyes;
9171 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9176 /* Calculate the stack information for the current function. This is
9177 complicated by having two separate calling sequences, the AIX calling
9178 sequence and the V.4 calling sequence.
9180 AIX (and Darwin/Mac OS X) stack frames look like:
9182 SP----> +---------------------------------------+
9183 | back chain to caller | 0 0
9184 +---------------------------------------+
9185 | saved CR | 4 8 (8-11)
9186 +---------------------------------------+
9188 +---------------------------------------+
9189 | reserved for compilers | 12 24
9190 +---------------------------------------+
9191 | reserved for binders | 16 32
9192 +---------------------------------------+
9193 | saved TOC pointer | 20 40
9194 +---------------------------------------+
9195 | Parameter save area (P) | 24 48
9196 +---------------------------------------+
9197 | Alloca space (A) | 24+P etc.
9198 +---------------------------------------+
9199 | Local variable space (L) | 24+P+A
9200 +---------------------------------------+
9201 | Float/int conversion temporary (X) | 24+P+A+L
9202 +---------------------------------------+
9203 | Save area for AltiVec registers (W) | 24+P+A+L+X
9204 +---------------------------------------+
9205 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9206 +---------------------------------------+
9207 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9208 +---------------------------------------+
9209 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9210 +---------------------------------------+
9211 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9212 +---------------------------------------+
9213 old SP->| back chain to caller's caller |
9214 +---------------------------------------+
9216 The required alignment for AIX configurations is two words (i.e., 8
9220 V.4 stack frames look like:
9222 SP----> +---------------------------------------+
9223 | back chain to caller | 0
9224 +---------------------------------------+
9225 | caller's saved LR | 4
9226 +---------------------------------------+
9227 | Parameter save area (P) | 8
9228 +---------------------------------------+
9229 | Alloca space (A) | 8+P
9230 +---------------------------------------+
9231 | Varargs save area (V) | 8+P+A
9232 +---------------------------------------+
9233 | Local variable space (L) | 8+P+A+V
9234 +---------------------------------------+
9235 | Float/int conversion temporary (X) | 8+P+A+V+L
9236 +---------------------------------------+
9237 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9238 +---------------------------------------+
9239 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9240 +---------------------------------------+
9241 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9242 +---------------------------------------+
9243 | SPE: area for 64-bit GP registers |
9244 +---------------------------------------+
9245 | SPE alignment padding |
9246 +---------------------------------------+
9247 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9248 +---------------------------------------+
9249 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9250 +---------------------------------------+
9251 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9252 +---------------------------------------+
9253 old SP->| back chain to caller's caller |
9254 +---------------------------------------+
9256 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9257 given. (But note below and in sysv4.h that we require only 8 and
9258 may round up the size of our stack frame anyways. The historical
9259 reason is early versions of powerpc-linux which didn't properly
9260 align the stack at program startup. A happy side-effect is that
9261 -mno-eabi libraries can be used with -meabi programs.)
9263 The EABI configuration defaults to the V.4 layout, unless
9264 -mcall-aix is used, in which case the AIX layout is used. However,
9265 the stack alignment requirements may differ. If -mno-eabi is not
9266 given, the required stack alignment is 8 bytes; if -mno-eabi is
9267 given, the required alignment is 16 bytes. (But see V.4 comment
9270 #ifndef ABI_STACK_BOUNDARY
9271 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9275 rs6000_stack_info ()
9277 static rs6000_stack_t info, zero_info;
9278 rs6000_stack_t *info_ptr = &info;
9279 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9283 /* Zero all fields portably. */
9288 /* Cache value so we don't rescan instruction chain over and over. */
9289 if (cfun->machine->insn_chain_scanned_p == 0)
9291 cfun->machine->insn_chain_scanned_p = 1;
9292 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9296 /* Select which calling sequence. */
9297 info_ptr->abi = DEFAULT_ABI;
9299 /* Calculate which registers need to be saved & save area size. */
9300 info_ptr->first_gp_reg_save = first_reg_to_save ();
9301 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9302 even if it currently looks like we won't. */
9303 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9304 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9305 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9306 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9307 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9309 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9311 /* For the SPE, we have an additional upper 32-bits on each GPR.
9312 Ideally we should save the entire 64-bits only when the upper
9313 half is used in SIMD instructions. Since we only record
9314 registers live (not the size they are used in), this proves
9315 difficult because we'd have to traverse the instruction chain at
9316 the right time, taking reload into account. This is a real pain,
9317 so we opt to save the GPRs in 64-bits always if but one register
9318 gets used in 64-bits. Otherwise, all the registers in the frame
9319 get saved in 32-bits.
9321 So... since when we save all GPRs (except the SP) in 64-bits, the
9322 traditional GP save area will be empty. */
9323 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9324 info_ptr->gp_size = 0;
9326 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9327 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9329 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9330 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9331 - info_ptr->first_altivec_reg_save);
9333 /* Does this function call anything? */
9334 info_ptr->calls_p = (! current_function_is_leaf
9335 || cfun->machine->ra_needs_full_frame);
9337 /* Determine if we need to save the link register. */
9338 if (rs6000_ra_ever_killed ()
9339 || (DEFAULT_ABI == ABI_AIX
9340 && current_function_profile
9341 && !TARGET_PROFILE_KERNEL)
9342 #ifdef TARGET_RELOCATABLE
9343 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9345 || (info_ptr->first_fp_reg_save != 64
9346 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9347 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9348 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9349 || (DEFAULT_ABI == ABI_DARWIN
9351 && current_function_uses_pic_offset_table)
9352 || info_ptr->calls_p)
9354 info_ptr->lr_save_p = 1;
9355 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9358 /* Determine if we need to save the condition code registers. */
9359 if (regs_ever_live[CR2_REGNO]
9360 || regs_ever_live[CR3_REGNO]
9361 || regs_ever_live[CR4_REGNO])
9363 info_ptr->cr_save_p = 1;
9364 if (DEFAULT_ABI == ABI_V4)
9365 info_ptr->cr_size = reg_size;
9368 /* If the current function calls __builtin_eh_return, then we need
9369 to allocate stack space for registers that will hold data for
9370 the exception handler. */
9371 if (current_function_calls_eh_return)
9374 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9377 /* SPE saves EH registers in 64-bits. */
9378 ehrd_size = i * (TARGET_SPE_ABI
9379 && info_ptr->spe_64bit_regs_used != 0
9380 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9385 /* Determine various sizes. */
9386 info_ptr->reg_size = reg_size;
9387 info_ptr->fixed_size = RS6000_SAVE_AREA;
9388 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9389 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9390 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9393 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9394 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9396 info_ptr->spe_gp_size = 0;
9398 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9400 info_ptr->vrsave_mask = compute_vrsave_mask ();
9401 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9405 info_ptr->vrsave_mask = 0;
9406 info_ptr->vrsave_size = 0;
9409 /* Calculate the offsets. */
9410 switch (DEFAULT_ABI)
9417 case ABI_AIX_NODESC:
9419 info_ptr->fp_save_offset = - info_ptr->fp_size;
9420 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9422 if (TARGET_ALTIVEC_ABI)
9424 info_ptr->vrsave_save_offset
9425 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9427 /* Align stack so vector save area is on a quadword boundary. */
9428 if (info_ptr->altivec_size != 0)
9429 info_ptr->altivec_padding_size
9430 = 16 - (-info_ptr->vrsave_save_offset % 16);
9432 info_ptr->altivec_padding_size = 0;
9434 info_ptr->altivec_save_offset
9435 = info_ptr->vrsave_save_offset
9436 - info_ptr->altivec_padding_size
9437 - info_ptr->altivec_size;
9439 /* Adjust for AltiVec case. */
9440 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9443 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9444 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9445 info_ptr->lr_save_offset = 2*reg_size;
9449 info_ptr->fp_save_offset = - info_ptr->fp_size;
9450 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9451 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9453 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9455 /* Align stack so SPE GPR save area is aligned on a
9456 double-word boundary. */
9457 if (info_ptr->spe_gp_size != 0)
9458 info_ptr->spe_padding_size
9459 = 8 - (-info_ptr->cr_save_offset % 8);
9461 info_ptr->spe_padding_size = 0;
9463 info_ptr->spe_gp_save_offset
9464 = info_ptr->cr_save_offset
9465 - info_ptr->spe_padding_size
9466 - info_ptr->spe_gp_size;
9468 /* Adjust for SPE case. */
9469 info_ptr->toc_save_offset
9470 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9472 else if (TARGET_ALTIVEC_ABI)
9474 info_ptr->vrsave_save_offset
9475 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9477 /* Align stack so vector save area is on a quadword boundary. */
9478 if (info_ptr->altivec_size != 0)
9479 info_ptr->altivec_padding_size
9480 = 16 - (-info_ptr->vrsave_save_offset % 16);
9482 info_ptr->altivec_padding_size = 0;
9484 info_ptr->altivec_save_offset
9485 = info_ptr->vrsave_save_offset
9486 - info_ptr->altivec_padding_size
9487 - info_ptr->altivec_size;
9489 /* Adjust for AltiVec case. */
9490 info_ptr->toc_save_offset
9491 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9494 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9495 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9496 info_ptr->lr_save_offset = reg_size;
9500 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9502 + info_ptr->altivec_size
9503 + info_ptr->altivec_padding_size
9504 + info_ptr->vrsave_size
9505 + info_ptr->spe_gp_size
9506 + info_ptr->spe_padding_size
9510 + info_ptr->vrsave_size
9511 + info_ptr->toc_size,
9512 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9515 total_raw_size = (info_ptr->vars_size
9516 + info_ptr->parm_size
9517 + info_ptr->save_size
9518 + info_ptr->varargs_size
9519 + info_ptr->fixed_size);
9521 info_ptr->total_size =
9522 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9524 /* Determine if we need to allocate any stack frame:
9526 For AIX we need to push the stack if a frame pointer is needed
9527 (because the stack might be dynamically adjusted), if we are
9528 debugging, if we make calls, or if the sum of fp_save, gp_save,
9529 and local variables are more than the space needed to save all
9530 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9531 + 18*8 = 288 (GPR13 reserved).
9533 For V.4 we don't have the stack cushion that AIX uses, but assume
9534 that the debugger can handle stackless frames. */
9536 if (info_ptr->calls_p)
9537 info_ptr->push_p = 1;
9539 else if (DEFAULT_ABI == ABI_V4)
9540 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9542 else if (frame_pointer_needed)
9543 info_ptr->push_p = 1;
9545 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9546 info_ptr->push_p = 1;
9550 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9552 /* Zero offsets if we're not saving those registers. */
9553 if (info_ptr->fp_size == 0)
9554 info_ptr->fp_save_offset = 0;
9556 if (info_ptr->gp_size == 0)
9557 info_ptr->gp_save_offset = 0;
9559 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9560 info_ptr->altivec_save_offset = 0;
9562 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9563 info_ptr->vrsave_save_offset = 0;
9565 if (! TARGET_SPE_ABI
9566 || info_ptr->spe_64bit_regs_used == 0
9567 || info_ptr->spe_gp_size == 0)
9568 info_ptr->spe_gp_save_offset = 0;
9570 if (! info_ptr->lr_save_p)
9571 info_ptr->lr_save_offset = 0;
9573 if (! info_ptr->cr_save_p)
9574 info_ptr->cr_save_offset = 0;
9576 if (! info_ptr->toc_save_p)
9577 info_ptr->toc_save_offset = 0;
9582 /* Return true if the current function uses any GPRs in 64-bit SIMD
9586 spe_func_has_64bit_regs_p ()
9590 /* Functions that save and restore all the call-saved registers will
9591 need to save/restore the registers in 64-bits. */
9592 if (current_function_calls_eh_return
9593 || current_function_calls_setjmp
9594 || current_function_has_nonlocal_goto)
9597 insns = get_insns ();
9599 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9606 if (GET_CODE (i) == SET
9607 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9616 debug_stack_info (info)
9617 rs6000_stack_t *info;
9619 const char *abi_string;
9622 info = rs6000_stack_info ();
9624 fprintf (stderr, "\nStack information for function %s:\n",
9625 ((current_function_decl && DECL_NAME (current_function_decl))
9626 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9631 default: abi_string = "Unknown"; break;
9632 case ABI_NONE: abi_string = "NONE"; break;
9634 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9635 case ABI_DARWIN: abi_string = "Darwin"; break;
9636 case ABI_V4: abi_string = "V.4"; break;
9639 fprintf (stderr, "\tABI = %5s\n", abi_string);
9641 if (TARGET_ALTIVEC_ABI)
9642 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9645 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9647 if (info->first_gp_reg_save != 32)
9648 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9650 if (info->first_fp_reg_save != 64)
9651 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9653 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9654 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9655 info->first_altivec_reg_save);
9657 if (info->lr_save_p)
9658 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9660 if (info->cr_save_p)
9661 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9663 if (info->toc_save_p)
9664 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9666 if (info->vrsave_mask)
9667 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9670 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9673 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9675 if (info->gp_save_offset)
9676 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9678 if (info->fp_save_offset)
9679 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9681 if (info->altivec_save_offset)
9682 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9683 info->altivec_save_offset);
9685 if (info->spe_gp_save_offset)
9686 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9687 info->spe_gp_save_offset);
9689 if (info->vrsave_save_offset)
9690 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9691 info->vrsave_save_offset);
9693 if (info->lr_save_offset)
9694 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9696 if (info->cr_save_offset)
9697 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9699 if (info->toc_save_offset)
9700 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9702 if (info->varargs_save_offset)
9703 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9705 if (info->total_size)
9706 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9708 if (info->varargs_size)
9709 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9711 if (info->vars_size)
9712 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9714 if (info->parm_size)
9715 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9717 if (info->fixed_size)
9718 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9721 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9723 if (info->spe_gp_size)
9724 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9727 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9729 if (info->altivec_size)
9730 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9732 if (info->vrsave_size)
9733 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9735 if (info->altivec_padding_size)
9736 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9737 info->altivec_padding_size);
9739 if (info->spe_padding_size)
9740 fprintf (stderr, "\tspe_padding_size = %5d\n",
9741 info->spe_padding_size);
9744 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9747 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9750 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9752 if (info->save_size)
9753 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9755 if (info->reg_size != 4)
9756 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9758 fprintf (stderr, "\n");
9762 rs6000_return_addr (count, frame)
9766 /* Currently we don't optimize very well between prolog and body
9767 code and for PIC code the code can be actually quite bad, so
9768 don't try to be too clever here. */
9769 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9771 cfun->machine->ra_needs_full_frame = 1;
9778 plus_constant (copy_to_reg
9779 (gen_rtx_MEM (Pmode,
9780 memory_address (Pmode, frame))),
9781 RETURN_ADDRESS_OFFSET)));
9784 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9787 /* Say whether a function is a candidate for sibcall handling or not.
9788 We do not allow indirect calls to be optimized into sibling calls.
9789 Also, we can't do it if there are any vector parameters; there's
9790 nowhere to put the VRsave code so it works; note that functions with
9791 vector parameters are required to have a prototype, so the argument
9792 type info must be available here. (The tail recursion case can work
9793 with vector parameters, but there's no way to distinguish here.) */
9795 rs6000_function_ok_for_sibcall (decl, exp)
9797 tree exp ATTRIBUTE_UNUSED;
9802 if (TARGET_ALTIVEC_VRSAVE)
9804 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
9805 type; type = TREE_CHAIN (type))
9807 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9811 if (DEFAULT_ABI == ABI_DARWIN
9812 || (*targetm.binds_local_p) (decl))
9814 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
9816 if (!lookup_attribute ("longcall", attr_list)
9817 || lookup_attribute ("shortcall", attr_list))
9825 rs6000_ra_ever_killed ()
9831 /* Irritatingly, there are two kinds of thunks -- those created with
9832 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
9833 through the regular part of the compiler. This is a very hacky
9834 way to tell them apart. */
9835 if (current_function_is_thunk && !no_new_pseudos)
9838 /* regs_ever_live has LR marked as used if any sibcalls are present,
9839 but this should not force saving and restoring in the
9840 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
9841 clobbers LR, so that is inappropriate. */
9843 /* Also, the prologue can generate a store into LR that
9844 doesn't really count, like this:
9847 bcl to set PIC register
9851 When we're called from the epilogue, we need to avoid counting
9854 push_topmost_sequence ();
9856 pop_topmost_sequence ();
9857 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9859 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9863 if (FIND_REG_INC_NOTE (insn, reg))
9865 else if (GET_CODE (insn) == CALL_INSN
9866 && !SIBLING_CALL_P (insn))
9868 else if (set_of (reg, insn) != NULL_RTX
9869 && !prologue_epilogue_contains (insn))
9876 /* Add a REG_MAYBE_DEAD note to the insn. */
9878 rs6000_maybe_dead (insn)
9881 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9886 /* Emit instructions needed to load the TOC register.
9887 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9888 a constant pool; or for SVR4 -fpic. */
9891 rs6000_emit_load_toc_table (fromprolog)
9895 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9897 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9899 rtx temp = (fromprolog
9900 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9901 : gen_reg_rtx (Pmode));
9902 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
9904 rs6000_maybe_dead (insn);
9905 insn = emit_move_insn (dest, temp);
9907 rs6000_maybe_dead (insn);
9909 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
9912 rtx tempLR = (fromprolog
9913 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9914 : gen_reg_rtx (Pmode));
9915 rtx temp0 = (fromprolog
9916 ? gen_rtx_REG (Pmode, 0)
9917 : gen_reg_rtx (Pmode));
9920 /* possibly create the toc section */
9921 if (! toc_initialized)
9924 function_section (current_function_decl);
9931 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9932 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9934 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9935 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9937 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9939 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9940 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9947 static int reload_toc_labelno = 0;
9949 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9951 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9952 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9954 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
9955 emit_move_insn (dest, tempLR);
9956 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
9958 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
9960 rs6000_maybe_dead (insn);
9962 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
9964 /* This is for AIX code running in non-PIC ELF32. */
9967 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
9968 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9970 insn = emit_insn (gen_elf_high (dest, realsym));
9972 rs6000_maybe_dead (insn);
9973 insn = emit_insn (gen_elf_low (dest, dest, realsym));
9975 rs6000_maybe_dead (insn);
9977 else if (DEFAULT_ABI == ABI_AIX)
9980 insn = emit_insn (gen_load_toc_aix_si (dest));
9982 insn = emit_insn (gen_load_toc_aix_di (dest));
9984 rs6000_maybe_dead (insn);
9991 get_TOC_alias_set ()
9993 static int set = -1;
9995 set = new_alias_set ();
9999 /* This retuns nonzero if the current function uses the TOC. This is
10000 determined by the presence of (unspec ... 7), which is generated by
10001 the various load_toc_* patterns. */
10008 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10011 rtx pat = PATTERN (insn);
10014 if (GET_CODE (pat) == PARALLEL)
10015 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10016 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
10017 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
10024 create_TOC_reference (symbol)
10027 return gen_rtx_PLUS (Pmode,
10028 gen_rtx_REG (Pmode, TOC_REGISTER),
10029 gen_rtx_CONST (Pmode,
10030 gen_rtx_MINUS (Pmode, symbol,
10031 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10035 /* __throw will restore its own return address to be the same as the
10036 return address of the function that the throw is being made to.
10037 This is unfortunate, because we want to check the original
10038 return address to see if we need to restore the TOC.
10039 So we have to squirrel it away here.
10040 This is used only in compiling __throw and __rethrow.
10042 Most of this code should be removed by CSE. */
10043 static rtx insn_after_throw;
10045 /* This does the saving... */
10047 rs6000_aix_emit_builtin_unwind_init ()
10050 rtx stack_top = gen_reg_rtx (Pmode);
10051 rtx opcode_addr = gen_reg_rtx (Pmode);
10053 insn_after_throw = gen_reg_rtx (SImode);
10055 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10056 emit_move_insn (stack_top, mem);
10058 mem = gen_rtx_MEM (Pmode,
10059 gen_rtx_PLUS (Pmode, stack_top,
10060 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10061 emit_move_insn (opcode_addr, mem);
10062 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10065 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10066 in _eh.o). Only used on AIX.
10068 The idea is that on AIX, function calls look like this:
10069 bl somefunction-trampoline
10073 somefunction-trampoline:
10075 ... load function address in the count register ...
10077 or like this, if the linker determines that this is not a cross-module call
10078 and so the TOC need not be restored:
10081 or like this, if the compiler could determine that this is not a
10084 now, the tricky bit here is that register 2 is saved and restored
10085 by the _linker_, so we can't readily generate debugging information
10086 for it. So we need to go back up the call chain looking at the
10087 insns at return addresses to see which calls saved the TOC register
10088 and so see where it gets restored from.
10090 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10091 just before the actual epilogue.
10093 On the bright side, this incurs no space or time overhead unless an
10094 exception is thrown, except for the extra code in libgcc.a.
10096 The parameter STACKSIZE is a register containing (at runtime)
10097 the amount to be popped off the stack in addition to the stack frame
10098 of this routine (which will be __throw or __rethrow, and so is
10099 guaranteed to have a stack frame). */
10102 rs6000_emit_eh_toc_restore (stacksize)
10106 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10107 rtx tocompare = gen_reg_rtx (SImode);
10108 rtx opcode = gen_reg_rtx (SImode);
10109 rtx opcode_addr = gen_reg_rtx (Pmode);
10111 rtx loop_start = gen_label_rtx ();
10112 rtx no_toc_restore_needed = gen_label_rtx ();
10113 rtx loop_exit = gen_label_rtx ();
10115 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10116 set_mem_alias_set (mem, rs6000_sr_alias_set);
10117 emit_move_insn (bottom_of_stack, mem);
10119 top_of_stack = expand_binop (Pmode, add_optab,
10120 bottom_of_stack, stacksize,
10121 NULL_RTX, 1, OPTAB_WIDEN);
10123 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10124 : 0xE8410028, SImode));
10126 if (insn_after_throw == NULL_RTX)
10128 emit_move_insn (opcode, insn_after_throw);
10130 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10131 emit_label (loop_start);
10133 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10134 SImode, NULL_RTX, NULL_RTX,
10135 no_toc_restore_needed);
10137 mem = gen_rtx_MEM (Pmode,
10138 gen_rtx_PLUS (Pmode, bottom_of_stack,
10139 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10140 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10142 emit_label (no_toc_restore_needed);
10143 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10144 Pmode, NULL_RTX, NULL_RTX,
10147 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10148 set_mem_alias_set (mem, rs6000_sr_alias_set);
10149 emit_move_insn (bottom_of_stack, mem);
10151 mem = gen_rtx_MEM (Pmode,
10152 gen_rtx_PLUS (Pmode, bottom_of_stack,
10153 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10154 emit_move_insn (opcode_addr, mem);
10155 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10157 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10158 emit_jump (loop_start);
10159 emit_note (NULL, NOTE_INSN_LOOP_END);
10160 emit_label (loop_exit);
10162 #endif /* TARGET_AIX */
10164 /* This ties together stack memory (MEM with an alias set of
10165 rs6000_sr_alias_set) and the change to the stack pointer. */
10168 rs6000_emit_stack_tie ()
10170 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10172 set_mem_alias_set (mem, rs6000_sr_alias_set);
10173 emit_insn (gen_stack_tie (mem));
10176 /* Emit the correct code for allocating stack space, as insns.
10177 If COPY_R12, make sure a copy of the old frame is left in r12.
10178 The generated code may use hard register 0 as a temporary. */
10181 rs6000_emit_allocate_stack (size, copy_r12)
10182 HOST_WIDE_INT size;
10186 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10187 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10188 rtx todec = GEN_INT (-size);
10190 if (current_function_limit_stack)
10192 if (REG_P (stack_limit_rtx)
10193 && REGNO (stack_limit_rtx) > 1
10194 && REGNO (stack_limit_rtx) <= 31)
10196 emit_insn (TARGET_32BIT
10197 ? gen_addsi3 (tmp_reg,
10200 : gen_adddi3 (tmp_reg,
10204 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10207 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10209 && DEFAULT_ABI == ABI_V4)
10211 rtx toload = gen_rtx_CONST (VOIDmode,
10212 gen_rtx_PLUS (Pmode,
10216 emit_insn (gen_elf_high (tmp_reg, toload));
10217 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10218 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10222 warning ("stack limit expression is not supported");
10225 if (copy_r12 || ! TARGET_UPDATE)
10226 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10232 /* Need a note here so that try_split doesn't get confused. */
10233 if (get_last_insn() == NULL_RTX)
10234 emit_note (0, NOTE_INSN_DELETED);
10235 insn = emit_move_insn (tmp_reg, todec);
10236 try_split (PATTERN (insn), insn, 0);
10240 insn = emit_insn (TARGET_32BIT
10241 ? gen_movsi_update (stack_reg, stack_reg,
10243 : gen_movdi_update (stack_reg, stack_reg,
10244 todec, stack_reg));
10248 insn = emit_insn (TARGET_32BIT
10249 ? gen_addsi3 (stack_reg, stack_reg, todec)
10250 : gen_adddi3 (stack_reg, stack_reg, todec));
10251 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10252 gen_rtx_REG (Pmode, 12));
10255 RTX_FRAME_RELATED_P (insn) = 1;
10257 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10258 gen_rtx_SET (VOIDmode, stack_reg,
10259 gen_rtx_PLUS (Pmode, stack_reg,
10264 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10265 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10266 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10267 deduce these equivalences by itself so it wasn't necessary to hold
10268 its hand so much. */
10271 rs6000_frame_related (insn, reg, val, reg2, rreg)
10280 /* copy_rtx will not make unique copies of registers, so we need to
10281 ensure we don't have unwanted sharing here. */
10283 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10286 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10288 real = copy_rtx (PATTERN (insn));
10290 if (reg2 != NULL_RTX)
10291 real = replace_rtx (real, reg2, rreg);
10293 real = replace_rtx (real, reg,
10294 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10295 STACK_POINTER_REGNUM),
10298 /* We expect that 'real' is either a SET or a PARALLEL containing
10299 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10300 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10302 if (GET_CODE (real) == SET)
10306 temp = simplify_rtx (SET_SRC (set));
10308 SET_SRC (set) = temp;
10309 temp = simplify_rtx (SET_DEST (set));
10311 SET_DEST (set) = temp;
10312 if (GET_CODE (SET_DEST (set)) == MEM)
10314 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10316 XEXP (SET_DEST (set), 0) = temp;
10319 else if (GET_CODE (real) == PARALLEL)
10322 for (i = 0; i < XVECLEN (real, 0); i++)
10323 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10325 rtx set = XVECEXP (real, 0, i);
10327 temp = simplify_rtx (SET_SRC (set));
10329 SET_SRC (set) = temp;
10330 temp = simplify_rtx (SET_DEST (set));
10332 SET_DEST (set) = temp;
10333 if (GET_CODE (SET_DEST (set)) == MEM)
10335 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10337 XEXP (SET_DEST (set), 0) = temp;
10339 RTX_FRAME_RELATED_P (set) = 1;
10346 real = spe_synthesize_frame_save (real);
10348 RTX_FRAME_RELATED_P (insn) = 1;
10349 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10354 /* Given an SPE frame note, return a PARALLEL of SETs with the
10355 original note, plus a synthetic register save. */
10358 spe_synthesize_frame_save (real)
10361 rtx synth, offset, reg, real2;
10363 if (GET_CODE (real) != SET
10364 || GET_MODE (SET_SRC (real)) != V2SImode)
10367 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10368 frame related note. The parallel contains a set of the register
10369 being saved, and another set to a synthetic register (n+1200).
10370 This is so we can differentiate between 64-bit and 32-bit saves.
10371 Words cannot describe this nastiness. */
10373 if (GET_CODE (SET_DEST (real)) != MEM
10374 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10375 || GET_CODE (SET_SRC (real)) != REG)
10379 (set (mem (plus (reg x) (const y)))
10382 (set (mem (plus (reg x) (const y+4)))
10386 real2 = copy_rtx (real);
10387 PUT_MODE (SET_DEST (real2), SImode);
10388 reg = SET_SRC (real2);
10389 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10390 synth = copy_rtx (real2);
10392 if (BYTES_BIG_ENDIAN)
10394 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10395 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10398 reg = SET_SRC (synth);
10400 synth = replace_rtx (synth, reg,
10401 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10403 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10404 synth = replace_rtx (synth, offset,
10405 GEN_INT (INTVAL (offset)
10406 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10408 RTX_FRAME_RELATED_P (synth) = 1;
10409 RTX_FRAME_RELATED_P (real2) = 1;
10410 if (BYTES_BIG_ENDIAN)
10411 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10413 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10418 /* Returns an insn that has a vrsave set operation with the
10419 appropriate CLOBBERs. */
10422 generate_set_vrsave (reg, info, epiloguep)
10424 rs6000_stack_t *info;
10428 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10429 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10432 = gen_rtx_SET (VOIDmode,
10434 gen_rtx_UNSPEC_VOLATILE (SImode,
10435 gen_rtvec (2, reg, vrsave),
10440 /* We need to clobber the registers in the mask so the scheduler
10441 does not move sets to VRSAVE before sets of AltiVec registers.
10443 However, if the function receives nonlocal gotos, reload will set
10444 all call saved registers live. We will end up with:
10446 (set (reg 999) (mem))
10447 (parallel [ (set (reg vrsave) (unspec blah))
10448 (clobber (reg 999))])
10450 The clobber will cause the store into reg 999 to be dead, and
10451 flow will attempt to delete an epilogue insn. In this case, we
10452 need an unspec use/set of the register. */
10454 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10455 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10457 if (!epiloguep || call_used_regs [i])
10458 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10459 gen_rtx_REG (V4SImode, i));
10462 rtx reg = gen_rtx_REG (V4SImode, i);
10465 = gen_rtx_SET (VOIDmode,
10467 gen_rtx_UNSPEC (V4SImode,
10468 gen_rtvec (1, reg), 27));
10472 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10474 for (i = 0; i < nclobs; ++i)
10475 XVECEXP (insn, 0, i) = clobs[i];
10480 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10481 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10484 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10487 enum machine_mode mode;
10488 unsigned int regno;
10492 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10493 rtx replacea, replaceb;
10495 int_rtx = GEN_INT (offset);
10497 /* Some cases that need register indexed addressing. */
10498 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10500 && SPE_VECTOR_MODE (mode)
10501 && !SPE_CONST_OFFSET_OK (offset)))
10503 /* Whomever calls us must make sure r11 is available in the
10504 flow path of instructions in the prologue. */
10505 offset_rtx = gen_rtx_REG (Pmode, 11);
10506 emit_move_insn (offset_rtx, int_rtx);
10508 replacea = offset_rtx;
10509 replaceb = int_rtx;
10513 offset_rtx = int_rtx;
10514 replacea = NULL_RTX;
10515 replaceb = NULL_RTX;
10518 reg = gen_rtx_REG (mode, regno);
10519 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10520 mem = gen_rtx_MEM (mode, addr);
10521 set_mem_alias_set (mem, rs6000_sr_alias_set);
10523 insn = emit_move_insn (mem, reg);
10525 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10528 /* Emit an offset memory reference suitable for a frame store, while
10529 converting to a valid addressing mode. */
10532 gen_frame_mem_offset (mode, reg, offset)
10533 enum machine_mode mode;
10537 rtx int_rtx, offset_rtx;
10539 int_rtx = GEN_INT (offset);
10541 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10543 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10544 emit_move_insn (offset_rtx, int_rtx);
10547 offset_rtx = int_rtx;
10549 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10552 /* Emit function prologue as insns. */
10555 rs6000_emit_prologue ()
10557 rs6000_stack_t *info = rs6000_stack_info ();
10558 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10559 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10560 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10561 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10562 rtx frame_reg_rtx = sp_reg_rtx;
10563 rtx cr_save_rtx = NULL;
10565 int saving_FPRs_inline;
10566 int using_store_multiple;
10567 HOST_WIDE_INT sp_offset = 0;
10569 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10571 reg_mode = V2SImode;
10575 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10576 && (!TARGET_SPE_ABI
10577 || info->spe_64bit_regs_used == 0)
10578 && info->first_gp_reg_save < 31);
10579 saving_FPRs_inline = (info->first_fp_reg_save == 64
10580 || FP_SAVE_INLINE (info->first_fp_reg_save));
10582 /* For V.4, update stack before we do any saving and set back pointer. */
10583 if (info->push_p && DEFAULT_ABI == ABI_V4)
10585 if (info->total_size < 32767)
10586 sp_offset = info->total_size;
10588 frame_reg_rtx = frame_ptr_rtx;
10589 rs6000_emit_allocate_stack (info->total_size,
10590 (frame_reg_rtx != sp_reg_rtx
10591 && (info->cr_save_p
10593 || info->first_fp_reg_save < 64
10594 || info->first_gp_reg_save < 32
10596 if (frame_reg_rtx != sp_reg_rtx)
10597 rs6000_emit_stack_tie ();
10600 /* Save AltiVec registers if needed. */
10601 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10605 /* There should be a non inline version of this, for when we
10606 are saving lots of vector registers. */
10607 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10608 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10610 rtx areg, savereg, mem;
10613 offset = info->altivec_save_offset + sp_offset
10614 + 16 * (i - info->first_altivec_reg_save);
10616 savereg = gen_rtx_REG (V4SImode, i);
10618 areg = gen_rtx_REG (Pmode, 0);
10619 emit_move_insn (areg, GEN_INT (offset));
10621 /* AltiVec addressing mode is [reg+reg]. */
10622 mem = gen_rtx_MEM (V4SImode,
10623 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10625 set_mem_alias_set (mem, rs6000_sr_alias_set);
10627 insn = emit_move_insn (mem, savereg);
10629 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10630 areg, GEN_INT (offset));
10634 /* VRSAVE is a bit vector representing which AltiVec registers
10635 are used. The OS uses this to determine which vector
10636 registers to save on a context switch. We need to save
10637 VRSAVE on the stack frame, add whatever AltiVec registers we
10638 used in this function, and do the corresponding magic in the
10641 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10643 rtx reg, mem, vrsave;
10646 /* Get VRSAVE onto a GPR. */
10647 reg = gen_rtx_REG (SImode, 12);
10648 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10650 emit_insn (gen_get_vrsave_internal (reg));
10652 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10655 offset = info->vrsave_save_offset + sp_offset;
10657 = gen_rtx_MEM (SImode,
10658 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10659 set_mem_alias_set (mem, rs6000_sr_alias_set);
10660 insn = emit_move_insn (mem, reg);
10662 /* Include the registers in the mask. */
10663 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10665 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10668 /* If we use the link register, get it into r0. */
10669 if (info->lr_save_p)
10670 emit_move_insn (gen_rtx_REG (Pmode, 0),
10671 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10673 /* If we need to save CR, put it into r12. */
10674 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10676 cr_save_rtx = gen_rtx_REG (SImode, 12);
10677 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10680 /* Do any required saving of fpr's. If only one or two to save, do
10681 it ourselves. Otherwise, call function. */
10682 if (saving_FPRs_inline)
10685 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10686 if ((regs_ever_live[info->first_fp_reg_save+i]
10687 && ! call_used_regs[info->first_fp_reg_save+i]))
10688 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10689 info->first_fp_reg_save + i,
10690 info->fp_save_offset + sp_offset + 8 * i,
10693 else if (info->first_fp_reg_save != 64)
10697 const char *alloc_rname;
10699 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10701 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10702 gen_rtx_REG (Pmode,
10703 LINK_REGISTER_REGNUM));
10704 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10705 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10706 alloc_rname = ggc_strdup (rname);
10707 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10708 gen_rtx_SYMBOL_REF (Pmode,
10710 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10712 rtx addr, reg, mem;
10713 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10714 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10715 GEN_INT (info->fp_save_offset
10716 + sp_offset + 8*i));
10717 mem = gen_rtx_MEM (DFmode, addr);
10718 set_mem_alias_set (mem, rs6000_sr_alias_set);
10720 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10722 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10723 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10724 NULL_RTX, NULL_RTX);
10727 /* Save GPRs. This is done as a PARALLEL if we are using
10728 the store-multiple instructions. */
10729 if (using_store_multiple)
10733 p = rtvec_alloc (32 - info->first_gp_reg_save);
10734 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10736 rtx addr, reg, mem;
10737 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10738 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10739 GEN_INT (info->gp_save_offset
10742 mem = gen_rtx_MEM (reg_mode, addr);
10743 set_mem_alias_set (mem, rs6000_sr_alias_set);
10745 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10747 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10748 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10749 NULL_RTX, NULL_RTX);
10754 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10755 if ((regs_ever_live[info->first_gp_reg_save+i]
10756 && ! call_used_regs[info->first_gp_reg_save+i])
10757 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10758 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10759 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10761 rtx addr, reg, mem;
10762 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10764 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
10766 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10769 if (!SPE_CONST_OFFSET_OK (offset))
10771 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10772 emit_move_insn (b, GEN_INT (offset));
10775 b = GEN_INT (offset);
10777 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10778 mem = gen_rtx_MEM (V2SImode, addr);
10779 set_mem_alias_set (mem, rs6000_sr_alias_set);
10780 insn = emit_move_insn (mem, reg);
10782 if (GET_CODE (b) == CONST_INT)
10783 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10784 NULL_RTX, NULL_RTX);
10786 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10787 b, GEN_INT (offset));
10791 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10792 GEN_INT (info->gp_save_offset
10795 mem = gen_rtx_MEM (reg_mode, addr);
10796 set_mem_alias_set (mem, rs6000_sr_alias_set);
10798 insn = emit_move_insn (mem, reg);
10799 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10800 NULL_RTX, NULL_RTX);
10805 /* ??? There's no need to emit actual instructions here, but it's the
10806 easiest way to get the frame unwind information emitted. */
10807 if (current_function_calls_eh_return)
10809 unsigned int i, regno;
10813 regno = EH_RETURN_DATA_REGNO (i);
10814 if (regno == INVALID_REGNUM)
10817 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10818 info->ehrd_offset + sp_offset
10819 + reg_size * (int) i,
10824 /* Save lr if we used it. */
10825 if (info->lr_save_p)
10827 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10828 GEN_INT (info->lr_save_offset + sp_offset));
10829 rtx reg = gen_rtx_REG (Pmode, 0);
10830 rtx mem = gen_rtx_MEM (Pmode, addr);
10831 /* This should not be of rs6000_sr_alias_set, because of
10832 __builtin_return_address. */
10834 insn = emit_move_insn (mem, reg);
10835 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10836 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10839 /* Save CR if we use any that must be preserved. */
10840 if (info->cr_save_p)
10842 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10843 GEN_INT (info->cr_save_offset + sp_offset));
10844 rtx mem = gen_rtx_MEM (SImode, addr);
10846 set_mem_alias_set (mem, rs6000_sr_alias_set);
10848 /* If r12 was used to hold the original sp, copy cr into r0 now
10850 if (REGNO (frame_reg_rtx) == 12)
10852 cr_save_rtx = gen_rtx_REG (SImode, 0);
10853 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10855 insn = emit_move_insn (mem, cr_save_rtx);
10857 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10858 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10859 OK. All we have to do is specify that _one_ condition code
10860 register is saved in this stack slot. The thrower's epilogue
10861 will then restore all the call-saved registers.
10862 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10863 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10864 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10867 /* Update stack and set back pointer unless this is V.4,
10868 for which it was done previously. */
10869 if (info->push_p && DEFAULT_ABI != ABI_V4)
10870 rs6000_emit_allocate_stack (info->total_size, FALSE);
10872 /* Set frame pointer, if needed. */
10873 if (frame_pointer_needed)
10875 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10877 RTX_FRAME_RELATED_P (insn) = 1;
10880 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10881 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10882 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10883 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10885 /* If emit_load_toc_table will use the link register, we need to save
10886 it. We use R11 for this purpose because emit_load_toc_table
10887 can use register 0. This allows us to use a plain 'blr' to return
10888 from the procedure more often. */
10889 int save_LR_around_toc_setup = (TARGET_ELF
10890 && DEFAULT_ABI != ABI_AIX
10892 && ! info->lr_save_p
10893 && EXIT_BLOCK_PTR->pred != NULL);
10894 if (save_LR_around_toc_setup)
10895 emit_move_insn (gen_rtx_REG (Pmode, 11),
10896 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10898 rs6000_emit_load_toc_table (TRUE);
10900 if (save_LR_around_toc_setup)
10901 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10902 gen_rtx_REG (Pmode, 11));
10906 if (DEFAULT_ABI == ABI_DARWIN
10907 && flag_pic && current_function_uses_pic_offset_table)
10909 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10910 const char *picbase = machopic_function_base_name ();
10911 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
10913 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
10915 rs6000_maybe_dead (
10916 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10917 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10922 /* Write function prologue. */
10925 rs6000_output_function_prologue (file, size)
10927 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10929 rs6000_stack_t *info = rs6000_stack_info ();
10931 if (TARGET_DEBUG_STACK)
10932 debug_stack_info (info);
10934 /* Write .extern for any function we will call to save and restore
10936 if (info->first_fp_reg_save < 64
10937 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10938 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10939 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10940 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10941 RESTORE_FP_SUFFIX);
10943 /* Write .extern for AIX common mode routines, if needed. */
10944 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10946 fputs ("\t.extern __mulh\n", file);
10947 fputs ("\t.extern __mull\n", file);
10948 fputs ("\t.extern __divss\n", file);
10949 fputs ("\t.extern __divus\n", file);
10950 fputs ("\t.extern __quoss\n", file);
10951 fputs ("\t.extern __quous\n", file);
10952 common_mode_defined = 1;
10955 if (! HAVE_prologue)
10959 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10960 the "toplevel" insn chain. */
10961 emit_note (0, NOTE_INSN_DELETED);
10962 rs6000_emit_prologue ();
10963 emit_note (0, NOTE_INSN_DELETED);
10965 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10969 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10971 INSN_ADDRESSES_NEW (insn, addr);
10976 if (TARGET_DEBUG_STACK)
10977 debug_rtx_list (get_insns (), 100);
10978 final (get_insns (), file, FALSE, FALSE);
10982 rs6000_pic_labelno++;
10985 /* Emit function epilogue as insns.
10987 At present, dwarf2out_frame_debug_expr doesn't understand
10988 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10989 anywhere in the epilogue. Most of the insns below would in any case
10990 need special notes to explain where r11 is in relation to the stack. */
10993 rs6000_emit_epilogue (sibcall)
10996 rs6000_stack_t *info;
10997 int restoring_FPRs_inline;
10998 int using_load_multiple;
10999 int using_mfcr_multiple;
11000 int use_backchain_to_restore_sp;
11002 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11003 rtx frame_reg_rtx = sp_reg_rtx;
11004 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11005 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11008 info = rs6000_stack_info ();
11010 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11012 reg_mode = V2SImode;
11016 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11017 && (!TARGET_SPE_ABI
11018 || info->spe_64bit_regs_used == 0)
11019 && info->first_gp_reg_save < 31);
11020 restoring_FPRs_inline = (sibcall
11021 || current_function_calls_eh_return
11022 || info->first_fp_reg_save == 64
11023 || FP_SAVE_INLINE (info->first_fp_reg_save));
11024 use_backchain_to_restore_sp = (frame_pointer_needed
11025 || current_function_calls_alloca
11026 || info->total_size > 32767);
11027 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11028 || rs6000_cpu == PROCESSOR_PPC603
11029 || rs6000_cpu == PROCESSOR_PPC750
11032 /* If we have a frame pointer, a call to alloca, or a large stack
11033 frame, restore the old stack pointer using the backchain. Otherwise,
11034 we know what size to update it with. */
11035 if (use_backchain_to_restore_sp)
11037 /* Under V.4, don't reset the stack pointer until after we're done
11038 loading the saved registers. */
11039 if (DEFAULT_ABI == ABI_V4)
11040 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11042 emit_move_insn (frame_reg_rtx,
11043 gen_rtx_MEM (Pmode, sp_reg_rtx));
11046 else if (info->push_p)
11048 if (DEFAULT_ABI == ABI_V4)
11049 sp_offset = info->total_size;
11052 emit_insn (TARGET_32BIT
11053 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11054 GEN_INT (info->total_size))
11055 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11056 GEN_INT (info->total_size)));
11060 /* Restore AltiVec registers if needed. */
11061 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11065 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11066 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11068 rtx addr, areg, mem;
11070 areg = gen_rtx_REG (Pmode, 0);
11072 (areg, GEN_INT (info->altivec_save_offset
11074 + 16 * (i - info->first_altivec_reg_save)));
11076 /* AltiVec addressing mode is [reg+reg]. */
11077 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11078 mem = gen_rtx_MEM (V4SImode, addr);
11079 set_mem_alias_set (mem, rs6000_sr_alias_set);
11081 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11085 /* Restore VRSAVE if needed. */
11086 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11088 rtx addr, mem, reg;
11090 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11091 GEN_INT (info->vrsave_save_offset + sp_offset));
11092 mem = gen_rtx_MEM (SImode, addr);
11093 set_mem_alias_set (mem, rs6000_sr_alias_set);
11094 reg = gen_rtx_REG (SImode, 12);
11095 emit_move_insn (reg, mem);
11097 emit_insn (generate_set_vrsave (reg, info, 1));
11100 /* Get the old lr if we saved it. */
11101 if (info->lr_save_p)
11103 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11104 info->lr_save_offset + sp_offset);
11106 set_mem_alias_set (mem, rs6000_sr_alias_set);
11108 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11111 /* Get the old cr if we saved it. */
11112 if (info->cr_save_p)
11114 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11115 GEN_INT (info->cr_save_offset + sp_offset));
11116 rtx mem = gen_rtx_MEM (SImode, addr);
11118 set_mem_alias_set (mem, rs6000_sr_alias_set);
11120 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11123 /* Set LR here to try to overlap restores below. */
11124 if (info->lr_save_p)
11125 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11126 gen_rtx_REG (Pmode, 0));
11128 /* Load exception handler data registers, if needed. */
11129 if (current_function_calls_eh_return)
11131 unsigned int i, regno;
11137 regno = EH_RETURN_DATA_REGNO (i);
11138 if (regno == INVALID_REGNUM)
11141 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11142 info->ehrd_offset + sp_offset
11143 + reg_size * (int) i);
11144 set_mem_alias_set (mem, rs6000_sr_alias_set);
11146 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11150 /* Restore GPRs. This is done as a PARALLEL if we are using
11151 the load-multiple instructions. */
11152 if (using_load_multiple)
11155 p = rtvec_alloc (32 - info->first_gp_reg_save);
11156 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11158 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11159 GEN_INT (info->gp_save_offset
11162 rtx mem = gen_rtx_MEM (reg_mode, addr);
11164 set_mem_alias_set (mem, rs6000_sr_alias_set);
11167 gen_rtx_SET (VOIDmode,
11168 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11171 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11174 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11175 if ((regs_ever_live[info->first_gp_reg_save+i]
11176 && ! call_used_regs[info->first_gp_reg_save+i])
11177 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11178 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11179 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11181 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11182 GEN_INT (info->gp_save_offset
11185 rtx mem = gen_rtx_MEM (reg_mode, addr);
11187 /* Restore 64-bit quantities for SPE. */
11188 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11190 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11193 if (!SPE_CONST_OFFSET_OK (offset))
11195 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11196 emit_move_insn (b, GEN_INT (offset));
11199 b = GEN_INT (offset);
11201 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11202 mem = gen_rtx_MEM (V2SImode, addr);
11205 set_mem_alias_set (mem, rs6000_sr_alias_set);
11207 emit_move_insn (gen_rtx_REG (reg_mode,
11208 info->first_gp_reg_save + i), mem);
11211 /* Restore fpr's if we need to do it without calling a function. */
11212 if (restoring_FPRs_inline)
11213 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11214 if ((regs_ever_live[info->first_fp_reg_save+i]
11215 && ! call_used_regs[info->first_fp_reg_save+i]))
11218 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11219 GEN_INT (info->fp_save_offset
11222 mem = gen_rtx_MEM (DFmode, addr);
11223 set_mem_alias_set (mem, rs6000_sr_alias_set);
11225 emit_move_insn (gen_rtx_REG (DFmode,
11226 info->first_fp_reg_save + i),
11230 /* If we saved cr, restore it here. Just those that were used. */
11231 if (info->cr_save_p)
11233 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11236 if (using_mfcr_multiple)
11238 for (i = 0; i < 8; i++)
11239 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11245 if (using_mfcr_multiple && count > 1)
11250 p = rtvec_alloc (count);
11253 for (i = 0; i < 8; i++)
11254 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11256 rtvec r = rtvec_alloc (2);
11257 RTVEC_ELT (r, 0) = r12_rtx;
11258 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11259 RTVEC_ELT (p, ndx) =
11260 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11261 gen_rtx_UNSPEC (CCmode, r, 20));
11264 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11269 for (i = 0; i < 8; i++)
11270 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11272 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11278 /* If this is V.4, unwind the stack pointer after all of the loads
11279 have been done. We need to emit a block here so that sched
11280 doesn't decide to move the sp change before the register restores
11281 (which may not have any obvious dependency on the stack). This
11282 doesn't hurt performance, because there is no scheduling that can
11283 be done after this point. */
11284 if (DEFAULT_ABI == ABI_V4)
11286 if (frame_reg_rtx != sp_reg_rtx)
11287 rs6000_emit_stack_tie ();
11289 if (use_backchain_to_restore_sp)
11291 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11293 else if (sp_offset != 0)
11295 emit_insn (TARGET_32BIT
11296 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11297 GEN_INT (sp_offset))
11298 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11299 GEN_INT (sp_offset)));
11303 if (current_function_calls_eh_return)
11305 rtx sa = EH_RETURN_STACKADJ_RTX;
11306 emit_insn (TARGET_32BIT
11307 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11308 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11314 if (! restoring_FPRs_inline)
11315 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11317 p = rtvec_alloc (2);
11319 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11320 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11321 gen_rtx_REG (Pmode,
11322 LINK_REGISTER_REGNUM));
11324 /* If we have to restore more than two FP registers, branch to the
11325 restore function. It will return to our caller. */
11326 if (! restoring_FPRs_inline)
11330 const char *alloc_rname;
11332 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11333 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11334 alloc_rname = ggc_strdup (rname);
11335 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11336 gen_rtx_SYMBOL_REF (Pmode,
11339 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11342 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11343 GEN_INT (info->fp_save_offset + 8*i));
11344 mem = gen_rtx_MEM (DFmode, addr);
11345 set_mem_alias_set (mem, rs6000_sr_alias_set);
11347 RTVEC_ELT (p, i+3) =
11348 gen_rtx_SET (VOIDmode,
11349 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11354 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11358 /* Write function epilogue. */
11361 rs6000_output_function_epilogue (file, size)
11363 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11365 rs6000_stack_t *info = rs6000_stack_info ();
11367 if (! HAVE_epilogue)
11369 rtx insn = get_last_insn ();
11370 /* If the last insn was a BARRIER, we don't have to write anything except
11371 the trace table. */
11372 if (GET_CODE (insn) == NOTE)
11373 insn = prev_nonnote_insn (insn);
11374 if (insn == 0 || GET_CODE (insn) != BARRIER)
11376 /* This is slightly ugly, but at least we don't have two
11377 copies of the epilogue-emitting code. */
11380 /* A NOTE_INSN_DELETED is supposed to be at the start
11381 and end of the "toplevel" insn chain. */
11382 emit_note (0, NOTE_INSN_DELETED);
11383 rs6000_emit_epilogue (FALSE);
11384 emit_note (0, NOTE_INSN_DELETED);
11386 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11390 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11392 INSN_ADDRESSES_NEW (insn, addr);
11397 if (TARGET_DEBUG_STACK)
11398 debug_rtx_list (get_insns (), 100);
11399 final (get_insns (), file, FALSE, FALSE);
11404 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11407 We don't output a traceback table if -finhibit-size-directive was
11408 used. The documentation for -finhibit-size-directive reads
11409 ``don't output a @code{.size} assembler directive, or anything
11410 else that would cause trouble if the function is split in the
11411 middle, and the two halves are placed at locations far apart in
11412 memory.'' The traceback table has this property, since it
11413 includes the offset from the start of the function to the
11414 traceback table itself.
11416 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11417 different traceback table. */
11418 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11419 && rs6000_traceback != traceback_none)
11421 const char *fname = NULL;
11422 const char *language_string = lang_hooks.name;
11423 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11425 int optional_tbtab;
11427 if (rs6000_traceback == traceback_full)
11428 optional_tbtab = 1;
11429 else if (rs6000_traceback == traceback_part)
11430 optional_tbtab = 0;
11432 optional_tbtab = !optimize_size && !TARGET_ELF;
11434 if (optional_tbtab)
11436 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11437 while (*fname == '.') /* V.4 encodes . in the name */
11440 /* Need label immediately before tbtab, so we can compute
11441 its offset from the function start. */
11442 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11443 ASM_OUTPUT_LABEL (file, fname);
11446 /* The .tbtab pseudo-op can only be used for the first eight
11447 expressions, since it can't handle the possibly variable
11448 length fields that follow. However, if you omit the optional
11449 fields, the assembler outputs zeros for all optional fields
11450 anyways, giving each variable length field is minimum length
11451 (as defined in sys/debug.h). Thus we can not use the .tbtab
11452 pseudo-op at all. */
11454 /* An all-zero word flags the start of the tbtab, for debuggers
11455 that have to find it by searching forward from the entry
11456 point or from the current pc. */
11457 fputs ("\t.long 0\n", file);
11459 /* Tbtab format type. Use format type 0. */
11460 fputs ("\t.byte 0,", file);
11462 /* Language type. Unfortunately, there doesn't seem to be any
11463 official way to get this info, so we use language_string. C
11464 is 0. C++ is 9. No number defined for Obj-C, so use the
11465 value for C for now. There is no official value for Java,
11466 although IBM appears to be using 13. There is no official value
11467 for Chill, so we've chosen 44 pseudo-randomly. */
11468 if (! strcmp (language_string, "GNU C")
11469 || ! strcmp (language_string, "GNU Objective-C"))
11471 else if (! strcmp (language_string, "GNU F77"))
11473 else if (! strcmp (language_string, "GNU Ada"))
11475 else if (! strcmp (language_string, "GNU Pascal"))
11477 else if (! strcmp (language_string, "GNU C++"))
11479 else if (! strcmp (language_string, "GNU Java"))
11481 else if (! strcmp (language_string, "GNU CHILL"))
11485 fprintf (file, "%d,", i);
11487 /* 8 single bit fields: global linkage (not set for C extern linkage,
11488 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11489 from start of procedure stored in tbtab, internal function, function
11490 has controlled storage, function has no toc, function uses fp,
11491 function logs/aborts fp operations. */
11492 /* Assume that fp operations are used if any fp reg must be saved. */
11493 fprintf (file, "%d,",
11494 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11496 /* 6 bitfields: function is interrupt handler, name present in
11497 proc table, function calls alloca, on condition directives
11498 (controls stack walks, 3 bits), saves condition reg, saves
11500 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11501 set up as a frame pointer, even when there is no alloca call. */
11502 fprintf (file, "%d,",
11503 ((optional_tbtab << 6)
11504 | ((optional_tbtab & frame_pointer_needed) << 5)
11505 | (info->cr_save_p << 1)
11506 | (info->lr_save_p)));
11508 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11510 fprintf (file, "%d,",
11511 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11513 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11514 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11516 if (optional_tbtab)
11518 /* Compute the parameter info from the function decl argument
11521 int next_parm_info_bit = 31;
11523 for (decl = DECL_ARGUMENTS (current_function_decl);
11524 decl; decl = TREE_CHAIN (decl))
11526 rtx parameter = DECL_INCOMING_RTL (decl);
11527 enum machine_mode mode = GET_MODE (parameter);
11529 if (GET_CODE (parameter) == REG)
11531 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11537 if (mode == SFmode)
11539 else if (mode == DFmode || mode == TFmode)
11544 /* If only one bit will fit, don't or in this entry. */
11545 if (next_parm_info_bit > 0)
11546 parm_info |= (bits << (next_parm_info_bit - 1));
11547 next_parm_info_bit -= 2;
11551 fixed_parms += ((GET_MODE_SIZE (mode)
11552 + (UNITS_PER_WORD - 1))
11554 next_parm_info_bit -= 1;
11560 /* Number of fixed point parameters. */
11561 /* This is actually the number of words of fixed point parameters; thus
11562 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11563 fprintf (file, "%d,", fixed_parms);
11565 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11567 /* This is actually the number of fp registers that hold parameters;
11568 and thus the maximum value is 13. */
11569 /* Set parameters on stack bit if parameters are not in their original
11570 registers, regardless of whether they are on the stack? Xlc
11571 seems to set the bit when not optimizing. */
11572 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11574 if (! optional_tbtab)
11577 /* Optional fields follow. Some are variable length. */
11579 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11580 11 double float. */
11581 /* There is an entry for each parameter in a register, in the order that
11582 they occur in the parameter list. Any intervening arguments on the
11583 stack are ignored. If the list overflows a long (max possible length
11584 34 bits) then completely leave off all elements that don't fit. */
11585 /* Only emit this long if there was at least one parameter. */
11586 if (fixed_parms || float_parms)
11587 fprintf (file, "\t.long %d\n", parm_info);
11589 /* Offset from start of code to tb table. */
11590 fputs ("\t.long ", file);
11591 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11593 RS6000_OUTPUT_BASENAME (file, fname);
11595 assemble_name (file, fname);
11597 fputs ("-.", file);
11599 RS6000_OUTPUT_BASENAME (file, fname);
11601 assemble_name (file, fname);
11605 /* Interrupt handler mask. */
11606 /* Omit this long, since we never set the interrupt handler bit
11609 /* Number of CTL (controlled storage) anchors. */
11610 /* Omit this long, since the has_ctl bit is never set above. */
11612 /* Displacement into stack of each CTL anchor. */
11613 /* Omit this list of longs, because there are no CTL anchors. */
11615 /* Length of function name. */
11618 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11620 /* Function name. */
11621 assemble_string (fname, strlen (fname));
11623 /* Register for alloca automatic storage; this is always reg 31.
11624 Only emit this if the alloca bit was set above. */
11625 if (frame_pointer_needed)
11626 fputs ("\t.byte 31\n", file);
11628 fputs ("\t.align 2\n", file);
11632 /* A C compound statement that outputs the assembler code for a thunk
11633 function, used to implement C++ virtual function calls with
11634 multiple inheritance. The thunk acts as a wrapper around a virtual
11635 function, adjusting the implicit object parameter before handing
11636 control off to the real function.
11638 First, emit code to add the integer DELTA to the location that
11639 contains the incoming first argument. Assume that this argument
11640 contains a pointer, and is the one used to pass the `this' pointer
11641 in C++. This is the incoming argument *before* the function
11642 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11643 values of all other incoming arguments.
11645 After the addition, emit code to jump to FUNCTION, which is a
11646 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11647 not touch the return address. Hence returning from FUNCTION will
11648 return to whoever called the current `thunk'.
11650 The effect must be as if FUNCTION had been called directly with the
11651 adjusted first argument. This macro is responsible for emitting
11652 all of the code for a thunk function; output_function_prologue()
11653 and output_function_epilogue() are not invoked.
11655 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11656 been extracted from it.) It might possibly be useful on some
11657 targets, but probably not.
11659 If you do not define this macro, the target-independent code in the
11660 C++ frontend will generate a less efficient heavyweight thunk that
11661 calls FUNCTION instead of jumping to it. The generic approach does
11662 not support varargs. */
11665 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
11667 tree thunk_fndecl ATTRIBUTE_UNUSED;
11668 HOST_WIDE_INT delta;
11669 HOST_WIDE_INT vcall_offset;
11672 rtx this, insn, funexp;
11674 reload_completed = 1;
11675 no_new_pseudos = 1;
11677 /* Mark the end of the (empty) prologue. */
11678 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
11680 /* Find the "this" pointer. If the function returns a structure,
11681 the structure return pointer is in r3. */
11682 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
11683 this = gen_rtx_REG (Pmode, 4);
11685 this = gen_rtx_REG (Pmode, 3);
11687 /* Apply the constant offset, if required. */
11690 rtx delta_rtx = GEN_INT (delta);
11691 emit_insn (TARGET_32BIT
11692 ? gen_addsi3 (this, this, delta_rtx)
11693 : gen_adddi3 (this, this, delta_rtx));
11696 /* Apply the offset from the vtable, if required. */
11699 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
11700 rtx tmp = gen_rtx_REG (Pmode, 12);
11702 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
11703 emit_insn (TARGET_32BIT
11704 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
11705 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
11706 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
11707 emit_insn (TARGET_32BIT
11708 ? gen_addsi3 (this, this, tmp)
11709 : gen_adddi3 (this, this, tmp));
11712 /* Generate a tail call to the target function. */
11713 if (!TREE_USED (function))
11715 assemble_external (function);
11716 TREE_USED (function) = 1;
11718 funexp = XEXP (DECL_RTL (function), 0);
11720 SYMBOL_REF_FLAG (funexp) = 0;
11721 if (current_file_function_operand (funexp, VOIDmode)
11722 && (! lookup_attribute ("longcall",
11723 TYPE_ATTRIBUTES (TREE_TYPE (function)))
11724 || lookup_attribute ("shortcall",
11725 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
11726 SYMBOL_REF_FLAG (funexp) = 1;
11728 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
11731 if (MACHOPIC_INDIRECT)
11732 funexp = machopic_indirect_call_target (funexp);
11735 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
11736 generate sibcall RTL explicitly to avoid constraint abort. */
11737 insn = emit_call_insn (
11738 gen_rtx_PARALLEL (VOIDmode,
11740 gen_rtx_CALL (VOIDmode,
11741 funexp, const0_rtx),
11742 gen_rtx_USE (VOIDmode, const0_rtx),
11743 gen_rtx_USE (VOIDmode,
11744 gen_rtx_REG (SImode,
11745 LINK_REGISTER_REGNUM)),
11746 gen_rtx_RETURN (VOIDmode))));
11747 SIBLING_CALL_P (insn) = 1;
11750 /* Run just enough of rest_of_compilation to get the insns emitted.
11751 There's not really enough bulk here to make other passes such as
11752 instruction scheduling worth while. Note that use_thunk calls
11753 assemble_start_function and assemble_end_function. */
11754 insn = get_insns ();
11755 shorten_branches (insn);
11756 final_start_function (insn, file, 1);
11757 final (insn, file, 1, 0);
11758 final_end_function ();
11760 reload_completed = 0;
11761 no_new_pseudos = 0;
11764 /* A quick summary of the various types of 'constant-pool tables'
11767 Target Flags Name One table per
11768 AIX (none) AIX TOC object file
11769 AIX -mfull-toc AIX TOC object file
11770 AIX -mminimal-toc AIX minimal TOC translation unit
11771 SVR4/EABI (none) SVR4 SDATA object file
11772 SVR4/EABI -fpic SVR4 pic object file
11773 SVR4/EABI -fPIC SVR4 PIC translation unit
11774 SVR4/EABI -mrelocatable EABI TOC function
11775 SVR4/EABI -maix AIX TOC object file
11776 SVR4/EABI -maix -mminimal-toc
11777 AIX minimal TOC translation unit
11779 Name Reg. Set by entries contains:
11780 made by addrs? fp? sum?
11782 AIX TOC 2 crt0 as Y option option
11783 AIX minimal TOC 30 prolog gcc Y Y option
11784 SVR4 SDATA 13 crt0 gcc N Y N
11785 SVR4 pic 30 prolog ld Y not yet N
11786 SVR4 PIC 30 prolog gcc Y option option
11787 EABI TOC 30 prolog gcc Y option option
11791 /* Hash functions for the hash table. */
11794 rs6000_hash_constant (k)
11797 enum rtx_code code = GET_CODE (k);
11798 enum machine_mode mode = GET_MODE (k);
11799 unsigned result = (code << 3) ^ mode;
11800 const char *format;
11803 format = GET_RTX_FORMAT (code);
11804 flen = strlen (format);
11810 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11813 if (mode != VOIDmode)
11814 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
11826 for (; fidx < flen; fidx++)
11827 switch (format[fidx])
11832 const char *str = XSTR (k, fidx);
11833 len = strlen (str);
11834 result = result * 613 + len;
11835 for (i = 0; i < len; i++)
11836 result = result * 613 + (unsigned) str[i];
11841 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11845 result = result * 613 + (unsigned) XINT (k, fidx);
11848 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11849 result = result * 613 + (unsigned) XWINT (k, fidx);
11853 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11854 result = result * 613 + (unsigned) (XWINT (k, fidx)
11866 toc_hash_function (hash_entry)
11867 const void * hash_entry;
11869 const struct toc_hash_struct *thc =
11870 (const struct toc_hash_struct *) hash_entry;
11871 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11874 /* Compare H1 and H2 for equivalence. */
11877 toc_hash_eq (h1, h2)
11881 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11882 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11884 if (((const struct toc_hash_struct *) h1)->key_mode
11885 != ((const struct toc_hash_struct *) h2)->key_mode)
11888 return rtx_equal_p (r1, r2);
11891 /* These are the names given by the C++ front-end to vtables, and
11892 vtable-like objects. Ideally, this logic should not be here;
11893 instead, there should be some programmatic way of inquiring as
11894 to whether or not an object is a vtable. */
11896 #define VTABLE_NAME_P(NAME) \
11897 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11898 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11899 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11900 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11903 rs6000_output_symbol_ref (file, x)
11907 /* Currently C++ toc references to vtables can be emitted before it
11908 is decided whether the vtable is public or private. If this is
11909 the case, then the linker will eventually complain that there is
11910 a reference to an unknown section. Thus, for vtables only,
11911 we emit the TOC reference to reference the symbol and not the
11913 const char *name = XSTR (x, 0);
11915 if (VTABLE_NAME_P (name))
11917 RS6000_OUTPUT_BASENAME (file, name);
11920 assemble_name (file, name);
11923 /* Output a TOC entry. We derive the entry name from what is being
11927 output_toc (file, x, labelno, mode)
11931 enum machine_mode mode;
11934 const char *name = buf;
11935 const char *real_name;
11942 /* When the linker won't eliminate them, don't output duplicate
11943 TOC entries (this happens on AIX if there is any kind of TOC,
11944 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
11946 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
11948 struct toc_hash_struct *h;
11951 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
11952 time because GGC is not initialised at that point. */
11953 if (toc_hash_table == NULL)
11954 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
11955 toc_hash_eq, NULL);
11957 h = ggc_alloc (sizeof (*h));
11959 h->key_mode = mode;
11960 h->labelno = labelno;
11962 found = htab_find_slot (toc_hash_table, h, 1);
11963 if (*found == NULL)
11965 else /* This is indeed a duplicate.
11966 Set this label equal to that label. */
11968 fputs ("\t.set ", file);
11969 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11970 fprintf (file, "%d,", labelno);
11971 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11972 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
11978 /* If we're going to put a double constant in the TOC, make sure it's
11979 aligned properly when strict alignment is on. */
11980 if (GET_CODE (x) == CONST_DOUBLE
11981 && STRICT_ALIGNMENT
11982 && GET_MODE_BITSIZE (mode) >= 64
11983 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
11984 ASM_OUTPUT_ALIGN (file, 3);
11987 (*targetm.asm_out.internal_label) (file, "LC", labelno);
11989 /* Handle FP constants specially. Note that if we have a minimal
11990 TOC, things we put here aren't actually in the TOC, so we can allow
11992 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
11994 REAL_VALUE_TYPE rv;
11997 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11998 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12002 if (TARGET_MINIMAL_TOC)
12003 fputs (DOUBLE_INT_ASM_OP, file);
12005 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12006 k[0] & 0xffffffff, k[1] & 0xffffffff,
12007 k[2] & 0xffffffff, k[3] & 0xffffffff);
12008 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12009 k[0] & 0xffffffff, k[1] & 0xffffffff,
12010 k[2] & 0xffffffff, k[3] & 0xffffffff);
12015 if (TARGET_MINIMAL_TOC)
12016 fputs ("\t.long ", file);
12018 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12019 k[0] & 0xffffffff, k[1] & 0xffffffff,
12020 k[2] & 0xffffffff, k[3] & 0xffffffff);
12021 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12022 k[0] & 0xffffffff, k[1] & 0xffffffff,
12023 k[2] & 0xffffffff, k[3] & 0xffffffff);
12027 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12029 REAL_VALUE_TYPE rv;
12032 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12033 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12037 if (TARGET_MINIMAL_TOC)
12038 fputs (DOUBLE_INT_ASM_OP, file);
12040 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12041 k[0] & 0xffffffff, k[1] & 0xffffffff);
12042 fprintf (file, "0x%lx%08lx\n",
12043 k[0] & 0xffffffff, k[1] & 0xffffffff);
12048 if (TARGET_MINIMAL_TOC)
12049 fputs ("\t.long ", file);
12051 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12052 k[0] & 0xffffffff, k[1] & 0xffffffff);
12053 fprintf (file, "0x%lx,0x%lx\n",
12054 k[0] & 0xffffffff, k[1] & 0xffffffff);
12058 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12060 REAL_VALUE_TYPE rv;
12063 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12064 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12068 if (TARGET_MINIMAL_TOC)
12069 fputs (DOUBLE_INT_ASM_OP, file);
12071 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12072 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12077 if (TARGET_MINIMAL_TOC)
12078 fputs ("\t.long ", file);
12080 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12081 fprintf (file, "0x%lx\n", l & 0xffffffff);
12085 else if (GET_MODE (x) == VOIDmode
12086 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12088 unsigned HOST_WIDE_INT low;
12089 HOST_WIDE_INT high;
12091 if (GET_CODE (x) == CONST_DOUBLE)
12093 low = CONST_DOUBLE_LOW (x);
12094 high = CONST_DOUBLE_HIGH (x);
12097 #if HOST_BITS_PER_WIDE_INT == 32
12100 high = (low & 0x80000000) ? ~0 : 0;
12104 low = INTVAL (x) & 0xffffffff;
12105 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12109 /* TOC entries are always Pmode-sized, but since this
12110 is a bigendian machine then if we're putting smaller
12111 integer constants in the TOC we have to pad them.
12112 (This is still a win over putting the constants in
12113 a separate constant pool, because then we'd have
12114 to have both a TOC entry _and_ the actual constant.)
12116 For a 32-bit target, CONST_INT values are loaded and shifted
12117 entirely within `low' and can be stored in one TOC entry. */
12119 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12120 abort ();/* It would be easy to make this work, but it doesn't now. */
12122 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12124 #if HOST_BITS_PER_WIDE_INT == 32
12125 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12126 POINTER_SIZE, &low, &high, 0);
12129 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12130 high = (HOST_WIDE_INT) low >> 32;
12137 if (TARGET_MINIMAL_TOC)
12138 fputs (DOUBLE_INT_ASM_OP, file);
12140 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12141 (long) high & 0xffffffff, (long) low & 0xffffffff);
12142 fprintf (file, "0x%lx%08lx\n",
12143 (long) high & 0xffffffff, (long) low & 0xffffffff);
12148 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12150 if (TARGET_MINIMAL_TOC)
12151 fputs ("\t.long ", file);
12153 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12154 (long) high & 0xffffffff, (long) low & 0xffffffff);
12155 fprintf (file, "0x%lx,0x%lx\n",
12156 (long) high & 0xffffffff, (long) low & 0xffffffff);
12160 if (TARGET_MINIMAL_TOC)
12161 fputs ("\t.long ", file);
12163 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12164 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12170 if (GET_CODE (x) == CONST)
12172 if (GET_CODE (XEXP (x, 0)) != PLUS)
12175 base = XEXP (XEXP (x, 0), 0);
12176 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12179 if (GET_CODE (base) == SYMBOL_REF)
12180 name = XSTR (base, 0);
12181 else if (GET_CODE (base) == LABEL_REF)
12182 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12183 else if (GET_CODE (base) == CODE_LABEL)
12184 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12188 real_name = (*targetm.strip_name_encoding) (name);
12189 if (TARGET_MINIMAL_TOC)
12190 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12193 fprintf (file, "\t.tc %s", real_name);
12196 fprintf (file, ".N%d", - offset);
12198 fprintf (file, ".P%d", offset);
12200 fputs ("[TC],", file);
12203 /* Currently C++ toc references to vtables can be emitted before it
12204 is decided whether the vtable is public or private. If this is
12205 the case, then the linker will eventually complain that there is
12206 a TOC reference to an unknown section. Thus, for vtables only,
12207 we emit the TOC reference to reference the symbol and not the
12209 if (VTABLE_NAME_P (name))
12211 RS6000_OUTPUT_BASENAME (file, name);
12213 fprintf (file, "%d", offset);
12214 else if (offset > 0)
12215 fprintf (file, "+%d", offset);
12218 output_addr_const (file, x);
12222 /* Output an assembler pseudo-op to write an ASCII string of N characters
12223 starting at P to FILE.
12225 On the RS/6000, we have to do this using the .byte operation and
12226 write out special characters outside the quoted string.
12227 Also, the assembler is broken; very long strings are truncated,
12228 so we must artificially break them up early. */
12231 output_ascii (file, p, n)
12237 int i, count_string;
12238 const char *for_string = "\t.byte \"";
12239 const char *for_decimal = "\t.byte ";
12240 const char *to_close = NULL;
12243 for (i = 0; i < n; i++)
12246 if (c >= ' ' && c < 0177)
12249 fputs (for_string, file);
12252 /* Write two quotes to get one. */
12260 for_decimal = "\"\n\t.byte ";
12264 if (count_string >= 512)
12266 fputs (to_close, file);
12268 for_string = "\t.byte \"";
12269 for_decimal = "\t.byte ";
12277 fputs (for_decimal, file);
12278 fprintf (file, "%d", c);
12280 for_string = "\n\t.byte \"";
12281 for_decimal = ", ";
12287 /* Now close the string if we have written one. Then end the line. */
12289 fputs (to_close, file);
12292 /* Generate a unique section name for FILENAME for a section type
12293 represented by SECTION_DESC. Output goes into BUF.
12295 SECTION_DESC can be any string, as long as it is different for each
12296 possible section type.
12298 We name the section in the same manner as xlc. The name begins with an
12299 underscore followed by the filename (after stripping any leading directory
12300 names) with the last period replaced by the string SECTION_DESC. If
12301 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12305 rs6000_gen_section_name (buf, filename, section_desc)
12307 const char *filename;
12308 const char *section_desc;
12310 const char *q, *after_last_slash, *last_period = 0;
12314 after_last_slash = filename;
12315 for (q = filename; *q; q++)
12318 after_last_slash = q + 1;
12319 else if (*q == '.')
12323 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12324 *buf = (char *) xmalloc (len);
12329 for (q = after_last_slash; *q; q++)
12331 if (q == last_period)
12333 strcpy (p, section_desc);
12334 p += strlen (section_desc);
12338 else if (ISALNUM (*q))
12342 if (last_period == 0)
12343 strcpy (p, section_desc);
12348 /* Emit profile function. */
12351 output_profile_hook (labelno)
12352 int labelno ATTRIBUTE_UNUSED;
12354 if (TARGET_PROFILE_KERNEL)
12357 if (DEFAULT_ABI == ABI_AIX)
12359 #ifdef NO_PROFILE_COUNTERS
12360 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12363 const char *label_name;
12366 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12367 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12368 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12370 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12374 else if (DEFAULT_ABI == ABI_DARWIN)
12376 const char *mcount_name = RS6000_MCOUNT;
12377 int caller_addr_regno = LINK_REGISTER_REGNUM;
12379 /* Be conservative and always set this, at least for now. */
12380 current_function_uses_pic_offset_table = 1;
12383 /* For PIC code, set up a stub and collect the caller's address
12384 from r0, which is where the prologue puts it. */
12385 if (MACHOPIC_INDIRECT)
12387 mcount_name = machopic_stub_name (mcount_name);
12388 if (current_function_uses_pic_offset_table)
12389 caller_addr_regno = 0;
12392 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12394 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12398 /* Write function profiler code. */
12401 output_function_profiler (file, labelno)
12408 switch (DEFAULT_ABI)
12415 /* Fall through. */
12417 case ABI_AIX_NODESC:
12420 warning ("no profiling of 64-bit code for this ABI");
12423 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12424 fprintf (file, "\tmflr %s\n", reg_names[0]);
12427 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12428 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12429 reg_names[0], save_lr, reg_names[1]);
12430 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12431 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12432 assemble_name (file, buf);
12433 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12435 else if (flag_pic > 1)
12437 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12438 reg_names[0], save_lr, reg_names[1]);
12439 /* Now, we need to get the address of the label. */
12440 fputs ("\tbl 1f\n\t.long ", file);
12441 assemble_name (file, buf);
12442 fputs ("-.\n1:", file);
12443 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12444 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12445 reg_names[0], reg_names[11]);
12446 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12447 reg_names[0], reg_names[0], reg_names[11]);
12451 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12452 assemble_name (file, buf);
12453 fputs ("@ha\n", file);
12454 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12455 reg_names[0], save_lr, reg_names[1]);
12456 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12457 assemble_name (file, buf);
12458 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12461 if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
12463 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12464 reg_names[STATIC_CHAIN_REGNUM],
12466 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12467 asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
12468 reg_names[STATIC_CHAIN_REGNUM],
12472 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12473 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12478 if (!TARGET_PROFILE_KERNEL)
12480 /* Don't do anything, done in output_profile_hook (). */
12487 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12488 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12490 if (current_function_needs_context)
12492 asm_fprintf (file, "\tstd %s,24(%s)\n",
12493 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12494 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12495 asm_fprintf (file, "\tld %s,24(%s)\n",
12496 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
12499 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12507 rs6000_use_dfa_pipeline_interface ()
12512 /* Power4 load update and store update instructions are cracked into a
12513 load or store and an integer insn which are executed in the same cycle.
12514 Branches have their own dispatch slot which does not count against the
12515 GCC issue rate, but it changes the program flow so there are no other
12516 instructions to issue in this cycle. */
12519 rs6000_variable_issue (stream, verbose, insn, more)
12520 FILE *stream ATTRIBUTE_UNUSED;
12521 int verbose ATTRIBUTE_UNUSED;
12525 if (GET_CODE (PATTERN (insn)) == USE
12526 || GET_CODE (PATTERN (insn)) == CLOBBER)
12529 if (rs6000_cpu == PROCESSOR_POWER4)
12531 enum attr_type type = get_attr_type (insn);
12532 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12533 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
12535 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12536 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12537 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12538 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12539 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12540 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12541 || type == TYPE_IDIV || type == TYPE_LDIV)
12542 return more > 2 ? more - 2 : 0;
12548 /* Adjust the cost of a scheduling dependency. Return the new cost of
12549 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12552 rs6000_adjust_cost (insn, link, dep_insn, cost)
12555 rtx dep_insn ATTRIBUTE_UNUSED;
12558 if (! recog_memoized (insn))
12561 if (REG_NOTE_KIND (link) != 0)
12564 if (REG_NOTE_KIND (link) == 0)
12566 /* Data dependency; DEP_INSN writes a register that INSN reads
12567 some cycles later. */
12568 switch (get_attr_type (insn))
12571 /* Tell the first scheduling pass about the latency between
12572 a mtctr and bctr (and mtlr and br/blr). The first
12573 scheduling pass will not know about this latency since
12574 the mtctr instruction, which has the latency associated
12575 to it, will be generated by reload. */
12576 return TARGET_POWER ? 5 : 4;
12578 /* Leave some extra cycles between a compare and its
12579 dependent branch, to inhibit expensive mispredicts. */
12580 if ((rs6000_cpu_attr == CPU_PPC603
12581 || rs6000_cpu_attr == CPU_PPC604
12582 || rs6000_cpu_attr == CPU_PPC604E
12583 || rs6000_cpu_attr == CPU_PPC620
12584 || rs6000_cpu_attr == CPU_PPC630
12585 || rs6000_cpu_attr == CPU_PPC750
12586 || rs6000_cpu_attr == CPU_PPC7400
12587 || rs6000_cpu_attr == CPU_PPC7450
12588 || rs6000_cpu_attr == CPU_POWER4)
12589 && recog_memoized (dep_insn)
12590 && (INSN_CODE (dep_insn) >= 0)
12591 && (get_attr_type (dep_insn) == TYPE_CMP
12592 || get_attr_type (dep_insn) == TYPE_COMPARE
12593 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12594 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12595 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12596 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12597 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12598 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12603 /* Fall out to return default cost. */
12609 /* A C statement (sans semicolon) to update the integer scheduling
12610 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12611 INSN earlier, increase the priority to execute INSN later. Do not
12612 define this macro if you do not need to adjust the scheduling
12613 priorities of insns. */
12616 rs6000_adjust_priority (insn, priority)
12617 rtx insn ATTRIBUTE_UNUSED;
12620 /* On machines (like the 750) which have asymmetric integer units,
12621 where one integer unit can do multiply and divides and the other
12622 can't, reduce the priority of multiply/divide so it is scheduled
12623 before other integer operations. */
12626 if (! INSN_P (insn))
12629 if (GET_CODE (PATTERN (insn)) == USE)
12632 switch (rs6000_cpu_attr) {
12634 switch (get_attr_type (insn))
12641 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12642 priority, priority);
12643 if (priority >= 0 && priority < 0x01000000)
12653 /* Return how many instructions the machine can issue per cycle. */
12656 rs6000_issue_rate ()
12658 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12659 if (!reload_completed)
12662 switch (rs6000_cpu_attr) {
12663 case CPU_RIOS1: /* ? */
12665 case CPU_PPC601: /* ? */
12686 /* Return how many instructions to look ahead for better insn
12690 rs6000_use_sched_lookahead ()
12692 if (rs6000_cpu_attr == CPU_PPC8540)
12698 /* Length in units of the trampoline for entering a nested function. */
12701 rs6000_trampoline_size ()
12705 switch (DEFAULT_ABI)
12711 ret = (TARGET_32BIT) ? 12 : 24;
12716 case ABI_AIX_NODESC:
12717 ret = (TARGET_32BIT) ? 40 : 48;
12724 /* Emit RTL insns to initialize the variable parts of a trampoline.
12725 FNADDR is an RTX for the address of the function's pure code.
12726 CXT is an RTX for the static chain value for the function. */
12729 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12734 enum machine_mode pmode = Pmode;
12735 int regsize = (TARGET_32BIT) ? 4 : 8;
12736 rtx ctx_reg = force_reg (pmode, cxt);
12738 switch (DEFAULT_ABI)
12743 /* Macros to shorten the code expansions below. */
12744 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12745 #define MEM_PLUS(addr,offset) \
12746 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12748 /* Under AIX, just build the 3 word function descriptor */
12751 rtx fn_reg = gen_reg_rtx (pmode);
12752 rtx toc_reg = gen_reg_rtx (pmode);
12753 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12754 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12755 emit_move_insn (MEM_DEREF (addr), fn_reg);
12756 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12757 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12761 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12764 case ABI_AIX_NODESC:
12765 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12766 FALSE, VOIDmode, 4,
12768 GEN_INT (rs6000_trampoline_size ()), SImode,
12778 /* Table of valid machine attributes. */
12780 const struct attribute_spec rs6000_attribute_table[] =
12782 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12783 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12784 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12785 { NULL, 0, 0, false, false, false, NULL }
12788 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12789 struct attribute_spec.handler. */
12792 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12795 tree args ATTRIBUTE_UNUSED;
12796 int flags ATTRIBUTE_UNUSED;
12797 bool *no_add_attrs;
12799 if (TREE_CODE (*node) != FUNCTION_TYPE
12800 && TREE_CODE (*node) != FIELD_DECL
12801 && TREE_CODE (*node) != TYPE_DECL)
12803 warning ("`%s' attribute only applies to functions",
12804 IDENTIFIER_POINTER (name));
12805 *no_add_attrs = true;
12811 /* Set longcall attributes on all functions declared when
12812 rs6000_default_long_calls is true. */
12814 rs6000_set_default_type_attributes (type)
12817 if (rs6000_default_long_calls
12818 && (TREE_CODE (type) == FUNCTION_TYPE
12819 || TREE_CODE (type) == METHOD_TYPE))
12820 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12822 TYPE_ATTRIBUTES (type));
12825 /* Return a reference suitable for calling a function with the
12826 longcall attribute. */
12829 rs6000_longcall_ref (call_ref)
12832 const char *call_name;
12835 if (GET_CODE (call_ref) != SYMBOL_REF)
12838 /* System V adds '.' to the internal name, so skip them. */
12839 call_name = XSTR (call_ref, 0);
12840 if (*call_name == '.')
12842 while (*call_name == '.')
12845 node = get_identifier (call_name);
12846 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12849 return force_reg (Pmode, call_ref);
12853 #ifdef USING_ELFOS_H
12855 /* A C statement or statements to switch to the appropriate section
12856 for output of RTX in mode MODE. You can assume that RTX is some
12857 kind of constant in RTL. The argument MODE is redundant except in
12858 the case of a `const_int' rtx. Select the section by calling
12859 `text_section' or one of the alternatives for other sections.
12861 Do not define this macro if you put all constants in the read-only
12865 rs6000_elf_select_rtx_section (mode, x, align)
12866 enum machine_mode mode;
12868 unsigned HOST_WIDE_INT align;
12870 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12873 default_elf_select_rtx_section (mode, x, align);
12876 /* A C statement or statements to switch to the appropriate
12877 section for output of DECL. DECL is either a `VAR_DECL' node
12878 or a constant of some sort. RELOC indicates whether forming
12879 the initial value of DECL requires link-time relocations. */
12882 rs6000_elf_select_section (decl, reloc, align)
12885 unsigned HOST_WIDE_INT align;
12887 /* Pretend that we're always building for a shared library when
12888 ABI_AIX, because otherwise we end up with dynamic relocations
12889 in read-only sections. This happens for function pointers,
12890 references to vtables in typeinfo, and probably other cases. */
12891 default_elf_select_section_1 (decl, reloc, align,
12892 flag_pic || DEFAULT_ABI == ABI_AIX);
12895 /* A C statement to build up a unique section name, expressed as a
12896 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12897 RELOC indicates whether the initial value of EXP requires
12898 link-time relocations. If you do not define this macro, GCC will use
12899 the symbol name prefixed by `.' as the section name. Note - this
12900 macro can now be called for uninitialized data items as well as
12901 initialized data and functions. */
12904 rs6000_elf_unique_section (decl, reloc)
12908 /* As above, pretend that we're always building for a shared library
12909 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12910 default_unique_section_1 (decl, reloc,
12911 flag_pic || DEFAULT_ABI == ABI_AIX);
12914 /* If we are referencing a function that is static or is known to be
12915 in this file, make the SYMBOL_REF special. We can use this to indicate
12916 that we can branch to this function without emitting a no-op after the
12917 call. For real AIX calling sequences, we also replace the
12918 function name with the real name (1 or 2 leading .'s), rather than
12919 the function descriptor name. This saves a lot of overriding code
12920 to read the prefixes. */
12923 rs6000_elf_encode_section_info (decl, first)
12930 if (TREE_CODE (decl) == FUNCTION_DECL)
12932 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12933 if ((*targetm.binds_local_p) (decl))
12934 SYMBOL_REF_FLAG (sym_ref) = 1;
12936 if (!TARGET_AIX && DEFAULT_ABI == ABI_AIX)
12938 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12939 size_t len2 = strlen (XSTR (sym_ref, 0));
12940 char *str = alloca (len1 + len2 + 1);
12943 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12945 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12948 else if (rs6000_sdata != SDATA_NONE
12949 && DEFAULT_ABI == ABI_V4
12950 && TREE_CODE (decl) == VAR_DECL)
12952 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12953 int size = int_size_in_bytes (TREE_TYPE (decl));
12954 tree section_name = DECL_SECTION_NAME (decl);
12955 const char *name = (char *)0;
12958 if ((*targetm.binds_local_p) (decl))
12959 SYMBOL_REF_FLAG (sym_ref) = 1;
12963 if (TREE_CODE (section_name) == STRING_CST)
12965 name = TREE_STRING_POINTER (section_name);
12966 len = TREE_STRING_LENGTH (section_name);
12973 ? ((len == sizeof (".sdata") - 1
12974 && strcmp (name, ".sdata") == 0)
12975 || (len == sizeof (".sdata2") - 1
12976 && strcmp (name, ".sdata2") == 0)
12977 || (len == sizeof (".sbss") - 1
12978 && strcmp (name, ".sbss") == 0)
12979 || (len == sizeof (".sbss2") - 1
12980 && strcmp (name, ".sbss2") == 0)
12981 || (len == sizeof (".PPC.EMB.sdata0") - 1
12982 && strcmp (name, ".PPC.EMB.sdata0") == 0)
12983 || (len == sizeof (".PPC.EMB.sbss0") - 1
12984 && strcmp (name, ".PPC.EMB.sbss0") == 0))
12985 : (size > 0 && size <= g_switch_value))
12987 size_t len = strlen (XSTR (sym_ref, 0));
12988 char *str = alloca (len + 2);
12991 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12992 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12997 static const char *
12998 rs6000_elf_strip_name_encoding (str)
13001 while (*str == '*' || *str == '@')
13007 rs6000_elf_in_small_data_p (decl)
13010 if (rs6000_sdata == SDATA_NONE)
13013 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13015 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13016 if (strcmp (section, ".sdata") == 0
13017 || strcmp (section, ".sdata2") == 0
13018 || strcmp (section, ".sbss") == 0)
13023 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13026 && size <= g_switch_value
13027 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13034 #endif /* USING_ELFOS_H */
13037 /* Return a REG that occurs in ADDR with coefficient 1.
13038 ADDR can be effectively incremented by incrementing REG.
13040 r0 is special and we must not select it as an address
13041 register by this routine since our caller will try to
13042 increment the returned register via an "la" instruction. */
13045 find_addr_reg (addr)
13048 while (GET_CODE (addr) == PLUS)
13050 if (GET_CODE (XEXP (addr, 0)) == REG
13051 && REGNO (XEXP (addr, 0)) != 0)
13052 addr = XEXP (addr, 0);
13053 else if (GET_CODE (XEXP (addr, 1)) == REG
13054 && REGNO (XEXP (addr, 1)) != 0)
13055 addr = XEXP (addr, 1);
13056 else if (CONSTANT_P (XEXP (addr, 0)))
13057 addr = XEXP (addr, 1);
13058 else if (CONSTANT_P (XEXP (addr, 1)))
13059 addr = XEXP (addr, 0);
13063 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13069 rs6000_fatal_bad_address (op)
13072 fatal_insn ("bad address", op);
13078 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13079 reference and a constant. */
13082 symbolic_operand (op)
13085 switch (GET_CODE (op))
13092 return (GET_CODE (op) == SYMBOL_REF ||
13093 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13094 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13095 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13102 #ifdef RS6000_LONG_BRANCH
13104 static tree stub_list = 0;
13106 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13107 procedure calls to the linked list. */
13110 add_compiler_stub (label_name, function_name, line_number)
13112 tree function_name;
13115 tree stub = build_tree_list (function_name, label_name);
13116 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13117 TREE_CHAIN (stub) = stub_list;
13121 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13122 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13123 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13125 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13126 handling procedure calls from the linked list and initializes the
13130 output_compiler_stub ()
13133 char label_buf[256];
13137 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13139 fprintf (asm_out_file,
13140 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13142 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13143 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13144 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13145 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13147 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13149 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13152 label_buf[0] = '_';
13153 strcpy (label_buf+1,
13154 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13157 strcpy (tmp_buf, "lis r12,hi16(");
13158 strcat (tmp_buf, label_buf);
13159 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13160 strcat (tmp_buf, label_buf);
13161 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13162 output_asm_insn (tmp_buf, 0);
13164 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13165 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13166 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13167 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13173 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13174 already there or not. */
13177 no_previous_def (function_name)
13178 tree function_name;
13181 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13182 if (function_name == STUB_FUNCTION_NAME (stub))
13187 /* GET_PREV_LABEL gets the label name from the previous definition of
13191 get_prev_label (function_name)
13192 tree function_name;
13195 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13196 if (function_name == STUB_FUNCTION_NAME (stub))
13197 return STUB_LABEL_NAME (stub);
13201 /* INSN is either a function call or a millicode call. It may have an
13202 unconditional jump in its delay slot.
13204 CALL_DEST is the routine we are calling. */
13207 output_call (insn, call_dest, operand_number)
13210 int operand_number;
13212 static char buf[256];
13213 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13216 tree funname = get_identifier (XSTR (call_dest, 0));
13218 if (no_previous_def (funname))
13220 int line_number = 0;
13221 rtx label_rtx = gen_label_rtx ();
13222 char *label_buf, temp_buf[256];
13223 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13224 CODE_LABEL_NUMBER (label_rtx));
13225 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13226 labelname = get_identifier (label_buf);
13227 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13229 line_number = NOTE_LINE_NUMBER (insn);
13230 add_compiler_stub (labelname, funname, line_number);
13233 labelname = get_prev_label (funname);
13235 sprintf (buf, "jbsr %%z%d,%.246s",
13236 operand_number, IDENTIFIER_POINTER (labelname));
13241 sprintf (buf, "bl %%z%d", operand_number);
13246 #endif /* RS6000_LONG_BRANCH */
13248 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13250 const char *const symbol_ = (SYMBOL); \
13251 char *buffer_ = (BUF); \
13252 if (symbol_[0] == '"') \
13254 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13256 else if (name_needs_quotes(symbol_)) \
13258 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13262 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13267 /* Generate PIC and indirect symbol stubs. */
13270 machopic_output_stub (file, symb, stub)
13272 const char *symb, *stub;
13274 unsigned int length;
13275 char *symbol_name, *lazy_ptr_name;
13276 char *local_label_0;
13277 static int label = 0;
13279 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13280 symb = (*targetm.strip_name_encoding) (symb);
13284 length = strlen (symb);
13285 symbol_name = alloca (length + 32);
13286 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13288 lazy_ptr_name = alloca (length + 32);
13289 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13291 local_label_0 = alloca (length + 32);
13292 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13295 machopic_picsymbol_stub1_section ();
13297 machopic_symbol_stub1_section ();
13298 fprintf (file, "\t.align 2\n");
13300 fprintf (file, "%s:\n", stub);
13301 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13305 fprintf (file, "\tmflr r0\n");
13306 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13307 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13308 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13309 lazy_ptr_name, local_label_0);
13310 fprintf (file, "\tmtlr r0\n");
13311 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13312 lazy_ptr_name, local_label_0);
13313 fprintf (file, "\tmtctr r12\n");
13314 fprintf (file, "\tbctr\n");
13318 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13319 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13320 fprintf (file, "\tmtctr r12\n");
13321 fprintf (file, "\tbctr\n");
13324 machopic_lazy_symbol_ptr_section ();
13325 fprintf (file, "%s:\n", lazy_ptr_name);
13326 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13327 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13330 /* Legitimize PIC addresses. If the address is already
13331 position-independent, we return ORIG. Newly generated
13332 position-independent addresses go into a reg. This is REG if non
13333 zero, otherwise we allocate register(s) as necessary. */
13335 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13338 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13340 enum machine_mode mode;
13345 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13346 reg = gen_reg_rtx (Pmode);
13348 if (GET_CODE (orig) == CONST)
13350 if (GET_CODE (XEXP (orig, 0)) == PLUS
13351 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13354 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13357 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13360 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13366 if (GET_CODE (offset) == CONST_INT)
13368 if (SMALL_INT (offset))
13369 return plus_constant (base, INTVAL (offset));
13370 else if (! reload_in_progress && ! reload_completed)
13371 offset = force_reg (Pmode, offset);
13374 rtx mem = force_const_mem (Pmode, orig);
13375 return machopic_legitimize_pic_address (mem, Pmode, reg);
13378 return gen_rtx (PLUS, Pmode, base, offset);
13381 /* Fall back on generic machopic code. */
13382 return machopic_legitimize_pic_address (orig, mode, reg);
13385 /* This is just a placeholder to make linking work without having to
13386 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13387 ever needed for Darwin (not too likely!) this would have to get a
13388 real definition. */
13395 #endif /* TARGET_MACHO */
13398 static unsigned int
13399 rs6000_elf_section_type_flags (decl, name, reloc)
13405 = default_section_type_flags_1 (decl, name, reloc,
13406 flag_pic || DEFAULT_ABI == ABI_AIX);
13408 if (TARGET_RELOCATABLE)
13409 flags |= SECTION_WRITE;
13414 /* Record an element in the table of global constructors. SYMBOL is
13415 a SYMBOL_REF of the function to be called; PRIORITY is a number
13416 between 0 and MAX_INIT_PRIORITY.
13418 This differs from default_named_section_asm_out_constructor in
13419 that we have special handling for -mrelocatable. */
13422 rs6000_elf_asm_out_constructor (symbol, priority)
13426 const char *section = ".ctors";
13429 if (priority != DEFAULT_INIT_PRIORITY)
13431 sprintf (buf, ".ctors.%.5u",
13432 /* Invert the numbering so the linker puts us in the proper
13433 order; constructors are run from right to left, and the
13434 linker sorts in increasing order. */
13435 MAX_INIT_PRIORITY - priority);
13439 named_section_flags (section, SECTION_WRITE);
13440 assemble_align (POINTER_SIZE);
13442 if (TARGET_RELOCATABLE)
13444 fputs ("\t.long (", asm_out_file);
13445 output_addr_const (asm_out_file, symbol);
13446 fputs (")@fixup\n", asm_out_file);
13449 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13453 rs6000_elf_asm_out_destructor (symbol, priority)
13457 const char *section = ".dtors";
13460 if (priority != DEFAULT_INIT_PRIORITY)
13462 sprintf (buf, ".dtors.%.5u",
13463 /* Invert the numbering so the linker puts us in the proper
13464 order; constructors are run from right to left, and the
13465 linker sorts in increasing order. */
13466 MAX_INIT_PRIORITY - priority);
13470 named_section_flags (section, SECTION_WRITE);
13471 assemble_align (POINTER_SIZE);
13473 if (TARGET_RELOCATABLE)
13475 fputs ("\t.long (", asm_out_file);
13476 output_addr_const (asm_out_file, symbol);
13477 fputs (")@fixup\n", asm_out_file);
13480 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13486 rs6000_xcoff_asm_globalize_label (stream, name)
13490 fputs (GLOBAL_ASM_OP, stream);
13491 RS6000_OUTPUT_BASENAME (stream, name);
13492 putc ('\n', stream);
13496 rs6000_xcoff_asm_named_section (name, flags)
13498 unsigned int flags;
13501 static const char * const suffix[3] = { "PR", "RO", "RW" };
13503 if (flags & SECTION_CODE)
13505 else if (flags & SECTION_WRITE)
13510 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13511 (flags & SECTION_CODE) ? "." : "",
13512 name, suffix[smclass], flags & SECTION_ENTSIZE);
13516 rs6000_xcoff_select_section (decl, reloc, align)
13519 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13521 if (decl_readonly_section_1 (decl, reloc, 1))
13523 if (TREE_PUBLIC (decl))
13524 read_only_data_section ();
13526 read_only_private_data_section ();
13530 if (TREE_PUBLIC (decl))
13533 private_data_section ();
13538 rs6000_xcoff_unique_section (decl, reloc)
13540 int reloc ATTRIBUTE_UNUSED;
13544 /* Use select_section for private and uninitialized data. */
13545 if (!TREE_PUBLIC (decl)
13546 || DECL_COMMON (decl)
13547 || DECL_INITIAL (decl) == NULL_TREE
13548 || DECL_INITIAL (decl) == error_mark_node
13549 || (flag_zero_initialized_in_bss
13550 && initializer_zerop (DECL_INITIAL (decl))))
13553 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13554 name = (*targetm.strip_name_encoding) (name);
13555 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
13558 /* Select section for constant in constant pool.
13560 On RS/6000, all constants are in the private read-only data area.
13561 However, if this is being placed in the TOC it must be output as a
13565 rs6000_xcoff_select_rtx_section (mode, x, align)
13566 enum machine_mode mode;
13568 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13570 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13573 read_only_private_data_section ();
13576 /* Remove any trailing [DS] or the like from the symbol name. */
13578 static const char *
13579 rs6000_xcoff_strip_name_encoding (name)
13585 len = strlen (name);
13586 if (name[len - 1] == ']')
13587 return ggc_alloc_string (name, len - 4);
13592 /* Section attributes. AIX is always PIC. */
13594 static unsigned int
13595 rs6000_xcoff_section_type_flags (decl, name, reloc)
13600 unsigned int align;
13601 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
13603 /* Align to at least UNIT size. */
13604 if (flags & SECTION_CODE)
13605 align = MIN_UNITS_PER_WORD;
13607 /* Increase alignment of large objects if not already stricter. */
13608 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
13609 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
13610 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
13612 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
13616 rs6000_xcoff_encode_section_info (decl, first)
13618 int first ATTRIBUTE_UNUSED;
13620 if (TREE_CODE (decl) == FUNCTION_DECL
13621 && (*targetm.binds_local_p) (decl))
13622 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13624 #endif /* TARGET_XCOFF */
13627 /* Cross-module name binding. Darwin does not support overriding
13628 functions at dynamic-link time. */
13631 rs6000_binds_local_p (decl)
13634 return default_binds_local_p_1 (decl, 0);
13638 /* Compute a (partial) cost for rtx X. Return true if the complete
13639 cost has been computed, and false if subexpressions should be
13640 scanned. In either case, *TOTAL contains the cost result. */
13643 rs6000_rtx_costs (x, code, outer_code, total)
13645 int code, outer_code ATTRIBUTE_UNUSED;
13650 /* On the RS/6000, if it is valid in the insn, it is free.
13651 So this always returns 0. */
13662 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13663 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
13664 + 0x8000) >= 0x10000)
13665 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13666 ? COSTS_N_INSNS (2)
13667 : COSTS_N_INSNS (1));
13673 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
13674 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
13675 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
13676 ? COSTS_N_INSNS (2)
13677 : COSTS_N_INSNS (1));
13683 *total = COSTS_N_INSNS (2);
13686 switch (rs6000_cpu)
13688 case PROCESSOR_RIOS1:
13689 case PROCESSOR_PPC405:
13690 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13691 ? COSTS_N_INSNS (5)
13692 : (INTVAL (XEXP (x, 1)) >= -256
13693 && INTVAL (XEXP (x, 1)) <= 255)
13694 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13697 case PROCESSOR_RS64A:
13698 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13699 ? GET_MODE (XEXP (x, 1)) != DImode
13700 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
13701 : (INTVAL (XEXP (x, 1)) >= -256
13702 && INTVAL (XEXP (x, 1)) <= 255)
13703 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
13706 case PROCESSOR_RIOS2:
13707 case PROCESSOR_MPCCORE:
13708 case PROCESSOR_PPC604e:
13709 *total = COSTS_N_INSNS (2);
13712 case PROCESSOR_PPC601:
13713 *total = COSTS_N_INSNS (5);
13716 case PROCESSOR_PPC603:
13717 case PROCESSOR_PPC7400:
13718 case PROCESSOR_PPC750:
13719 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13720 ? COSTS_N_INSNS (5)
13721 : (INTVAL (XEXP (x, 1)) >= -256
13722 && INTVAL (XEXP (x, 1)) <= 255)
13723 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
13726 case PROCESSOR_PPC7450:
13727 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13728 ? COSTS_N_INSNS (4)
13729 : COSTS_N_INSNS (3));
13732 case PROCESSOR_PPC403:
13733 case PROCESSOR_PPC604:
13734 case PROCESSOR_PPC8540:
13735 *total = COSTS_N_INSNS (4);
13738 case PROCESSOR_PPC620:
13739 case PROCESSOR_PPC630:
13740 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13741 ? GET_MODE (XEXP (x, 1)) != DImode
13742 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
13743 : (INTVAL (XEXP (x, 1)) >= -256
13744 && INTVAL (XEXP (x, 1)) <= 255)
13745 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
13748 case PROCESSOR_POWER4:
13749 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
13750 ? GET_MODE (XEXP (x, 1)) != DImode
13751 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
13752 : COSTS_N_INSNS (2));
13761 if (GET_CODE (XEXP (x, 1)) == CONST_INT
13762 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
13764 *total = COSTS_N_INSNS (2);
13771 switch (rs6000_cpu)
13773 case PROCESSOR_RIOS1:
13774 *total = COSTS_N_INSNS (19);
13777 case PROCESSOR_RIOS2:
13778 *total = COSTS_N_INSNS (13);
13781 case PROCESSOR_RS64A:
13782 *total = (GET_MODE (XEXP (x, 1)) != DImode
13783 ? COSTS_N_INSNS (65)
13784 : COSTS_N_INSNS (67));
13787 case PROCESSOR_MPCCORE:
13788 *total = COSTS_N_INSNS (6);
13791 case PROCESSOR_PPC403:
13792 *total = COSTS_N_INSNS (33);
13795 case PROCESSOR_PPC405:
13796 *total = COSTS_N_INSNS (35);
13799 case PROCESSOR_PPC601:
13800 *total = COSTS_N_INSNS (36);
13803 case PROCESSOR_PPC603:
13804 *total = COSTS_N_INSNS (37);
13807 case PROCESSOR_PPC604:
13808 case PROCESSOR_PPC604e:
13809 *total = COSTS_N_INSNS (20);
13812 case PROCESSOR_PPC620:
13813 case PROCESSOR_PPC630:
13814 *total = (GET_MODE (XEXP (x, 1)) != DImode
13815 ? COSTS_N_INSNS (21)
13816 : COSTS_N_INSNS (37));
13819 case PROCESSOR_PPC750:
13820 case PROCESSOR_PPC8540:
13821 case PROCESSOR_PPC7400:
13822 *total = COSTS_N_INSNS (19);
13825 case PROCESSOR_PPC7450:
13826 *total = COSTS_N_INSNS (23);
13829 case PROCESSOR_POWER4:
13830 *total = (GET_MODE (XEXP (x, 1)) != DImode
13831 ? COSTS_N_INSNS (18)
13832 : COSTS_N_INSNS (34));
13840 *total = COSTS_N_INSNS (4);
13844 /* MEM should be slightly more expensive than (plus (reg) (const)) */
13853 /* A C expression returning the cost of moving data from a register of class
13854 CLASS1 to one of CLASS2. */
13857 rs6000_register_move_cost (mode, from, to)
13858 enum machine_mode mode;
13859 enum reg_class from, to;
13861 /* Moves from/to GENERAL_REGS. */
13862 if (reg_classes_intersect_p (to, GENERAL_REGS)
13863 || reg_classes_intersect_p (from, GENERAL_REGS))
13865 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13868 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13869 return (rs6000_memory_move_cost (mode, from, 0)
13870 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13872 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13873 else if (from == CR_REGS)
13877 /* A move will cost one instruction per GPR moved. */
13878 return 2 * HARD_REGNO_NREGS (0, mode);
13881 /* Moving between two similar registers is just one instruction. */
13882 else if (reg_classes_intersect_p (to, from))
13883 return mode == TFmode ? 4 : 2;
13885 /* Everything else has to go through GENERAL_REGS. */
13887 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13888 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13891 /* A C expressions returning the cost of moving data of MODE from a register to
13895 rs6000_memory_move_cost (mode, class, in)
13896 enum machine_mode mode;
13897 enum reg_class class;
13898 int in ATTRIBUTE_UNUSED;
13900 if (reg_classes_intersect_p (class, GENERAL_REGS))
13901 return 4 * HARD_REGNO_NREGS (0, mode);
13902 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13903 return 4 * HARD_REGNO_NREGS (32, mode);
13904 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13905 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13907 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13910 /* Return true if TYPE is of type __ev64_opaque__. */
13913 is_ev64_opaque_type (type)
13917 && TREE_CODE (type) == VECTOR_TYPE
13918 && TYPE_NAME (type)
13919 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13920 && DECL_NAME (TYPE_NAME (type))
13921 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13922 "__ev64_opaque__") == 0);
13926 rs6000_dwarf_register_span (reg)
13931 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
13934 regno = REGNO (reg);
13936 /* The duality of the SPE register size wreaks all kinds of havoc.
13937 This is a way of distinguishing r0 in 32-bits from r0 in
13940 gen_rtx_PARALLEL (VOIDmode,
13943 gen_rtx_REG (SImode, regno + 1200),
13944 gen_rtx_REG (SImode, regno))
13946 gen_rtx_REG (SImode, regno),
13947 gen_rtx_REG (SImode, regno + 1200)));
13950 #include "gt-rs6000.h"