1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
/* Provide a default so subtargets that do not define this still build.
   NOTE(review): the matching #endif is not visible in this listing --
   confirm against the original file.  */
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
/* NOTE(review): classic min/max macros -- each argument is evaluated
   twice, so never pass expressions with side effects, e.g. min (i++, n).  */
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
/* Processor/tuning selected after option processing.  */
61 enum processor_type rs6000_cpu;
/* Table of CPU-selection switches; entries are filled in from
   --with-cpu=, -mcpu= and -mtune= and consumed by
   rs6000_override_options.  NOTE(review): the opening brace of the
   initializer and the closing "};" are missing from this listing.  */
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
/* NOTE(review): the declarations that belong to the next three comments
   (SPE-ABI flag, isel flag, FPR flag) are absent from this listing --
   only the comments survive; verify against the original file.  */
83 /* Nonzero if we want SPE ABI extensions. */
86 /* Whether isel instructions should be generated. */
89 /* Nonzero if we have FPRs. */
92 /* String from -misel=. */
93 const char *rs6000_isel_string;
95 /* Set to non-zero once AIX common-mode calls have been defined. */
96 static int common_mode_defined;
98 /* Save information from a "cmpxx" operation until the branch or scc is
100 rtx rs6000_compare_op0, rs6000_compare_op1;
101 int rs6000_compare_fp_p;
103 /* Label number of label created for -mrelocatable, to call to so we can
104 get the address of the GOT section */
105 int rs6000_pic_labelno;
108 /* Which abi to adhere to */
109 const char *rs6000_abi_name = RS6000_ABI_NAME;
111 /* Semantics of the small data area */
112 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
114 /* Which small data model to use */
115 const char *rs6000_sdata_name = (char *)0;
117 /* Counter for labels which are to be placed in .fixup. */
118 int fixuplabelno = 0;
121 /* ABI enumeration available for subtarget to use. */
122 enum rs6000_abi rs6000_current_abi;
124 /* ABI string from -mabi= option. */
125 const char *rs6000_abi_string;
/* Debugging support, controlled by -mdebug=.  */
128 const char *rs6000_debug_name;
129 int rs6000_debug_stack; /* debug stack applications */
130 int rs6000_debug_arg; /* debug argument handling */
/* Buffer holding the generated TOC label name (see
   ASM_GENERATE_INTERNAL_LABEL in rs6000_override_options).  */
132 /* Flag to say the TOC is initialized */
134 char toc_label_name[10];
136 /* Alias set for saves and restores from the rs6000 stack. */
137 static int rs6000_sr_alias_set;
139 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
140 The only place that looks at this is rs6000_set_default_type_attributes;
141 everywhere else should rely on the presence or absence of a longcall
142 attribute on the function declaration. */
143 int rs6000_default_long_calls;
144 const char *rs6000_longcall_switch;
/* Descriptor for one target builtin: the insn used to expand it, its
   source-level name, and its rs6000_builtins enum code.
   NOTE(review): the opening brace, the non-const "mask" member the
   comment below refers to, and the closing "};" are missing from this
   listing -- confirm against the original file.  */
146 struct builtin_description
148 /* mask is not const because we're going to alter it below. This
149 nonsense will go away when we rewrite the -march infrastructure
150 to give us more target flag bits. */
152 const enum insn_code icode;
153 const char *const name;
154 const enum rs6000_builtins code;
/* Forward declarations for the file-local helpers defined below.
   PARAMS is the pre-C89-compatibility prototype macro used throughout
   GCC of this era.  NOTE(review): some multi-line prototypes in this
   listing are missing their continuation lines (e.g. the
   rs6000_elf_section_type_flags declaration below).  */
157 static void rs6000_add_gc_roots PARAMS ((void));
158 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
159 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
160 static void validate_condition_mode
161 PARAMS ((enum rtx_code, enum machine_mode));
162 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
163 static void rs6000_maybe_dead PARAMS ((rtx));
164 static void rs6000_emit_stack_tie PARAMS ((void));
165 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
166 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
167 unsigned int, int, int));
168 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
169 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
170 static unsigned rs6000_hash_constant PARAMS ((rtx));
171 static unsigned toc_hash_function PARAMS ((const void *));
172 static int toc_hash_eq PARAMS ((const void *, const void *));
173 static int toc_hash_mark_entry PARAMS ((void **, void *));
174 static void toc_hash_mark_table PARAMS ((void *));
175 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
176 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
177 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
178 static int rs6000_ra_ever_killed PARAMS ((void));
179 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
180 const struct attribute_spec rs6000_attribute_table[];
181 static void rs6000_set_default_type_attributes PARAMS ((tree));
182 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
183 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
184 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
185 HOST_WIDE_INT, HOST_WIDE_INT));
187 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
189 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
190 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
191 static void rs6000_elf_select_section PARAMS ((tree, int,
192 unsigned HOST_WIDE_INT));
193 static void rs6000_elf_unique_section PARAMS ((tree, int));
194 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
195 unsigned HOST_WIDE_INT));
196 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
197 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
200 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
201 static void rs6000_xcoff_select_section PARAMS ((tree, int,
202 unsigned HOST_WIDE_INT));
203 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
204 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
205 unsigned HOST_WIDE_INT));
206 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
/* Fix: this prototype was missing its terminating semicolon; without it
   the declaration would syntactically swallow the following declaration.
   Every sibling prototype in this file ends with ';'.  */
208 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int));
/* Scheduler, builtin-expansion and option-parsing helpers defined
   later in this file.  */
210 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
211 static int rs6000_adjust_priority PARAMS ((rtx, int));
212 static int rs6000_issue_rate PARAMS ((void));
214 static void rs6000_init_builtins PARAMS ((void));
215 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
216 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
217 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
218 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
219 static void altivec_init_builtins PARAMS ((void));
220 static void rs6000_common_init_builtins PARAMS ((void));
222 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
223 int, enum rs6000_builtins,
224 enum rs6000_builtins));
225 static void spe_init_builtins PARAMS ((void));
226 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
227 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
228 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
229 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
231 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
232 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
233 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
234 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
235 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
237 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
238 static void rs6000_parse_abi_options PARAMS ((void));
239 static void rs6000_parse_vrsave_option PARAMS ((void));
240 static void rs6000_parse_isel_option PARAMS ((void));
241 static int first_altivec_reg_to_save PARAMS ((void));
242 static unsigned int compute_vrsave_mask PARAMS ((void));
243 static void is_altivec_return_reg PARAMS ((rtx, void *));
244 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
245 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
246 static int easy_vector_constant PARAMS ((rtx));
248 /* Default register names. */
/* Order: 32 GPRs, 32 FPRs, special registers, 8 CR fields, then the
   AltiVec vector registers.  NOTE(review): the initializer's opening
   brace, the CR-field row following "mq"/"lr"/"ctr"/"ap", and the
   closing "};" are missing from this listing -- confirm against the
   original file.  */
249 char rs6000_reg_names[][8] =
251 "0", "1", "2", "3", "4", "5", "6", "7",
252 "8", "9", "10", "11", "12", "13", "14", "15",
253 "16", "17", "18", "19", "20", "21", "22", "23",
254 "24", "25", "26", "27", "28", "29", "30", "31",
255 "0", "1", "2", "3", "4", "5", "6", "7",
256 "8", "9", "10", "11", "12", "13", "14", "15",
257 "16", "17", "18", "19", "20", "21", "22", "23",
258 "24", "25", "26", "27", "28", "29", "30", "31",
259 "mq", "lr", "ctr","ap",
260 "0", "1", "2", "3", "4", "5", "6", "7",
262 /* AltiVec registers. */
263 "0", "1", "2", "3", "4", "5", "6", "7",
264 "8", "9", "10", "11", "12", "13", "14", "15",
265 "16", "17", "18", "19", "20", "21", "22", "23",
266 "24", "25", "26", "27", "28", "29", "30", "31",
/* Alternate, assembler-prefixed register names (%r0, %f0, %v0, ...);
   copied over rs6000_reg_names when TARGET_REGNAMES is in effect (see
   rs6000_override_options).  NOTE(review): the initializer's opening
   brace, closing "};" and the matching #endif are missing from this
   listing.  */
270 #ifdef TARGET_REGNAMES
271 static const char alt_reg_names[][8] =
273 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
274 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
275 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
276 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
277 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
278 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
279 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
280 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
281 "mq", "lr", "ctr", "ap",
282 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
284 /* AltiVec registers. */
285 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
286 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
287 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
288 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
/* Default for subtargets that do not define strict-alignment mask.  */
293 #ifndef MASK_STRICT_ALIGN
294 #define MASK_STRICT_ALIGN 0
297 /* Initialize the GCC target structure. */
298 #undef TARGET_ATTRIBUTE_TABLE
299 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
300 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
301 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
303 #undef TARGET_ASM_ALIGNED_DI_OP
304 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
306 /* Default unaligned ops are only provided for ELF. Find the ops needed
307 for non-ELF systems. */
308 #ifndef OBJECT_FORMAT_ELF
310 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
312 #undef TARGET_ASM_UNALIGNED_HI_OP
313 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
314 #undef TARGET_ASM_UNALIGNED_SI_OP
315 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
316 #undef TARGET_ASM_UNALIGNED_DI_OP
317 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
/* NOTE(review): the #else / "For Darwin" branch marker between the
   XCOFF and non-XCOFF definitions is missing from this listing, as is
   the closing #endif.  */
320 #undef TARGET_ASM_UNALIGNED_HI_OP
321 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
322 #undef TARGET_ASM_UNALIGNED_SI_OP
323 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
327 /* This hook deals with fixups for relocatable code and DI-mode objects
329 #undef TARGET_ASM_INTEGER
330 #define TARGET_ASM_INTEGER rs6000_assemble_integer
332 #undef TARGET_ASM_FUNCTION_PROLOGUE
333 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
334 #undef TARGET_ASM_FUNCTION_EPILOGUE
335 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
338 #undef TARGET_SECTION_TYPE_FLAGS
339 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
342 #undef TARGET_SCHED_ISSUE_RATE
343 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
344 #undef TARGET_SCHED_ADJUST_COST
345 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
346 #undef TARGET_SCHED_ADJUST_PRIORITY
347 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
349 #undef TARGET_INIT_BUILTINS
350 #define TARGET_INIT_BUILTINS rs6000_init_builtins
352 #undef TARGET_EXPAND_BUILTIN
353 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
355 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
356 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
/* The single definition of the target hook vector for this back end.  */
358 struct gcc_target targetm = TARGET_INITIALIZER;
360 /* Override command line options. Mostly we process the processor
361 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): this function is heavily truncated in this listing --
   the "void" return-type line, the "int i, j;" declarations, most
   braces, several #else/#endif lines and some statements are missing;
   comments below describe only what is visible.  */
364 rs6000_override_options (default_cpu)
365 const char *default_cpu;
368 struct rs6000_cpu_select *ptr;
370 /* Simplify the entries below by making a mask for any POWER
371 variant and any PowerPC variant. */
373 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
374 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
375 | MASK_PPC_GFXOPT | MASK_POWERPC64)
376 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
/* One row per recognized -mcpu=/-mtune= name: flags to turn on and
   flags to turn off when that processor is selected.  */
380 const char *const name; /* Canonical processor name. */
381 const enum processor_type processor; /* Processor type enum value. */
382 const int target_enable; /* Target flags to enable. */
383 const int target_disable; /* Target flags to disable. */
384 } const processor_target_table[]
385 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
386 POWER_MASKS | POWERPC_MASKS},
387 {"power", PROCESSOR_POWER,
388 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
389 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
390 {"power2", PROCESSOR_POWER,
391 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
392 POWERPC_MASKS | MASK_NEW_MNEMONICS},
393 {"power3", PROCESSOR_PPC630,
394 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
395 POWER_MASKS | MASK_PPC_GPOPT},
396 {"power4", PROCESSOR_POWER4,
397 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
398 POWER_MASKS | MASK_PPC_GPOPT},
399 {"powerpc", PROCESSOR_POWERPC,
400 MASK_POWERPC | MASK_NEW_MNEMONICS,
401 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
402 {"powerpc64", PROCESSOR_POWERPC64,
403 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
404 POWER_MASKS | POWERPC_OPT_MASKS},
405 {"rios", PROCESSOR_RIOS1,
406 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
407 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
408 {"rios1", PROCESSOR_RIOS1,
409 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
410 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
411 {"rsc", PROCESSOR_PPC601,
412 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
413 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
414 {"rsc1", PROCESSOR_PPC601,
415 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
416 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
417 {"rios2", PROCESSOR_RIOS2,
418 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
419 POWERPC_MASKS | MASK_NEW_MNEMONICS},
420 {"rs64a", PROCESSOR_RS64A,
421 MASK_POWERPC | MASK_NEW_MNEMONICS,
422 POWER_MASKS | POWERPC_OPT_MASKS},
423 {"401", PROCESSOR_PPC403,
424 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
425 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
426 {"403", PROCESSOR_PPC403,
427 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
428 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
429 {"405", PROCESSOR_PPC405,
430 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
431 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
432 {"505", PROCESSOR_MPCCORE,
433 MASK_POWERPC | MASK_NEW_MNEMONICS,
434 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
435 {"601", PROCESSOR_PPC601,
436 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
437 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
438 {"602", PROCESSOR_PPC603,
439 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
440 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
441 {"603", PROCESSOR_PPC603,
442 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
443 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
444 {"603e", PROCESSOR_PPC603,
445 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
446 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
447 {"ec603e", PROCESSOR_PPC603,
448 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
449 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
450 {"604", PROCESSOR_PPC604,
451 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
452 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
453 {"604e", PROCESSOR_PPC604e,
454 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
455 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
456 {"620", PROCESSOR_PPC620,
457 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
458 POWER_MASKS | MASK_PPC_GPOPT},
459 {"630", PROCESSOR_PPC630,
460 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
461 POWER_MASKS | MASK_PPC_GPOPT},
462 {"740", PROCESSOR_PPC750,
463 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
464 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
465 {"750", PROCESSOR_PPC750,
466 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
467 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
468 {"7400", PROCESSOR_PPC7400,
469 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
470 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
471 {"7450", PROCESSOR_PPC7450,
472 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
473 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
474 {"8540", PROCESSOR_PPC8540,
475 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
476 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
477 {"801", PROCESSOR_MPCCORE,
478 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
479 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
480 {"821", PROCESSOR_MPCCORE,
481 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
482 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
483 {"823", PROCESSOR_MPCCORE,
484 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
485 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
486 {"860", PROCESSOR_MPCCORE,
487 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
488 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
490 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
492 /* Save current -mmultiple/-mno-multiple status. */
493 int multiple = TARGET_MULTIPLE;
494 /* Save current -mstring/-mno-string status. */
495 int string = TARGET_STRING;
497 /* Identify the processor type. */
498 rs6000_select[0].string = default_cpu;
499 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
/* Scan the three selection entries (--with-cpu=, -mcpu=, -mtune=) and
   apply the matching table row's enable/disable masks.  */
501 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
503 ptr = &rs6000_select[i];
504 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
506 for (j = 0; j < ptt_size; j++)
507 if (! strcmp (ptr->string, processor_target_table[j].name))
510 rs6000_cpu = processor_target_table[j].processor;
514 target_flags |= processor_target_table[j].target_enable;
515 target_flags &= ~processor_target_table[j].target_disable;
/* Reached when the name matched no table entry.  */
521 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
/* NOTE(review): the body of this 8540 special case is missing from
   this listing.  */
525 if (rs6000_cpu == PROCESSOR_PPC8540)
528 /* If we are optimizing big endian systems for space, use the store
529 multiple instructions. */
530 if (BYTES_BIG_ENDIAN && optimize_size)
531 target_flags |= MASK_MULTIPLE;
533 /* If -mmultiple or -mno-multiple was explicitly used, don't
534 override with the processor default */
535 if (TARGET_MULTIPLE_SET)
536 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
538 /* If -mstring or -mno-string was explicitly used, don't override
539 with the processor default. */
540 if (TARGET_STRING_SET)
541 target_flags = (target_flags & ~MASK_STRING) | string;
543 /* Don't allow -mmultiple or -mstring on little endian systems
544 unless the cpu is a 750, because the hardware doesn't support the
545 instructions used in little endian mode, and causes an alignment
546 trap. The 750 does not cause an alignment trap (except when the
547 target is unaligned). */
549 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
553 target_flags &= ~MASK_MULTIPLE;
554 if (TARGET_MULTIPLE_SET)
555 warning ("-mmultiple is not supported on little endian systems");
560 target_flags &= ~MASK_STRING;
561 if (TARGET_STRING_SET)
562 warning ("-mstring is not supported on little endian systems");
/* On AIX all code is position independent, so -fpic/-fPIC is a no-op.  */
566 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
571 warning ("-f%s ignored (all code is position independent)",
572 (flag_pic > 1) ? "PIC" : "pic");
575 #ifdef XCOFF_DEBUGGING_INFO
576 if (flag_function_sections && (write_symbols != NO_DEBUG)
577 && DEFAULT_ABI == ABI_AIX)
579 warning ("-ffunction-sections disabled on AIX when debugging");
580 flag_function_sections = 0;
583 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
585 warning ("-fdata-sections not supported on AIX");
586 flag_data_sections = 0;
590 /* Set debug flags */
591 if (rs6000_debug_name)
593 if (! strcmp (rs6000_debug_name, "all"))
594 rs6000_debug_stack = rs6000_debug_arg = 1;
595 else if (! strcmp (rs6000_debug_name, "stack"))
596 rs6000_debug_stack = 1;
597 else if (! strcmp (rs6000_debug_name, "arg"))
598 rs6000_debug_arg = 1;
600 error ("unknown -mdebug-%s switch", rs6000_debug_name);
603 /* Set size of long double */
604 rs6000_long_double_type_size = 64;
605 if (rs6000_long_double_size_string)
/* NOTE(review): the "char *tail;" declaration for strtol's endptr is
   missing from this listing.  */
608 int size = strtol (rs6000_long_double_size_string, &tail, 10);
609 if (*tail != '\0' || (size != 64 && size != 128))
610 error ("Unknown switch -mlong-double-%s",
611 rs6000_long_double_size_string);
613 rs6000_long_double_type_size = size;
616 /* Handle -mabi= options. */
617 rs6000_parse_abi_options ();
619 /* Handle -mvrsave= option. */
620 rs6000_parse_vrsave_option ();
622 /* Handle -misel= option. */
623 rs6000_parse_isel_option ();
625 #ifdef SUBTARGET_OVERRIDE_OPTIONS
626 SUBTARGET_OVERRIDE_OPTIONS;
628 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
629 SUBSUBTARGET_OVERRIDE_OPTIONS;
632 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
633 using TARGET_OPTIONS to handle a toggle switch, but we're out of
634 bits in target_flags so TARGET_SWITCHES cannot be used.
635 Assumption here is that rs6000_longcall_switch points into the
636 text of the complete option, rather than being a copy, so we can
637 scan back for the presence or absence of the no- modifier. */
638 if (rs6000_longcall_switch)
640 const char *base = rs6000_longcall_switch;
/* Walk backwards to the 'm' of -mlongcall/-mno-longcall; relies on the
   pointer aliasing the full option text as stated above.  */
641 while (base[-1] != 'm') base--;
643 if (*rs6000_longcall_switch != '\0')
644 error ("invalid option `%s'", base);
645 rs6000_default_long_calls = (base[0] != 'n');
648 #ifdef TARGET_REGNAMES
649 /* If the user desires alternate register names, copy in the
650 alternate names now. */
652 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
655 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
656 If -maix-struct-return or -msvr4-struct-return was explicitly
657 used, don't override with the ABI default. */
658 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
660 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
661 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
663 target_flags |= MASK_AIX_STRUCT_RET;
666 /* Register global variables with the garbage collector. */
667 rs6000_add_gc_roots ();
669 /* Allocate an alias set for register saves & restores from stack. */
670 rs6000_sr_alias_set = new_alias_set ();
/* Generate the internal label used to address the TOC.  */
673 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
675 /* We can only guarantee the availability of DI pseudo-ops when
676 assembling for 64-bit targets. */
679 targetm.asm_out.aligned_op.di = NULL;
680 targetm.asm_out.unaligned_op.di = NULL;
683 /* Arrange to save and restore machine status around nested functions. */
684 init_machine_status = rs6000_init_machine_status;
687 /* Handle -misel= option. */
/* NOTE(review): in all four option parsers below the "static void"
   lines, braces and several assignment statements (e.g. the isel/spe
   flag assignments on the "yes"/"no"/"spe" branches) are missing from
   this listing; only the comparison scaffolding is visible.  */
689 rs6000_parse_isel_option ()
691 if (rs6000_isel_string == 0)
693 else if (! strcmp (rs6000_isel_string, "yes"))
695 else if (! strcmp (rs6000_isel_string, "no"))
698 error ("unknown -misel= option specified: '%s'",
702 /* Handle -mvrsave= options. */
704 rs6000_parse_vrsave_option ()
706 /* Generate VRSAVE instructions by default. */
707 if (rs6000_altivec_vrsave_string == 0
708 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
709 rs6000_altivec_vrsave = 1;
710 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
711 rs6000_altivec_vrsave = 0;
713 error ("unknown -mvrsave= option specified: '%s'",
714 rs6000_altivec_vrsave_string);
717 /* Handle -mabi= options. */
719 rs6000_parse_abi_options ()
721 if (rs6000_abi_string == 0)
723 else if (! strcmp (rs6000_abi_string, "altivec"))
724 rs6000_altivec_abi = 1;
725 else if (! strcmp (rs6000_abi_string, "no-altivec"))
726 rs6000_altivec_abi = 0;
727 else if (! strcmp (rs6000_abi_string, "spe"))
729 else if (! strcmp (rs6000_abi_string, "no-spe"))
732 error ("unknown ABI specified: '%s'", rs6000_abi_string);
/* Per-optimization-level adjustments; both parameters are deliberately
   unused here.  */
736 optimization_options (level, size)
737 int level ATTRIBUTE_UNUSED;
738 int size ATTRIBUTE_UNUSED;
742 /* Do anything needed at the start of the asm file. */
/* Emits an assembler comment describing the cpu/tune/sdata/-G options
   in effect when -fverbose-asm is given.  NOTE(review): the FILE *file
   parameter declaration, local buffer declaration, braces and the
   trailing newline output are missing from this listing.  */
745 rs6000_file_start (file, default_cpu)
747 const char *default_cpu;
751 const char *start = buffer;
752 struct rs6000_cpu_select *ptr;
754 if (flag_verbose_asm)
756 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
757 rs6000_select[0].string = default_cpu;
759 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
761 ptr = &rs6000_select[i];
762 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
764 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
770 switch (rs6000_sdata)
772 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
773 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
774 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
775 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
778 if (rs6000_sdata && g_switch_value)
780 fprintf (file, "%s -G %d", start, g_switch_value);
790 /* Return non-zero if this function is known to have a null epilogue. */
/* NOTE(review): the function header line is missing from this listing;
   from the visible body this is presumably direct_return () -- it
   inspects the computed stack frame after reload and succeeds when no
   GPR/FPR/AltiVec register needs restoring.  Confirm name and the
   missing conjuncts/return statements against the original file.  */
795 if (reload_completed)
797 rs6000_stack_t *info = rs6000_stack_info ();
799 if (info->first_gp_reg_save == 32
800 && info->first_fp_reg_save == 64
801 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
804 && info->vrsave_mask == 0
/* NOTE(review): throughout the predicates below, the listing is missing
   the "int" return-type lines, the "rtx op;" parameter declarations,
   braces, and most "return 0;"/"return 1;" statements.  */
812 /* Returns 1 always. */
815 any_operand (op, mode)
816 rtx op ATTRIBUTE_UNUSED;
817 enum machine_mode mode ATTRIBUTE_UNUSED;
822 /* Returns 1 if op is the count register. */
824 count_register_operand (op, mode)
826 enum machine_mode mode ATTRIBUTE_UNUSED;
828 if (GET_CODE (op) != REG)
/* Accept CTR itself, or any pseudo (which may later be allocated to CTR).  */
831 if (REGNO (op) == COUNT_REGISTER_REGNUM)
834 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
840 /* Returns 1 if op is an altivec register. */
842 altivec_register_operand (op, mode)
844 enum machine_mode mode ATTRIBUTE_UNUSED;
847 return (register_operand (op, mode)
848 && (GET_CODE (op) != REG
849 || REGNO (op) > FIRST_PSEUDO_REGISTER
850 || ALTIVEC_REGNO_P (REGNO (op))));
/* Returns 1 if op is the XER (fixed-point exception) register.  */
854 xer_operand (op, mode)
856 enum machine_mode mode ATTRIBUTE_UNUSED;
858 if (GET_CODE (op) != REG)
861 if (XER_REGNO_P (REGNO (op)))
867 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
868 by such constants completes more quickly. */
/* NOTE(review): "int" return lines, "rtx op;" declarations and braces
   are missing from this listing for each predicate below.  */
871 s8bit_cint_operand (op, mode)
873 enum machine_mode mode ATTRIBUTE_UNUSED;
875 return ( GET_CODE (op) == CONST_INT
876 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
879 /* Return 1 if OP is a constant that can fit in a D field. */
/* Constraint letter 'I' is the signed 16-bit immediate range.  */
882 short_cint_operand (op, mode)
884 enum machine_mode mode ATTRIBUTE_UNUSED;
886 return (GET_CODE (op) == CONST_INT
887 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
890 /* Similar for an unsigned D field. */
/* 'K' is the unsigned 16-bit range; the mode mask strips sign-extension
   bits before the test.  */
893 u_short_cint_operand (op, mode)
895 enum machine_mode mode ATTRIBUTE_UNUSED;
897 return (GET_CODE (op) == CONST_INT
898 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
901 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
904 non_short_cint_operand (op, mode)
906 enum machine_mode mode ATTRIBUTE_UNUSED;
908 return (GET_CODE (op) == CONST_INT
909 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
912 /* Returns 1 if OP is a CONST_INT that is a positive value
913 and an exact power of 2. */
916 exact_log2_cint_operand (op, mode)
918 enum machine_mode mode ATTRIBUTE_UNUSED;
920 return (GET_CODE (op) == CONST_INT
922 && exact_log2 (INTVAL (op)) >= 0);
925 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
/* ... the comment's continuation (ap, xer, and the original's full
   wording) is missing from this listing.  Accepts pseudos and
   general-purpose hard registers only.  */
929 gpc_reg_operand (op, mode)
931 enum machine_mode mode;
933 return (register_operand (op, mode)
934 && (GET_CODE (op) != REG
935 || (REGNO (op) >= ARG_POINTER_REGNUM
936 && !XER_REGNO_P (REGNO (op)))
937 || REGNO (op) < MQ_REGNO));
940 /* Returns 1 if OP is either a pseudo-register or a register denoting a
/* ... CR field (continuation line missing from this listing).  */
944 cc_reg_operand (op, mode)
946 enum machine_mode mode;
948 return (register_operand (op, mode)
949 && (GET_CODE (op) != REG
950 || REGNO (op) >= FIRST_PSEUDO_REGISTER
951 || CR_REGNO_P (REGNO (op))));
954 /* Returns 1 if OP is either a pseudo-register or a register denoting a
955 CR field that isn't CR0. */
958 cc_reg_not_cr0_operand (op, mode)
960 enum machine_mode mode;
962 return (register_operand (op, mode)
963 && (GET_CODE (op) != REG
964 || REGNO (op) >= FIRST_PSEUDO_REGISTER
965 || CR_REGNO_NOT_CR0_P (REGNO (op))));
968 /* Returns 1 if OP is either a constant integer valid for a D-field or
969 a non-special register. If a register, it must be in the proper
970 mode unless MODE is VOIDmode. */
973 reg_or_short_operand (op, mode)
975 enum machine_mode mode;
977 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
980 /* Similar, except check if the negation of the constant would be
981 valid for a D-field. */
/* Constraint letter 'P' tests exactly that negated range.  */
984 reg_or_neg_short_operand (op, mode)
986 enum machine_mode mode;
988 if (GET_CODE (op) == CONST_INT)
989 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
991 return gpc_reg_operand (op, mode);
994 /* Returns 1 if OP is either a constant integer valid for a DS-field or
995 a non-special register. If a register, it must be in the proper
996 mode unless MODE is VOIDmode. */
/* DS-field immediates must have the low two bits clear, hence the
   "& 3" alignment check.  */
999 reg_or_aligned_short_operand (op, mode)
1001 enum machine_mode mode;
1003 if (gpc_reg_operand (op, mode))
1005 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1012 /* Return 1 if the operand is either a register or an integer whose
1013 high-order 16 bits are zero. */
1016 reg_or_u_short_operand (op, mode)
1018 enum machine_mode mode;
1020 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1023 /* Return 1 is the operand is either a non-special register or ANY
1024 constant integer. */
1027 reg_or_cint_operand (op, mode)
1029 enum machine_mode mode;
1031 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1034 /* Return 1 is the operand is either a non-special register or ANY
1035 32-bit signed constant integer. */
/* When HOST_WIDE_INT is wider than 32 bits the constant must also fit
   in the signed 32-bit range; on 32-bit hosts every CONST_INT does.  */
1038 reg_or_arith_cint_operand (op, mode)
1040 enum machine_mode mode;
1042 return (gpc_reg_operand (op, mode)
1043 || (GET_CODE (op) == CONST_INT
1044 #if HOST_BITS_PER_WIDE_INT != 32
1045 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1046 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1051 /* Return 1 is the operand is either a non-special register or a 32-bit
1052 signed constant integer valid for 64-bit addition. */
1055 reg_or_add_cint64_operand (op, mode)
1057 enum machine_mode mode;
1059 return (gpc_reg_operand (op, mode)
1060 || (GET_CODE (op) == CONST_INT
/* Bound 0x7fff8000: the constant must be expressible as
   addis(high) + addi(low) without the low half overflowing.  */
1061 #if HOST_BITS_PER_WIDE_INT == 32
1062 && INTVAL (op) < 0x7fff8000
1064 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1070 /* Return 1 is the operand is either a non-special register or a 32-bit
1071 signed constant integer valid for 64-bit subtraction. */
1074 reg_or_sub_cint64_operand (op, mode)
1076 enum machine_mode mode;
1078 return (gpc_reg_operand (op, mode)
1079 || (GET_CODE (op) == CONST_INT
/* Same test as reg_or_add_cint64_operand, applied to -INTVAL (op)
   since subtraction is implemented as addition of the negation.  */
1080 #if HOST_BITS_PER_WIDE_INT == 32
1081 && (- INTVAL (op)) < 0x7fff8000
1083 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1089 /* Return 1 is the operand is either a non-special register or ANY
1090 32-bit unsigned constant integer. */
1093 reg_or_logical_cint_operand (op, mode)
1095 enum machine_mode mode;
1097 if (GET_CODE (op) == CONST_INT)
/* Reject values whose bits above the low 32 are set; a CONST_DOUBLE
   is acceptable only when its high word is zero.  NOTE(review): the
   `return' lines for several branches are missing from this extract.  */
1099 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1101 if (GET_MODE_BITSIZE (mode) <= 32)
1104 if (INTVAL (op) < 0)
1108 return ((INTVAL (op) & GET_MODE_MASK (mode)
1109 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1111 else if (GET_CODE (op) == CONST_DOUBLE)
1113 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1117 return CONST_DOUBLE_HIGH (op) == 0;
1120 return gpc_reg_operand (op, mode);
1123 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1126 got_operand (op, mode)
1128 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Symbols, labels, and CONST expressions (symbol plus offset).  */
1130 return (GET_CODE (op) == SYMBOL_REF
1131 || GET_CODE (op) == CONST
1132 || GET_CODE (op) == LABEL_REF);
1135 /* Return 1 if the operand is a simple references that can be loaded via
1136 the GOT (labels involving addition aren't allowed). */
1139 got_no_const_operand (op, mode)
1141 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Like got_operand but without the CONST (symbol+offset) case.  */
1143 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1146 /* Return the number of instructions it takes to form a constant in an
1147 integer register. */
/* Cost model: one insn if addi ('I') or addis ('L') reaches the value;
   on 64-bit hosts/targets, split into sign-extended low word and high
   part and recurse, adding one insn per combining step.  NOTE(review):
   the literal `return 1;' lines appear to be missing from this
   extract.  */
1150 num_insns_constant_wide (value)
1151 HOST_WIDE_INT value;
1153 /* signed constant loadable with {cal|addi} */
1154 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1157 /* constant loadable with {cau|addis} */
1158 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1161 #if HOST_BITS_PER_WIDE_INT == 64
1162 else if (TARGET_POWERPC64)
1164 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1165 HOST_WIDE_INT high = value >> 31;
1167 if (high == 0 || high == -1)
1173 return num_insns_constant_wide (high) + 1;
1175 return (num_insns_constant_wide (high)
1176 + num_insns_constant_wide (low) + 1);
/* Return the instruction count needed to load constant OP (CONST_INT or
   CONST_DOUBLE) into an integer register, dispatching on the rtx code
   and mode and deferring to num_insns_constant_wide per word.  */
1185 num_insns_constant (op, mode)
1187 enum machine_mode mode;
1189 if (GET_CODE (op) == CONST_INT)
/* A value with significant bits above bit 31 may still be cheap if it
   is a valid 64-bit mask (rldicl/rldicr can build it).  */
1191 #if HOST_BITS_PER_WIDE_INT == 64
1192 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1193 && mask64_operand (op, mode))
1197 return num_insns_constant_wide (INTVAL (op));
/* SFmode: convert through the target single-precision image.  */
1200 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1205 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1206 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1207 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1210 else if (GET_CODE (op) == CONST_DOUBLE)
1216 int endian = (WORDS_BIG_ENDIAN == 0);
/* Integer CONST_DOUBLE carries the words directly; floating modes go
   through the target double image first.  */
1218 if (mode == VOIDmode || mode == DImode)
1220 high = CONST_DOUBLE_HIGH (op);
1221 low = CONST_DOUBLE_LOW (op);
1225 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1226 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1228 low = l[1 - endian];
1232 return (num_insns_constant_wide (low)
1233 + num_insns_constant_wide (high))
1237 if (high == 0 && low >= 0)
1238 return num_insns_constant_wide (low);
1240 else if (high == -1 && low < 0)
1241 return num_insns_constant_wide (low);
1243 else if (mask64_operand (op, mode))
1247 return num_insns_constant_wide (high) + 1;
1250 return (num_insns_constant_wide (high)
1251 + num_insns_constant_wide (low) + 1);
1259 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1260 register with one instruction per word. We only do this if we can
1261 safely read CONST_DOUBLE_{LOW,HIGH}. */
1264 easy_fp_constant (op, mode)
1266 enum machine_mode mode;
/* Must be a CONST_DOUBLE in the requested mode, floating or DImode.  */
1268 if (GET_CODE (op) != CONST_DOUBLE
1269 || GET_MODE (op) != mode
1270 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1273 /* Consider all constants with -msoft-float to be easy. */
1274 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1278 /* If we are using V.4 style PIC, consider all constants to be hard. */
1279 if (flag_pic && DEFAULT_ABI == ABI_V4)
1282 #ifdef TARGET_RELOCATABLE
1283 /* Similarly if we are using -mrelocatable, consider all constants
1285 if (TARGET_RELOCATABLE)
/* Easy iff each 32-bit half of the target image loads in one insn.  */
1294 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1295 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1297 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1298 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1301 else if (mode == SFmode)
1306 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1307 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1309 return num_insns_constant_wide (l) == 1;
/* DImode: low word zero on 64-bit, or buildable in at most 2 insns.  */
1312 else if (mode == DImode)
1313 return ((TARGET_POWERPC64
1314 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1315 || (num_insns_constant (op, DImode) <= 2));
1317 else if (mode == SImode)
1323 /* Return 1 if the operand is a CONST_INT and can be put into a
1324 register with one instruction. */
/* NOTE(review): despite the comment above, this checks CONST_VECTOR --
   it accepts only the all-zeros vector.  */
1327 easy_vector_constant (op)
1333 if (GET_CODE (op) != CONST_VECTOR)
1336 units = CONST_VECTOR_NUNITS (op);
1338 /* We can generate 0 easily. Look for that. */
1339 for (i = 0; i < units; ++i)
1341 elt = CONST_VECTOR_ELT (op, i);
1343 /* We could probably simplify this by just checking for equality
1344 with CONST0_RTX for the current mode, but let's be safe
1347 switch (GET_CODE (elt))
1350 if (INTVAL (elt) != 0)
1354 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1362 /* We could probably generate a few other constants trivially, but
1363 gcc doesn't generate them yet. FIXME later. */
1367 /* Return 1 if the operand is the constant 0. This works for scalars
1368 as well as vectors. */
1370 zero_constant (op, mode)
1372 enum machine_mode mode;
/* Pointer comparison is enough: CONST0_RTX is a shared rtx.  */
1374 return op == CONST0_RTX (mode);
1377 /* Return 1 if the operand is 0.0. */
1379 zero_fp_constant (op, mode)
1381 enum machine_mode mode;
/* Same as zero_constant but restricted to floating-point modes.  */
1383 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1386 /* Return 1 if the operand is in volatile memory. Note that during
1387 the RTL generation phase, memory_operand does not return TRUE for
1388 volatile memory references. So this function allows us to
1389 recognize volatile references where its safe. */
1392 volatile_mem_operand (op, mode)
1394 enum machine_mode mode;
1396 if (GET_CODE (op) != MEM)
1399 if (!MEM_VOLATILE_P (op))
1402 if (mode != GET_MODE (op))
/* Pick the address validator matching the current reload phase.  */
1405 if (reload_completed)
1406 return memory_operand (op, mode);
1408 if (reload_in_progress)
1409 return strict_memory_address_p (mode, XEXP (op, 0));
1411 return memory_address_p (mode, XEXP (op, 0));
1414 /* Return 1 if the operand is an offsettable memory operand. */
1417 offsettable_mem_operand (op, mode)
1419 enum machine_mode mode;
/* Strict checking once reload has started or finished.  */
1421 return ((GET_CODE (op) == MEM)
1422 && offsettable_address_p (reload_completed || reload_in_progress,
1423 mode, XEXP (op, 0)));
1426 /* Return 1 if the operand is either an easy FP constant (see above) or
1430 mem_or_easy_const_operand (op, mode)
1432 enum machine_mode mode;
1434 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1437 /* Return 1 if the operand is either a non-special register or an item
1438 that can be used as the operand of a `mode' add insn. */
1441 add_operand (op, mode)
1443 enum machine_mode mode;
/* 'I' = 16-bit signed (addi), 'L' = shifted 16-bit (addis).  */
1445 if (GET_CODE (op) == CONST_INT)
1446 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1447 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1449 return gpc_reg_operand (op, mode);
1452 /* Return 1 if OP is a constant but not a valid add_operand. */
1455 non_add_cint_operand (op, mode)
1457 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Exact complement of add_operand's CONST_INT case.  */
1459 return (GET_CODE (op) == CONST_INT
1460 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1461 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1464 /* Return 1 if the operand is a non-special register or a constant that
1465 can be used as the operand of an OR or XOR insn on the RS/6000. */
1468 logical_operand (op, mode)
1470 enum machine_mode mode;
1472 HOST_WIDE_INT opl, oph;
1474 if (gpc_reg_operand (op, mode))
1477 if (GET_CODE (op) == CONST_INT)
1479 opl = INTVAL (op) & GET_MODE_MASK (mode);
/* On narrow hosts a negative low word implies set high bits.  */
1481 #if HOST_BITS_PER_WIDE_INT <= 32
1482 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1486 else if (GET_CODE (op) == CONST_DOUBLE)
1488 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1491 opl = CONST_DOUBLE_LOW (op);
1492 oph = CONST_DOUBLE_HIGH (op);
/* Usable if the value fits entirely in the low or high 16 bits
   (ori/xori vs. oris/xoris).  */
1499 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1500 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1503 /* Return 1 if C is a constant that is not a logical operand (as
1504 above), but could be split into one. */
1507 non_logical_cint_operand (op, mode)
1509 enum machine_mode mode;
/* Splittable: fails logical_operand but passes the 32-bit-unsigned
   test, so two 16-bit logical ops can synthesize it.  */
1511 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1512 && ! logical_operand (op, mode)
1513 && reg_or_logical_cint_operand (op, mode));
1516 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1517 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1518 Reject all ones and all zeros, since these should have been optimized
1519 away and confuse the making of MB and ME. */
/* NOTE(review): the bit-twiddling statements between the comments below
   are missing from this extract; only the comments survive.  */
1522 mask_operand (op, mode)
1524 enum machine_mode mode ATTRIBUTE_UNUSED;
1526 HOST_WIDE_INT c, lsb;
1528 if (GET_CODE (op) != CONST_INT)
1533 /* Fail in 64-bit mode if the mask wraps around because the upper
1534 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1535 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1538 /* We don't change the number of transitions by inverting,
1539 so make sure we start with the LS bit zero. */
1543 /* Reject all zeros or all ones. */
1547 /* Find the first transition. */
1550 /* Invert to look for a second transition. */
1553 /* Erase first transition. */
1556 /* Find the second transition (if any). */
1559 /* Match if all the bits above are 1's (or c is zero). */
1563 /* Return 1 for the PowerPC64 rlwinm corner case. */
/* Accepts only masks that wrap (bit 0 and bit 31 both set) -- the case
   mask_operand explicitly rejects on PowerPC64.  */
1566 mask_operand_wrap (op, mode)
1568 enum machine_mode mode ATTRIBUTE_UNUSED;
1570 HOST_WIDE_INT c, lsb;
1572 if (GET_CODE (op) != CONST_INT)
1577 if ((c & 0x80000001) != 0x80000001)
1591 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1592 It is if there are no more than one 1->0 or 0->1 transitions.
1593 Reject all zeros, since zero should have been optimized away and
1594 confuses the making of MB and ME. */
1597 mask64_operand (op, mode)
1599 enum machine_mode mode ATTRIBUTE_UNUSED;
1601 if (GET_CODE (op) == CONST_INT)
1603 HOST_WIDE_INT c, lsb;
1607 /* Reject all zeros. */
1611 /* We don't change the number of transitions by inverting,
1612 so make sure we start with the LS bit zero. */
1616 /* Find the transition, and check that all bits above are 1's. */
1623 /* Like mask64_operand, but allow up to three transitions. This
1624 predicate is used by insn patterns that generate two rldicl or
1625 rldicr machine insns. */
/* NOTE(review): the statements implementing each step below are
   missing from this extract; only the step comments survive.  */
1628 mask64_2_operand (op, mode)
1630 enum machine_mode mode ATTRIBUTE_UNUSED;
1632 if (GET_CODE (op) == CONST_INT)
1634 HOST_WIDE_INT c, lsb;
1638 /* Disallow all zeros. */
1642 /* We don't change the number of transitions by inverting,
1643 so make sure we start with the LS bit zero. */
1647 /* Find the first transition. */
1650 /* Invert to look for a second transition. */
1653 /* Erase first transition. */
1656 /* Find the second transition. */
1659 /* Invert to look for a third transition. */
1662 /* Erase second transition. */
1665 /* Find the third transition (if any). */
1668 /* Match if all the bits above are 1's (or c is zero). */
1674 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1675 implement ANDing by the mask IN. */
/* Output convention (see the stores at the end): OUT[0]/OUT[2] are the
   rotate counts and OUT[1]/OUT[3] the two masks.  The two branches
   handle the mirror-image mask shapes; the worked hex examples in the
   trailing comments trace one concrete input through each path.  */
1677 build_mask64_2_operands (in, out)
1681 #if HOST_BITS_PER_WIDE_INT >= 64
1682 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1685 if (GET_CODE (in) != CONST_INT)
1691 /* Assume c initially something like 0x00fff000000fffff. The idea
1692 is to rotate the word so that the middle ^^^^^^ group of zeros
1693 is at the MS end and can be cleared with an rldicl mask. We then
1694 rotate back and clear off the MS ^^ group of zeros with a
1696 c = ~c; /* c == 0xff000ffffff00000 */
1697 lsb = c & -c; /* lsb == 0x0000000000100000 */
1698 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1699 c = ~c; /* c == 0x00fff000000fffff */
1700 c &= -lsb; /* c == 0x00fff00000000000 */
1701 lsb = c & -c; /* lsb == 0x0000100000000000 */
1702 c = ~c; /* c == 0xff000fffffffffff */
1703 c &= -lsb; /* c == 0xff00000000000000 */
1705 while ((lsb >>= 1) != 0)
1706 shift++; /* shift == 44 on exit from loop */
1707 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1708 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1709 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1713 /* Assume c initially something like 0xff000f0000000000. The idea
1714 is to rotate the word so that the ^^^ middle group of zeros
1715 is at the LS end and can be cleared with an rldicr mask. We then
1716 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1718 lsb = c & -c; /* lsb == 0x0000010000000000 */
1719 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1720 c = ~c; /* c == 0x00fff0ffffffffff */
1721 c &= -lsb; /* c == 0x00fff00000000000 */
1722 lsb = c & -c; /* lsb == 0x0000100000000000 */
1723 c = ~c; /* c == 0xff000fffffffffff */
1724 c &= -lsb; /* c == 0xff00000000000000 */
1726 while ((lsb >>= 1) != 0)
1727 shift++; /* shift == 44 on exit from loop */
1728 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1729 m1 >>= shift; /* m1 == 0x0000000000000fff */
1730 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1733 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1734 masks will be all 1's. We are guaranteed more than one transition. */
1735 out[0] = GEN_INT (64 - shift);
1736 out[1] = GEN_INT (m1);
1737 out[2] = GEN_INT (shift);
1738 out[3] = GEN_INT (m2);
1744 /* Return 1 if the operand is either a non-special register or a constant
1745 that can be used as the operand of a PowerPC64 logical AND insn. */
1748 and64_operand (op, mode)
1750 enum machine_mode mode;
/* andi./andis. clobber CR0, so immediates usable only via
   logical_operand are excluded when CR0 is fixed.  */
1752 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1753 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1755 return (logical_operand (op, mode) || mask64_operand (op, mode));
1758 /* Like the above, but also match constants that can be implemented
1759 with two rldicl or rldicr insns. */
1762 and64_2_operand (op, mode)
1764 enum machine_mode mode;
/* Same CR0 restriction as and64_operand, widened to mask64_2.  */
1766 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1767 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1769 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1772 /* Return 1 if the operand is either a non-special register or a
1773 constant that can be used as the operand of an RS/6000 logical AND insn. */
1776 and_operand (op, mode)
1778 enum machine_mode mode;
/* 32-bit counterpart of and64_operand, using mask_operand.  */
1780 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1781 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1783 return (logical_operand (op, mode) || mask_operand (op, mode));
1786 /* Return 1 if the operand is a general register or memory operand. */
1789 reg_or_mem_operand (op, mode)
1791 enum machine_mode mode;
/* volatile_mem_operand covers volatile MEMs that memory_operand
   rejects before reload (see its comment above).  */
1793 return (gpc_reg_operand (op, mode)
1794 || memory_operand (op, mode)
1795 || volatile_mem_operand (op, mode));
1798 /* Return 1 if the operand is a general register or memory operand without
1799 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1803 lwa_operand (op, mode)
1805 enum machine_mode mode;
/* Look through a SUBREG after reload so the address checks below see
   the real MEM.  lwa is a DS-form insn: its displacement must be a
   multiple of 4, hence the INTVAL % 4 test.  */
1809 if (reload_completed && GET_CODE (inner) == SUBREG)
1810 inner = SUBREG_REG (inner);
1812 return gpc_reg_operand (inner, mode)
1813 || (memory_operand (inner, mode)
1814 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1815 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1816 && (GET_CODE (XEXP (inner, 0)) != PLUS
1817 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1818 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1821 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1824 symbol_ref_operand (op, mode)
1826 enum machine_mode mode;
/* VOIDmode acts as a wildcard for the mode check.  */
1828 if (mode != VOIDmode && GET_MODE (op) != mode)
1831 return (GET_CODE (op) == SYMBOL_REF);
1834 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1835 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1838 call_operand (op, mode)
1840 enum machine_mode mode;
1842 if (mode != VOIDmode && GET_MODE (op) != mode)
/* Indirect calls go through LR or CTR; pseudos are allowed before
   register allocation resolves them.  */
1845 return (GET_CODE (op) == SYMBOL_REF
1846 || (GET_CODE (op) == REG
1847 && (REGNO (op) == LINK_REGISTER_REGNUM
1848 || REGNO (op) == COUNT_REGISTER_REGNUM
1849 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1852 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1853 this file and the function is not weakly defined. */
1856 current_file_function_operand (op, mode)
1858 enum machine_mode mode ATTRIBUTE_UNUSED;
/* SYMBOL_REF_FLAG marks locally-defined functions; the current
   function itself also qualifies unless it is weak (a weak definition
   may be overridden at link time).  */
1860 return (GET_CODE (op) == SYMBOL_REF
1861 && (SYMBOL_REF_FLAG (op)
1862 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1863 && ! DECL_WEAK (current_function_decl))));
1866 /* Return 1 if this operand is a valid input for a move insn. */
/* NOTE(review): several `return 1;' lines between the cases below are
   missing from this extract.  */
1869 input_operand (op, mode)
1871 enum machine_mode mode;
1873 /* Memory is always valid. */
1874 if (memory_operand (op, mode))
1877 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1878 if (GET_CODE (op) == CONSTANT_P_RTX)
1881 /* For floating-point, easy constants are valid. */
1882 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1884 && easy_fp_constant (op, mode))
1887 /* Allow any integer constant. */
1888 if (GET_MODE_CLASS (mode) == MODE_INT
1889 && (GET_CODE (op) == CONST_INT
1890 || GET_CODE (op) == CONST_DOUBLE))
1893 /* For floating-point or multi-word mode, the only remaining valid type
1895 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1896 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1897 return register_operand (op, mode);
1899 /* The only cases left are integral modes one word or smaller (we
1900 do not get called for MODE_CC values). These can be in any
1902 if (register_operand (op, mode))
1905 /* A SYMBOL_REF referring to the TOC is valid. */
1906 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1909 /* A constant pool expression (relative to the TOC) is valid */
1910 if (TOC_RELATIVE_EXPR_P (op))
1913 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1915 if (DEFAULT_ABI == ABI_V4
1916 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1917 && small_data_operand (op, Pmode))
1923 /* Return 1 for an operand in small memory on V.4/eabi. */
1926 small_data_operand (op, mode)
1927 rtx op ATTRIBUTE_UNUSED;
1928 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Only meaningful when -msdata selects a small-data model and the ABI
   is V.4; otherwise fail immediately.  */
1933 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1936 if (DEFAULT_ABI != ABI_V4)
1939 if (GET_CODE (op) == SYMBOL_REF)
1942 else if (GET_CODE (op) != CONST
1943 || GET_CODE (XEXP (op, 0)) != PLUS
1944 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1945 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1950 rtx sum = XEXP (op, 0);
1951 HOST_WIDE_INT summand;
1953 /* We have to be careful here, because it is the referenced address
1954 that must be 32k from _SDA_BASE_, not just the symbol. */
1955 summand = INTVAL (XEXP (sum, 1));
1956 if (summand < 0 || summand > g_switch_value)
/* Small-data symbols are tagged with a leading '@' in their name.  */
1959 sym_ref = XEXP (sum, 0);
1962 if (*XSTR (sym_ref, 0) != '@')
/* Walk OP, setting *HAVE_SYM when a constant-pool SYMBOL_REF is seen
   and *HAVE_TOC when the TOC label is referenced; recurse through PLUS
   and other unary wrappers.  NOTE(review): the case labels of the
   switch are missing from this extract.  */
1973 constant_pool_expr_1 (op, have_sym, have_toc)
1978 switch (GET_CODE(op))
1981 if (CONSTANT_POOL_ADDRESS_P (op))
1983 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1991 else if (! strcmp (XSTR (op, 0), toc_label_name))
2000 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2001 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2003 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
/* True when OP is a constant-pool expression (contains a pool symbol).  */
2012 constant_pool_expr_p (op)
2017 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
/* True when OP references the TOC label (TOC-relative expression).  */
2021 toc_relative_expr_p (op)
2026 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2029 /* Try machine-dependent ways of modifying an illegitimate address
2030 to be legitimate. If we find one, return the new, valid address.
2031 This is used from only one place: `memory_address' in explow.c.
2033 OLDX is the address as it was before break_out_memory_refs was
2034 called. In some cases it is useful to look at this to decide what
2037 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2039 It is always safe for this function to do nothing. It exists to
2040 recognize opportunities to optimize the output.
2042 On RS/6000, first check for the sum of a register with a constant
2043 integer that is out of range. If so, generate code to add the
2044 constant with the low-order 16 bits masked to the register and force
2045 this result into another register (this can be done with `cau').
2046 Then generate an address of REG+(CONST&0xffff), allowing for the
2047 possibility of bit 16 being a one.
2049 Then check for the sum of a register and something not constant, try to
2050 load the other things into a register and return the sum. */
2052 rs6000_legitimize_address (x, oldx, mode)
2054 rtx oldx ATTRIBUTE_UNUSED;
2055 enum machine_mode mode;
/* Case 1: reg + out-of-range constant.  Split the constant into a
   sign-extended 16-bit low part and a high part folded into a new
   base register.  */
2057 if (GET_CODE (x) == PLUS
2058 && GET_CODE (XEXP (x, 0)) == REG
2059 && GET_CODE (XEXP (x, 1)) == CONST_INT
2060 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2062 HOST_WIDE_INT high_int, low_int;
2064 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2065 high_int = INTVAL (XEXP (x, 1)) - low_int;
2066 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2067 GEN_INT (high_int)), 0);
2068 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
/* Case 2: reg + non-constant, for modes that support indexed
   addressing -- force the second operand into a register.  */
2070 else if (GET_CODE (x) == PLUS
2071 && GET_CODE (XEXP (x, 0)) == REG
2072 && GET_CODE (XEXP (x, 1)) != CONST_INT
2073 && GET_MODE_NUNITS (mode) == 1
2074 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2077 && (TARGET_POWERPC64 || mode != DImode)
2080 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2081 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
/* AltiVec: only [reg] and [reg+reg] forms are addressable.  */
2083 else if (ALTIVEC_VECTOR_MODE (mode))
2087 /* Make sure both operands are registers. */
2088 if (GET_CODE (x) == PLUS)
2089 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2090 force_reg (Pmode, XEXP (x, 1)));
2092 reg = force_reg (Pmode, x);
2095 else if (SPE_VECTOR_MODE (mode))
2097 /* We accept [reg + reg] and [reg + OFFSET]. */
2099 if (GET_CODE (x) == PLUS)
2101 rtx op1 = XEXP (x, 0);
2102 rtx op2 = XEXP (x, 1);
2104 op1 = force_reg (Pmode, op1);
2106 if (GET_CODE (op2) != REG
2107 && (GET_CODE (op2) != CONST_INT
2108 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2109 op2 = force_reg (Pmode, op2);
2111 return gen_rtx_PLUS (Pmode, op1, op2);
2114 return force_reg (Pmode, x);
/* ELF/Darwin without a TOC: build the address as HIGH + LO_SUM.  */
2116 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
2117 && GET_CODE (x) != CONST_INT
2118 && GET_CODE (x) != CONST_DOUBLE
2120 && GET_MODE_NUNITS (mode) == 1
2121 && (GET_MODE_BITSIZE (mode) <= 32
2122 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2124 rtx reg = gen_reg_rtx (Pmode);
2125 emit_insn (gen_elf_high (reg, (x)));
2126 return gen_rtx_LO_SUM (Pmode, reg, (x));
2128 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2130 && GET_CODE (x) != CONST_INT
2131 && GET_CODE (x) != CONST_DOUBLE
2133 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2137 rtx reg = gen_reg_rtx (Pmode);
2138 emit_insn (gen_macho_high (reg, (x)));
2139 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Finally, pool constants reachable through the TOC.  */
2142 && CONSTANT_POOL_EXPR_P (x)
2143 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode)
2145 return create_TOC_reference (x);
2151 /* The convention appears to be to define this wherever it is used.
2152 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2153 is now used here. */
2154 #ifndef REG_MODE_OK_FOR_BASE_P
2155 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2158 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2159 replace the input X, or the original X if no replacement is called for.
2160 The output parameter *WIN is 1 if the calling macro should goto WIN,
2163 For RS/6000, we wish to handle large displacements off a base
2164 register by splitting the addend across an addiu/addis and the mem insn.
2165 This cuts number of extra insns needed from 3 to 1.
2167 On Darwin, we use this to generate code for floating point constants.
2168 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2169 The Darwin code is inside #if TARGET_MACHO because only then is
2170 machopic_function_base_name() defined. */
2172 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2174 enum machine_mode mode;
2177 int ind_levels ATTRIBUTE_UNUSED;
2180 /* We must recognize output that we have already generated ourselves. */
2181 if (GET_CODE (x) == PLUS
2182 && GET_CODE (XEXP (x, 0)) == PLUS
2183 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2184 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2185 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2187 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2188 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2189 opnum, (enum reload_type)type);
/* Match the LO_SUM shape produced by the Darwin FP-constant case
   further below, so a second pass reloads only the base part.  */
2195 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2196 && GET_CODE (x) == LO_SUM
2197 && GET_CODE (XEXP (x, 0)) == PLUS
2198 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2199 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2200 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2201 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2202 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2203 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2204 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2206 /* Result of previous invocation of this function on Darwin
2207 floating point constant. */
2208 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2209 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2210 opnum, (enum reload_type)type);
/* Large displacement off a hard base register: split VAL into a
   sign-extended 16-bit low part (kept in the mem) and a high part
   (reloaded into the base).  Vector modes have no displacement form.  */
2215 if (GET_CODE (x) == PLUS
2216 && GET_CODE (XEXP (x, 0)) == REG
2217 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2218 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2219 && GET_CODE (XEXP (x, 1)) == CONST_INT
2220 && !SPE_VECTOR_MODE (mode)
2221 && !ALTIVEC_VECTOR_MODE (mode))
2223 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2224 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2226 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2228 /* Check for 32-bit overflow. */
2229 if (high + low != val)
2235 /* Reload the high part into a base reg; leave the low part
2236 in the mem directly. */
2238 x = gen_rtx_PLUS (GET_MODE (x),
2239 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2243 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2244 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2245 opnum, (enum reload_type)type);
2250 if (GET_CODE (x) == SYMBOL_REF
2251 && DEFAULT_ABI == ABI_DARWIN
2252 && !ALTIVEC_VECTOR_MODE (mode)
2255 /* Darwin load of floating point constant. */
2256 rtx offset = gen_rtx (CONST, Pmode,
2257 gen_rtx (MINUS, Pmode, x,
2258 gen_rtx (SYMBOL_REF, Pmode,
2259 machopic_function_base_name ())));
2260 x = gen_rtx (LO_SUM, GET_MODE (x),
2261 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2262 gen_rtx (HIGH, Pmode, offset)), offset);
2263 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2264 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2265 opnum, (enum reload_type)type);
/* TOC-reachable pool constants: rewrite as a TOC reference.  */
2271 && CONSTANT_POOL_EXPR_P (x)
2272 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2274 (x) = create_TOC_reference (x);
2282 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2283 that is a valid memory address for an instruction.
2284 The MODE argument is the machine mode for the MEM expression
2285 that wants to use this address.
2287 On the RS/6000, there are four valid address: a SYMBOL_REF that
2288 refers to a constant pool entry of an address (or the sum of it
2289 plus a constant), a short (16-bit signed) constant plus a register,
2290 the sum of two registers, or a register indirect, possibly with an
2291 auto-increment. For DFmode and DImode with an constant plus register,
2292 we must ensure that both words are addressable or PowerPC64 with offset
2295 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2296 32-bit DImode, TImode), indexed addressing cannot be used because
2297 adjacent memory cells are accessed by adding word-sized offsets
2298 during assembly output. */
/* NOTE(review): the `return 1;' lines after each accepted form are
   missing from this extract.  */
2300 rs6000_legitimate_address (mode, x, reg_ok_strict)
2301 enum machine_mode mode;
2305 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2307 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2308 && !ALTIVEC_VECTOR_MODE (mode)
2309 && !SPE_VECTOR_MODE (mode)
2311 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2313 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2315 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2317 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2319 && GET_CODE (x) == PLUS
2320 && GET_CODE (XEXP (x, 0)) == REG
2321 && XEXP (x, 0) == virtual_stack_vars_rtx
2322 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2324 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2327 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2330 && (TARGET_POWERPC64 || mode != DImode)
2331 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2333 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2338 /* Try to output insns to set TARGET equal to the constant C if it can
2339 be done in less than N insns. Do all computations in MODE.
2340 Returns the place where the output has been placed if it can be
2341 done and the insns have been emitted. If it would take more than N
2342 insns, zero is returned and no insns and emitted. */
2345 rs6000_emit_set_const (dest, mode, source, n)
2347 enum machine_mode mode;
2348 int n ATTRIBUTE_UNUSED;
2350 HOST_WIDE_INT c0, c1;
/* Sub-word and word modes: a single SET suffices.  */
2352 if (mode == QImode || mode == HImode || mode == SImode)
2355 dest = gen_reg_rtx (mode);
2356 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2360 if (GET_CODE (source) == CONST_INT)
2362 c0 = INTVAL (source);
2365 else if (GET_CODE (source) == CONST_DOUBLE)
/* On 64-bit hosts the low word carries the whole value; narrow
   hosts need both CONST_DOUBLE words.  */
2367 #if HOST_BITS_PER_WIDE_INT >= 64
2368 c0 = CONST_DOUBLE_LOW (source);
2371 c0 = CONST_DOUBLE_LOW (source);
2372 c1 = CONST_DOUBLE_HIGH (source);
2378 return rs6000_emit_set_long_const (dest, c0, c1);
2381 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2382 fall back to a straight forward decomposition. We do this to avoid
2383 exponential run times encountered when looking for longer sequences
2384 with rs6000_emit_set_const. */
2386 rs6000_emit_set_long_const (dest, c1, c2)
2388 HOST_WIDE_INT c1, c2;
/* Without 64-bit registers, simply store each 32-bit half into the
   corresponding subword of DEST.  */
2390 if (!TARGET_POWERPC64)
2392 rtx operand1, operand2;
2394 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2396 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2398 emit_move_insn (operand1, GEN_INT (c1));
2399 emit_move_insn (operand2, GEN_INT (c2));
/* PowerPC64: build the value 16 bits (ud1..ud4, low to high) at a
   time using li/lis followed by ori/oris and shifts, choosing the
   shortest sequence based on which halfwords are significant.  */
2403 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2406 ud2 = (c1 & 0xffff0000) >> 16;
2407 #if HOST_BITS_PER_WIDE_INT >= 64
2411 ud4 = (c2 & 0xffff0000) >> 16;
2413 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2414 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2417 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2419 emit_move_insn (dest, GEN_INT (ud1));
2422 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2423 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2426 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2429 emit_move_insn (dest, GEN_INT (ud2 << 16));
2431 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2433 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2434 || (ud4 == 0 && ! (ud3 & 0x8000)))
2437 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2440 emit_move_insn (dest, GEN_INT (ud3 << 16));
2443 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2444 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2446 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2451 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2454 emit_move_insn (dest, GEN_INT (ud4 << 16));
2457 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2459 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2461 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2462 GEN_INT (ud2 << 16)));
2464 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2470 /* Emit a move from SOURCE to DEST in mode MODE. */
/* Expand a move of SOURCE to DEST in MODE, legitimizing awkward operands
   (wide constants, symbolic addresses, unaligned block moves) before
   emitting the final SET.  NOTE(review): this extract elides many source
   lines, so some conditions and else-arms shown here are incomplete.  */
2472 rs6000_emit_move (dest, source, mode)
2475      enum machine_mode mode;
2479   operands[1] = source;
2481   /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
2482   if (GET_CODE (operands[1]) == CONST_DOUBLE
2483       && ! FLOAT_MODE_P (mode)
2484       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2486       /* FIXME.  This should never happen.  */
2487       /* Since it seems that it does, do the safe thing and convert
2489       operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
/* A CONST_DOUBLE whose high word is just the sign-extension of the low
   word carries no extra information for an integer mode.  */
2491   if (GET_CODE (operands[1]) == CONST_DOUBLE
2492       && ! FLOAT_MODE_P (mode)
2493       && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2494 	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
2495 	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
2496 	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
2499   /* Check if GCC is setting up a block move that will end up using FP
2500      registers as temporaries.  We must make sure this is acceptable.  */
2501   if (GET_CODE (operands[0]) == MEM
2502       && GET_CODE (operands[1]) == MEM
2504       && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2505 	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2506       && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2507 					    ? 32 : MEM_ALIGN (operands[0])))
2508 	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2510 					       : MEM_ALIGN (operands[1]))))
2511       && ! MEM_VOLATILE_P (operands [0])
2512       && ! MEM_VOLATILE_P (operands [1]))
/* Split the unaligned DImode mem-to-mem move into two SImode moves so
   no FP temporary is needed.  */
2514       emit_move_insn (adjust_address (operands[0], SImode, 0),
2515 		      adjust_address (operands[1], SImode, 0));
2516       emit_move_insn (adjust_address (operands[0], SImode, 4),
2517 		      adjust_address (operands[1], SImode, 4));
/* If the destination is not a register, force the source into one
   (only when new pseudos are still allowed).  */
2521   if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2522     operands[1] = force_reg (mode, operands[1]);
/* POWER (non-PowerPC) stores of SFmode need an explicit truncation,
   since FP registers may hold double-precision data.  */
2524   if (mode == SFmode && ! TARGET_POWERPC
2525       && TARGET_HARD_FLOAT && TARGET_FPRS
2526       && GET_CODE (operands[0]) == MEM)
2530       if (reload_in_progress || reload_completed)
2531 	regnum = true_regnum (operands[1]);
2532       else if (GET_CODE (operands[1]) == REG)
2533 	regnum = REGNO (operands[1]);
2537       /* If operands[1] is a register, on POWER it may have
2538 	 double-precision data in it, so truncate it to single
2540       if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2543 	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2544 	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2545 	  operands[1] = newreg;
2549   /* Handle the case where reload calls us with an invalid address;
2550      and the case of CONSTANT_P_RTX.  */
2551   if (!ALTIVEC_VECTOR_MODE (mode)
2552       && (! general_operand (operands[1], mode)
2553 	  || ! nonimmediate_operand (operands[0], mode)
2554 	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2556       emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2560   /* FIXME:  In the long term, this switch statement should go away
2561      and be replaced by a sequence of tests based on things like
/* Mode-specific constant handling: force hard constants into the
   constant pool unless a cheap inline sequence exists.  The switch
   cases themselves are elided in this extract.  */
2567       if (CONSTANT_P (operands[1])
2568 	  && GET_CODE (operands[1]) != CONST_INT)
2569 	operands[1] = force_const_mem (mode, operands[1]);
2575       if (CONSTANT_P (operands[1])
2576 	  && ! easy_fp_constant (operands[1], mode))
2577 	operands[1] = force_const_mem (mode, operands[1]);
2587       if (CONSTANT_P (operands[1])
2588 	  && !easy_vector_constant (operands[1]))
2589 	operands[1] = force_const_mem (mode, operands[1]);
2594       /* Use default pattern for address of ELF small data */
2597 	  && DEFAULT_ABI == ABI_V4
2598 	  && (GET_CODE (operands[1]) == SYMBOL_REF
2599 	      || GET_CODE (operands[1]) == CONST)
2600 	  && small_data_operand (operands[1], mode))
2602 	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
/* V.4 small PIC: load the address through the GOT.  */
2606       if (DEFAULT_ABI == ABI_V4
2607 	  && mode == Pmode && mode == SImode
2608 	  && flag_pic == 1 && got_operand (operands[1], mode))
2610 	  emit_insn (gen_movsi_got (operands[0], operands[1]));
/* No TOC available (ELF/Darwin -mno-toc, non-PIC): materialize the
   address with a high/low pair.  */
2614       if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2615 	  && TARGET_NO_TOC && ! flag_pic
2617 	  && CONSTANT_P (operands[1])
2618 	  && GET_CODE (operands[1]) != HIGH
2619 	  && GET_CODE (operands[1]) != CONST_INT)
2621 	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2623 	  /* If this is a function address on -mcall-aixdesc,
2624 	     convert it to the address of the descriptor.  */
2625 	  if (DEFAULT_ABI == ABI_AIX
2626 	      && GET_CODE (operands[1]) == SYMBOL_REF
2627 	      && XSTR (operands[1], 0)[0] == '.')
2629 	      const char *name = XSTR (operands[1], 0);
2631 	      while (*name == '.')
2633 	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
/* Copy the symbol's flags onto the stripped-name replacement so later
   passes treat it identically.  */
2634 	      CONSTANT_POOL_ADDRESS_P (new_ref)
2635 		= CONSTANT_POOL_ADDRESS_P (operands[1]);
2636 	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2637 	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2638 	      operands[1] = new_ref;
2641 	  if (DEFAULT_ABI == ABI_DARWIN)
2643 	      emit_insn (gen_macho_high (target, operands[1]));
2644 	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
2648 	      emit_insn (gen_elf_high (target, operands[1]));
2649 	      emit_insn (gen_elf_low (operands[0], target, operands[1]));
2653       /* If this is a SYMBOL_REF that refers to a constant pool entry,
2654 	 and we have put it in the TOC, we just need to make a TOC-relative
2657 	  && GET_CODE (operands[1]) == SYMBOL_REF
2658 	  && CONSTANT_POOL_EXPR_P (operands[1])
2659 	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2660 					      get_pool_mode (operands[1])))
2662 	  operands[1] = create_TOC_reference (operands[1]);
/* Otherwise, hard constants in Pmode (or any constant headed for an FP
   register) go to the constant pool / TOC.  */
2664       else if (mode == Pmode
2665 	       && CONSTANT_P (operands[1])
2666 	       && ((GET_CODE (operands[1]) != CONST_INT
2667 		    && ! easy_fp_constant (operands[1], mode))
2668 		   || (GET_CODE (operands[1]) == CONST_INT
2669 		       && num_insns_constant (operands[1], mode) > 2)
2670 		   || (GET_CODE (operands[0]) == REG
2671 		       && FP_REGNO_P (REGNO (operands[0]))))
2672 	       && GET_CODE (operands[1]) != HIGH
2673 	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2674 	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
2676 	  /* Emit a USE operation so that the constant isn't deleted if
2677 	     expensive optimizations are turned on because nobody
2678 	     references it.  This should only be done for operands that
2679 	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2680 	     This should not be done for operands that contain LABEL_REFs.
2681 	     For now, we just handle the obvious case.  */
2682 	  if (GET_CODE (operands[1]) != LABEL_REF)
2683 	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2686 	  /* Darwin uses a special PIC legitimizer.  */
2687 	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2690 		rs6000_machopic_legitimize_pic_address (operands[1], mode,
2692 	      if (operands[0] != operands[1])
2693 		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2698 	  /* If we are to limit the number of things we put in the TOC and
2699 	     this is a symbol plus a constant we can add in one insn,
2700 	     just put the symbol in the TOC and add the constant.  Don't do
2701 	     this if reload is in progress.  */
2702 	  if (GET_CODE (operands[1]) == CONST
2703 	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2704 	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
2705 	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2706 	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2707 		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2708 	      && ! side_effects_p (operands[0]))
2711 		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2712 	      rtx other = XEXP (XEXP (operands[1], 0), 1);
2714 	      sym = force_reg (mode, sym);
/* Add the constant offset with the width-appropriate add pattern.  */
2716 		emit_insn (gen_addsi3 (operands[0], sym, other));
2718 		emit_insn (gen_adddi3 (operands[0], sym, other));
2722 	  operands[1] = force_const_mem (mode, operands[1]);
/* If the pooled constant was placed in the TOC, rewrite the MEM as a
   TOC-relative reference and mark it read-only.  */
2725 	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2726 	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2727 		     get_pool_constant (XEXP (operands[1], 0)),
2728 		     get_pool_mode (XEXP (operands[1], 0))))
2731 		= gen_rtx_MEM (mode,
2732 			       create_TOC_reference (XEXP (operands[1], 0)));
2733 	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
2734 	      RTX_UNCHANGING_P (operands[1]) = 1;
/* For multi-word (or otherwise awkward) modes, make sure both MEM
   addresses are simple registers before emitting the move.  */
2740       if (GET_CODE (operands[0]) == MEM
2741 	  && GET_CODE (XEXP (operands[0], 0)) != REG
2742 	  && ! reload_in_progress)
2744 	  = replace_equiv_address (operands[0],
2745 				   copy_addr_to_reg (XEXP (operands[0], 0)));
2747       if (GET_CODE (operands[1]) == MEM
2748 	  && GET_CODE (XEXP (operands[1], 0)) != REG
2749 	  && ! reload_in_progress)
2751 	  = replace_equiv_address (operands[1],
2752 				   copy_addr_to_reg (XEXP (operands[1], 0)));
2759   /* Above, we may have called force_const_mem which may have returned
2760      an invalid address.  If we can, fix this up; otherwise, reload will
2761      have to deal with it.  */
2762   if (GET_CODE (operands[1]) == MEM
2763       && ! memory_address_p (mode, XEXP (operands[1], 0))
2764       && ! reload_in_progress)
2765     operands[1] = adjust_address (operands[1], mode, 0);
/* Finally emit the (now legitimate) move.  */
2767   emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2771 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2772 for a call to a function whose data type is FNTYPE.
2773 For a library call, FNTYPE is 0.
2775 For incoming args we set the number of arguments in the prototype large
2776 so we never return a PARALLEL. */
/* Initialize the argument-scan state *CUM for a call whose type is FNTYPE
   (0 for a library call, in which case LIBNAME names it).  INCOMING is
   nonzero when scanning a function's own incoming arguments, in which
   case the prototype-argument count is made huge so function_arg never
   returns a PARALLEL.  */
2779 init_cumulative_args (cum, fntype, libname, incoming)
2780      CUMULATIVE_ARGS *cum;
2782      rtx libname ATTRIBUTE_UNUSED;
2785   static CUMULATIVE_ARGS zero_cumulative;
/* Start from an all-zero state, then seed the register cursors.  */
2787   *cum = zero_cumulative;
2789   cum->fregno = FP_ARG_MIN_REG;
2790   cum->vregno = ALTIVEC_ARG_MIN_REG;
2791   cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2792   cum->call_cookie = CALL_NORMAL;
2793   cum->sysv_gregno = GP_ARG_MIN_REG;
2796     cum->nargs_prototype = 1000;		/* don't return a PARALLEL */
/* For a prototyped call, count the declared arguments; an extra slot is
   reserved when the return value itself needs memory (BLKmode or
   RETURN_IN_MEMORY).  */
2798   else if (cum->prototype)
2799     cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2800 			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2801 			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2804     cum->nargs_prototype = 0;
2806   cum->orig_nargs = cum->nargs_prototype;
2808   /* Check for a longcall attribute.  */
2810       && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2811       && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2812     cum->call_cookie = CALL_LONG;
/* Optional -mdebug=arg trace of the initialized state.  */
2814   if (TARGET_DEBUG_ARG)
2816       fprintf (stderr, "\ninit_cumulative_args:");
2819 	  tree ret_type = TREE_TYPE (fntype);
2820 	  fprintf (stderr, " ret code = %s,",
2821 		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
2824       if (cum->call_cookie & CALL_LONG)
2825 	fprintf (stderr, " longcall,");
2827       fprintf (stderr, " proto = %d, nargs = %d\n",
2828 	       cum->prototype, cum->nargs_prototype);
2832 /* If defined, a C expression which determines whether, and in which
2833 direction, to pad out an argument with extra space. The value
2834 should be of type `enum direction': either `upward' to pad above
2835 the argument, `downward' to pad below, or `none' to inhibit
2838 For the AIX ABI structs are always stored left shifted in their
/* Return the padding direction (enum direction) for an argument of MODE
   and TYPE.  Aggregates get the AIX left-adjusted treatment (branch body
   elided in this extract); everything else uses the generic rule: on a
   little-endian target small arguments pad downward, on big-endian they
   pad upward when shorter than a parameter slot.  */
2842 function_arg_padding (mode, type)
2843      enum machine_mode mode;
2846   if (type != 0 && AGGREGATE_TYPE_P (type))
2849   /* This is the default definition.  */
2850   return (! BYTES_BIG_ENDIAN
2853 	  ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2854 	     && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2855 	  : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2856 	  ? downward : upward));
2859 /* If defined, a C expression that gives the alignment boundary, in bits,
2860 of an argument with the specified mode and type. If it is not defined,
2861 PARM_BOUNDARY is used for all arguments.
2863 V.4 wants long longs to be double word aligned. */
/* Return the alignment boundary, in bits, for an argument of MODE.
   V.4 DImode/DFmode, SPE vectors and AltiVec vectors each get a larger
   boundary (the return values for those branches are elided in this
   extract — presumably 64 and 128; confirm against the full source);
   everything else uses PARM_BOUNDARY.  */
2866 function_arg_boundary (mode, type)
2867      enum machine_mode mode;
2868      tree type ATTRIBUTE_UNUSED;
2870   if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2872   else if (SPE_VECTOR_MODE (mode))
2874   else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2877     return PARM_BOUNDARY;
2880 /* Update the data in CUM to advance over an argument
2881 of mode MODE and data type TYPE.
2882 (TYPE is null for libcalls where that information may not be available.) */
/* Advance *CUM past an argument of MODE and TYPE, mirroring the register
   selection done by function_arg.  TYPE is null for libcalls.
   NOTE(review): several lines are elided in this extract; the branch
   structure shown is incomplete.  */
2885 function_arg_advance (cum, mode, type, named)
2886      CUMULATIVE_ARGS *cum;
2887      enum machine_mode mode;
2891     cum->nargs_prototype--;
/* AltiVec vectors consume a vector register while prototype args remain;
   otherwise they go to the stack.  */
2893   if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2895       if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2898 	cum->words += RS6000_ARG_SIZE (mode, type);
2900   else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
2902 	cum->words += RS6000_ARG_SIZE (mode, type);
/* System V.4 ABI: floats use FP registers, everything else GPRs with
   the odd/even pairing rules below.  */
2905   else if (DEFAULT_ABI == ABI_V4)
2907       if (TARGET_HARD_FLOAT && TARGET_FPRS
2908 	  && (mode == SFmode || mode == DFmode))
2910 	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
/* FP regs exhausted: align the stack word count and spill.  */
2915 	      cum->words += cum->words & 1;
2916 	      cum->words += RS6000_ARG_SIZE (mode, type);
2922 	  int gregno = cum->sysv_gregno;
2924 	  /* Aggregates and IEEE quad get passed by reference.  */
2925 	  if ((type && AGGREGATE_TYPE_P (type))
2929 	  n_words = RS6000_ARG_SIZE (mode, type);
2931 	  /* Long long is put in odd registers.  */
2932 	  if (n_words == 2 && (gregno & 1) == 0)
2935 	  /* Long long is not split between registers and stack.  */
2936 	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2938 	      /* Long long is aligned on the stack.  */
2940 		cum->words += cum->words & 1;
2941 	      cum->words += n_words;
2944 	  /* Note: continuing to accumulate gregno past when we've started
2945 	     spilling to the stack indicates the fact that we've started
2946 	     spilling to the stack to expand_builtin_saveregs.  */
2947 	  cum->sysv_gregno = gregno + n_words;
2950       if (TARGET_DEBUG_ARG)
2952 	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2953 		   cum->words, cum->fregno);
2954 	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2955 		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2956 	  fprintf (stderr, "mode = %4s, named = %d\n",
2957 		   GET_MODE_NAME (mode), named);
/* AIX / Darwin path: arguments are laid out in consecutive words, with
   an alignment word inserted for doubleword-aligned types on 32-bit.  */
2962       int align = (TARGET_32BIT && (cum->words & 1) != 0
2963 		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2965       cum->words += align + RS6000_ARG_SIZE (mode, type);
2967       if (GET_MODE_CLASS (mode) == MODE_FLOAT
2968 	  && TARGET_HARD_FLOAT && TARGET_FPRS)
2971       if (TARGET_DEBUG_ARG)
2973 	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2974 		   cum->words, cum->fregno);
2975 	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2976 		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2977 	  fprintf (stderr, "named = %d, align = %d\n", named, align);
2982 /* Determine where to put an argument to a function.
2983 Value is zero to push the argument on the stack,
2984 or a hard register in which to store the argument.
2986 MODE is the argument's machine mode.
2987 TYPE is the data type of the argument (as a tree).
2988 This is null for libcalls where that information may
2990 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2991 the preceding args and about the function being called.
2992 NAMED is nonzero if this argument is a named parameter
2993 (otherwise it is an extra parameter matching an ellipsis).
2995 On RS/6000 the first eight words of non-FP are normally in registers
2996 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2997 Under V.4, the first 8 FP args are in registers.
2999 If this is floating-point and no prototype is specified, we use
3000 both an FP and integer register (or possibly FP reg and stack). Library
3001 functions (when TYPE is zero) always have the proper types for args,
3002 so we can pass the FP value just in one register. emit_library_function
3003 doesn't support PARALLEL anyway. */
/* Decide where the next argument (MODE, TYPE, NAMED) goes: returns a REG,
   a PARALLEL for split register/stack passing, a CALL_* cookie for the
   VOIDmode end-of-args marker, or 0 to push on the stack (the 0 returns
   are elided in this extract).  Must stay in sync with
   function_arg_advance.  */
3006 function_arg (cum, mode, type, named)
3007      CUMULATIVE_ARGS *cum;
3008      enum machine_mode mode;
3012   enum rs6000_abi abi = DEFAULT_ABI;
3014   /* Return a marker to indicate whether CR1 needs to set or clear the
3015      bit that V.4 uses to say fp args were passed in registers.
3016      Assume that we don't need the marker for software floating point,
3017      or compiler generated library calls.  */
3018   if (mode == VOIDmode)
3021 	  && cum->nargs_prototype < 0
3022 	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3024 	  /* For the SPE, we need to crxor CR6 always.  */
3026 	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3027 	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3028 	    return GEN_INT (cum->call_cookie
3029 			    | ((cum->fregno == FP_ARG_MIN_REG)
3030 			       ? CALL_V4_SET_FP_ARGS
3031 			       : CALL_V4_CLEAR_FP_ARGS));
3034       return GEN_INT (cum->call_cookie);
/* AltiVec vectors: a vector register while any remain and the argument
   is named.  */
3037   if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3039       if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3040 	return gen_rtx_REG (mode, cum->vregno);
3044   else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
3046       if (cum->sysv_gregno - 1 <= GP_ARG_MAX_REG)
3047 	return gen_rtx_REG (mode, cum->sysv_gregno);
/* System V.4: floats in FP registers, integers in GPRs with the same
   odd-pairing / no-split rules as function_arg_advance.  */
3051   else if (abi == ABI_V4)
3053       if (TARGET_HARD_FLOAT && TARGET_FPRS
3054 	  && (mode == SFmode || mode == DFmode))
3056 	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
3057 	    return gen_rtx_REG (mode, cum->fregno);
3064 	  int gregno = cum->sysv_gregno;
3066 	  /* Aggregates and IEEE quad get passed by reference.  */
3067 	  if ((type && AGGREGATE_TYPE_P (type))
3071 	  n_words = RS6000_ARG_SIZE (mode, type);
3073 	  /* Long long is put in odd registers.  */
3074 	  if (n_words == 2 && (gregno & 1) == 0)
3077 	  /* Long long is not split between registers and stack.  */
3078 	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3079 	    return gen_rtx_REG (mode, gregno);
/* AIX / Darwin path: compute the word offset (with doubleword alignment
   padding on 32-bit) and choose FP reg, GPR, or a PARALLEL that splits
   the value between an FP register and GPRs/stack.  */
3086       int align = (TARGET_32BIT && (cum->words & 1) != 0
3087 		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3088       int align_words = cum->words + align;
3090       if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3093       if (USE_FP_FOR_ARG_P (*cum, mode, type))
3096 	      || ((cum->nargs_prototype > 0)
3097 	      /* IBM AIX extended its linkage convention definition always
3098 		 to require FP args after register save area hole on the
3100 		  && (DEFAULT_ABI != ABI_AIX
3102 	      || (align_words < GP_ARG_NUM_REG))))
3103 	    return gen_rtx_REG (mode, cum->fregno);
3105           return gen_rtx_PARALLEL (mode,
3107 	       gen_rtx_EXPR_LIST (VOIDmode,
3108 				  ((align_words >= GP_ARG_NUM_REG)
3111 					+ RS6000_ARG_SIZE (mode, type)
3113 				   /* If this is partially on the stack, then
3114 				      we only include the portion actually
3115 				      in registers here.  */
3116 				   ? gen_rtx_REG (SImode,
3117 						  GP_ARG_MIN_REG + align_words)
3118 				   : gen_rtx_REG (mode,
3119 						  GP_ARG_MIN_REG + align_words))),
3121 	       gen_rtx_EXPR_LIST (VOIDmode,
3122 				  gen_rtx_REG (mode, cum->fregno),
3125       else if (align_words < GP_ARG_NUM_REG)
3126 	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3132 /* For an arg passed partly in registers and partly in memory,
3133 this is the number of registers used.
3134 For args passed entirely in registers or entirely in memory, zero. */
/* Return the number of registers used by an argument that is split
   between GPRs and the stack; 0 when it fits entirely in one or the
   other.  V.4 and FP/AltiVec-register arguments are never split.  */
3137 function_arg_partial_nregs (cum, mode, type, named)
3138      CUMULATIVE_ARGS *cum;
3139      enum machine_mode mode;
3141      int named ATTRIBUTE_UNUSED;
3143   if (DEFAULT_ABI == ABI_V4)
3146   if (USE_FP_FOR_ARG_P (*cum, mode, type)
3147       || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3149       if (cum->nargs_prototype >= 0)
/* The argument starts inside the GPR area but runs past its end:
   the registers up to GP_ARG_NUM_REG hold the leading part.  */
3153   if (cum->words < GP_ARG_NUM_REG
3154       && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3156       int ret = GP_ARG_NUM_REG - cum->words;
3157       if (ret && TARGET_DEBUG_ARG)
3158 	fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3166 /* A C expression that indicates when an argument must be passed by
3167 reference. If nonzero for an argument, a copy of that argument is
3168 made in memory and a pointer to the argument is passed instead of
3169 the argument itself. The pointer is passed in whatever way is
3170 appropriate for passing a pointer to that type.
3172 Under V.4, structures and unions are passed by reference. */
/* Return nonzero if an argument of TYPE must be passed by reference.
   Only the V.4 ABI does this, for aggregates (and, per the elided
   second condition, presumably IEEE-quad long double — confirm against
   the full source).  */
3175 function_arg_pass_by_reference (cum, mode, type, named)
3176      CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3177      enum machine_mode mode ATTRIBUTE_UNUSED;
3179      int named ATTRIBUTE_UNUSED;
3181   if (DEFAULT_ABI == ABI_V4
3182       && ((type && AGGREGATE_TYPE_P (type))
3185       if (TARGET_DEBUG_ARG)
3186 	fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3194 /* Perform any needed actions needed for a function that is receiving a
3195 variable number of arguments.
3199 MODE and TYPE are the mode and type of the current parameter.
3201 PRETEND_SIZE is a variable that should be set to the amount of stack
3202 that must be pushed by the prolog to pretend that our caller pushed
3205 Normally, this macro will push all remaining incoming registers on the
3206 stack and set PRETEND_SIZE to the length of the registers pushed. */
3209 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3210 CUMULATIVE_ARGS *cum;
3211 enum machine_mode mode;
3217 CUMULATIVE_ARGS next_cum;
3218 int reg_size = TARGET_32BIT ? 4 : 8;
3219 rtx save_area = NULL_RTX, mem;
3220 int first_reg_offset, set;
3224 fntype = TREE_TYPE (current_function_decl);
3225 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3226 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3227 != void_type_node));
3229 /* For varargs, we do not want to skip the dummy va_dcl argument.
3230 For stdargs, we do want to skip the last named argument. */
3233 function_arg_advance (&next_cum, mode, type, 1);
3235 if (DEFAULT_ABI == ABI_V4)
3237 /* Indicate to allocate space on the stack for varargs save area. */
3238 cfun->machine->sysv_varargs_p = 1;
3240 save_area = plus_constant (virtual_stack_vars_rtx,
3241 - RS6000_VARARGS_SIZE);
3243 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3247 first_reg_offset = next_cum.words;
3248 save_area = virtual_incoming_args_rtx;
3249 cfun->machine->sysv_varargs_p = 0;
3251 if (MUST_PASS_IN_STACK (mode, type))
3252 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3255 set = get_varargs_alias_set ();
3256 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3258 mem = gen_rtx_MEM (BLKmode,
3259 plus_constant (save_area,
3260 first_reg_offset * reg_size)),
3261 set_mem_alias_set (mem, set);
3262 set_mem_align (mem, BITS_PER_WORD);
3265 (GP_ARG_MIN_REG + first_reg_offset, mem,
3266 GP_ARG_NUM_REG - first_reg_offset,
3267 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3269 /* ??? Does ABI_V4 need this at all? */
3270 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3273 /* Save FP registers if needed. */
3274 if (DEFAULT_ABI == ABI_V4
3275 && TARGET_HARD_FLOAT && TARGET_FPRS
3277 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3279 int fregno = next_cum.fregno;
3280 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3281 rtx lab = gen_label_rtx ();
3282 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3284 emit_jump_insn (gen_rtx_SET (VOIDmode,
3286 gen_rtx_IF_THEN_ELSE (VOIDmode,
3287 gen_rtx_NE (VOIDmode, cr1,
3289 gen_rtx_LABEL_REF (VOIDmode, lab),
3292 while (fregno <= FP_ARG_V4_MAX_REG)
3294 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3295 set_mem_alias_set (mem, set);
3296 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3305 /* Create the va_list data type. */
/* Build the tree type used for va_list.  AIX-style ABIs use a plain
   'char *'; V.4 uses a one-element array of a four-field record
   (gpr/fpr counters, overflow area pointer, register save area
   pointer).  */
3308 rs6000_build_va_list ()
3310   tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3312   /* For AIX, prefer 'char *' because that's what the system
3313      header files like.  */
3314   if (DEFAULT_ABI != ABI_V4)
3315     return build_pointer_type (char_type_node);
3317   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3318   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* gpr/fpr count registers already consumed; the remaining two fields
   locate the overflow (stack) area and the register save area.  */
3320   f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3321 		      unsigned_char_type_node);
3322   f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3323 		      unsigned_char_type_node);
3324   f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3326   f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3329   DECL_FIELD_CONTEXT (f_gpr) = record;
3330   DECL_FIELD_CONTEXT (f_fpr) = record;
3331   DECL_FIELD_CONTEXT (f_ovf) = record;
3332   DECL_FIELD_CONTEXT (f_sav) = record;
/* Chain the fields into the record and lay it out.  */
3334   TREE_CHAIN (record) = type_decl;
3335   TYPE_NAME (record) = type_decl;
3336   TYPE_FIELDS (record) = f_gpr;
3337   TREE_CHAIN (f_gpr) = f_fpr;
3338   TREE_CHAIN (f_fpr) = f_ovf;
3339   TREE_CHAIN (f_ovf) = f_sav;
3341   layout_type (record);
3343   /* The correct type is an array type of one element.  */
3344   return build_array_type (record, build_index_type (size_zero_node));
3347 /* Implement va_start. */
/* Expand __builtin_va_start: fill in the V.4 va_list record (register
   counts, overflow area, register save area).  Non-V.4 ABIs fall back
   to the generic pointer-based implementation.  */
3350 rs6000_va_start (valist, nextarg)
3354   HOST_WIDE_INT words, n_gpr, n_fpr;
3355   tree f_gpr, f_fpr, f_ovf, f_sav;
3356   tree gpr, fpr, ovf, sav, t;
3358   /* Only SVR4 needs something special.  */
3359   if (DEFAULT_ABI != ABI_V4)
3361       std_expand_builtin_va_start (valist, nextarg);
/* Locate the four fields of the va_list record built by
   rs6000_build_va_list, then form COMPONENT_REFs for each.  */
3365   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3366   f_fpr = TREE_CHAIN (f_gpr);
3367   f_ovf = TREE_CHAIN (f_fpr);
3368   f_sav = TREE_CHAIN (f_ovf);
3370   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3371   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3372   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3373   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3374   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3376   /* Count number of gp and fp argument registers used.  */
3377   words = current_function_args_info.words;
3378   n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3379   n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3381   if (TARGET_DEBUG_ARG)
3383       fputs ("va_start: words = ", stderr);
3384       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3385       fputs (", n_gpr = ", stderr);
3386       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3387       fputs (", n_fpr = ", stderr);
3388       fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3389       putc ('\n', stderr);
/* Store the consumed-register counts into the record.  */
3392   t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3393   TREE_SIDE_EFFECTS (t) = 1;
3394   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3396   t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3397   TREE_SIDE_EFFECTS (t) = 1;
3398   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3400   /* Find the overflow area.  */
3401   t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3403     t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3404 	       build_int_2 (words * UNITS_PER_WORD, 0));
3405   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3406   TREE_SIDE_EFFECTS (t) = 1;
3407   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3409   /* Find the register save area.  */
3410   t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3411   t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3412 	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
3413   t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3414   TREE_SIDE_EFFECTS (t) = 1;
3415   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3418 /* Implement va_arg. */
/* Expand __builtin_va_arg for the V.4 ABI: fetch the next argument of
   TYPE either from the register save area (while registers remain) or
   from the overflow (stack) area, returning an rtx for its address/value.
   Non-V.4 uses the generic implementation.  NOTE(review): a number of
   lines (assignments to indirect_p, n_reg, sav_ofs, sav_scale, reg,
   align) are elided in this extract.  */
3421 rs6000_va_arg (valist, type)
3424   tree f_gpr, f_fpr, f_ovf, f_sav;
3425   tree gpr, fpr, ovf, sav, reg, t, u;
3426   int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3427   rtx lab_false, lab_over, addr_rtx, r;
3429   if (DEFAULT_ABI != ABI_V4)
3430     return std_expand_builtin_va_arg (valist, type);
/* Field references into the va_list record, as in rs6000_va_start.  */
3432   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3433   f_fpr = TREE_CHAIN (f_gpr);
3434   f_ovf = TREE_CHAIN (f_fpr);
3435   f_sav = TREE_CHAIN (f_ovf);
3437   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3438   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3439   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3440   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3441   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3443   size = int_size_in_bytes (type);
3444   rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Classify the argument: by-reference, FP register, or GP register.  */
3446   if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3448       /* Aggregates and long doubles are passed by reference.  */
3454       size = UNITS_PER_WORD;
3457   else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3459       /* FP args go in FP registers, if present.  */
3468       /* Otherwise into GP registers.  */
3476   /* Pull the value out of the saved registers ...  */
3478   lab_false = gen_label_rtx ();
3479   lab_over = gen_label_rtx ();
3480   addr_rtx = gen_reg_rtx (Pmode);
3482   /* AltiVec vectors never go in registers.  */
3483   if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3485       TREE_THIS_VOLATILE (reg) = 1;
/* If the register counter has passed the limit, jump to the overflow
   path at lab_false.  */
3486       emit_cmp_and_jump_insns
3487 	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3488 	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3491       /* Long long is aligned in the registers.  */
3494 	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3495 		     build_int_2 (n_reg - 1, 0));
3496 	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3497 	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3498 	  TREE_SIDE_EFFECTS (u) = 1;
3499 	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Address = save area + sav_ofs + (post-incremented reg counter)
   * sav_scale.  */
3503 	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3507       u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3508 		 build_int_2 (n_reg, 0));
3509       TREE_SIDE_EFFECTS (u) = 1;
3511       u = build1 (CONVERT_EXPR, integer_type_node, u);
3512       TREE_SIDE_EFFECTS (u) = 1;
3514       u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3515       TREE_SIDE_EFFECTS (u) = 1;
3517       t = build (PLUS_EXPR, ptr_type_node, t, u);
3518       TREE_SIDE_EFFECTS (t) = 1;
3520       r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3522 	emit_move_insn (addr_rtx, r);
3524       emit_jump_insn (gen_jump (lab_over));
3528   emit_label (lab_false);
3530   /* ... otherwise out of the overflow area.  */
3532   /* Make sure we don't find reg 7 for the next int arg.
3534      All AltiVec vectors go in the overflow area.  So in the AltiVec
3535      case we need to get the vectors from the overflow area, but
3536      remember where the GPRs and FPRs are.  */
3537   if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3538 		    || !TARGET_ALTIVEC))
3540       t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3541       TREE_SIDE_EFFECTS (t) = 1;
3542       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3545   /* Care for on-stack alignment if needed.  */
3552       /* AltiVec vectors are 16 byte aligned.  */
3553       if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
/* Round the overflow pointer up to the required alignment.  */
3558       t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3559       t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3563   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3565     emit_move_insn (addr_rtx, r);
/* Bump the overflow pointer past the argument just consumed.  */
3567   t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3568   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3569   TREE_SIDE_EFFECTS (t) = 1;
3570   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3572   emit_label (lab_over);
/* For by-reference arguments, dereference the stored pointer.  */
3576       r = gen_rtx_MEM (Pmode, addr_rtx);
3577       set_mem_alias_set (r, get_varargs_alias_set ());
3578       emit_move_insn (addr_rtx, r);
/* Register the builtin NAME (with tree TYPE and code CODE) only when the
   target flags in MASK are enabled — so e.g. AltiVec builtins exist only
   under -maltivec.  (Macro continuation lines are elided in this
   extract.)  */
3586 #define def_builtin(MASK, NAME, TYPE, CODE)			\
3588     if ((MASK) & target_flags)					\
3589       builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
3593 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
/* Table of three-operand AltiVec builtins: each entry pairs the enabling
   target-flag mask and insn code with the builtin's source-level name
   and enum value; consumed via def_builtin at initialization.  */
3595 static const struct builtin_description bdesc_3arg[] =
3597   { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3598   { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3599   { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3600   { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3601   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3602   { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3603   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3604   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3605   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3606   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3607   { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3608   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3609   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3610   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3611   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3612   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3613   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3614   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3615   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3616   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3617   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3618   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3619   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3622 /* DST operations: void foo (void *, const int, const char). */
3624 static const struct builtin_description bdesc_dst[] =
/* AltiVec dst* (data stream touch) builtins; same entry layout as the
   other bdesc_* tables: { mask, insn code, name, builtin code }.  */
3626 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3627 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3628 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3629 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3632 /* Simple binary operations: VECc = foo (VECa, VECb). */
3634 static struct builtin_description bdesc_2arg[] =
/* Each entry: { target_flags mask, insn pattern code, builtin name,
   builtin enum code }.  AltiVec entries are gated by MASK_ALTIVEC; the
   SPE entries below use a zero mask.
   NOTE(review): declared non-const, unlike bdesc_3arg/bdesc_dst above --
   confirm nothing writes to this table before constifying it.  */
3636 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3637 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3638 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3639 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3640 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3641 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3642 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3643 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3644 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3645 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3646 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3647 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3648 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3649 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3650 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3651 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3652 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3653 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3654 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3655 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3656 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3657 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3658 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3659 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3660 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3661 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3662 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3663 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3664 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3665 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3666 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3667 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3668 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3669 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3670 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3671 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3672 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3673 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3674 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3675 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3676 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3677 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3678 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3679 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3680 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3681 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3682 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3683 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3684 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3685 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3686 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3687 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3688 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3689 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3690 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3691 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3692 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3693 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3694 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3695 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3696 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3697 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3698 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3699 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3700 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3701 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3702 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3703 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3704 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3705 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3706 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3707 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3708 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3709 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3710 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3711 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3712 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3713 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3714 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3715 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3716 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3717 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3718 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3719 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3720 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3721 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3722 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3723 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3724 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3725 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3726 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3727 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3728 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3729 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3730 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3731 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3732 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3733 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3734 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3735 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3736 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3737 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3738 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3739 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3740 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3741 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3742 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3743 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3744 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3745 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3746 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3747 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3748 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3750 /* Place-holder.  Leave as first SPE builtin.  SPE entries use a zero
   mask; presumably they are registered by SPE-specific init code rather
   than by def_builtin's target_flags test -- TODO confirm.  */
3751 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3752 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3753 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3754 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3755 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3756 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3757 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3758 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3759 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3760 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3761 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3762 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3763 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3764 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3765 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3766 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3767 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3768 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3769 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3770 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3771 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3772 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3773 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3774 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3775 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3776 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3777 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3778 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3779 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3780 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3781 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3782 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3783 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3784 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3785 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3786 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3787 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3788 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3789 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3790 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3791 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3792 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3793 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3794 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3795 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3796 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3797 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3798 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3799 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3800 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3801 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3802 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3803 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3804 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3805 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3806 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3807 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3808 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3809 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3810 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3811 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3812 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3813 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3814 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3815 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3816 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3817 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3818 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3819 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3820 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3821 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3822 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3823 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3824 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3825 { 0, CODE_FOR_spe_evmwlsmf, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF },
3826 { 0, CODE_FOR_spe_evmwlsmfa, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA },
3827 { 0, CODE_FOR_spe_evmwlsmfaaw, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW },
3828 { 0, CODE_FOR_spe_evmwlsmfanw, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW },
3829 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3830 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3831 { 0, CODE_FOR_spe_evmwlssf, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF },
3832 { 0, CODE_FOR_spe_evmwlssfa, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA },
3833 { 0, CODE_FOR_spe_evmwlssfaaw, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW },
3834 { 0, CODE_FOR_spe_evmwlssfanw, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW },
3835 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3836 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3837 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3838 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3839 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3840 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3841 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3842 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3843 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3844 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3845 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3846 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3847 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3848 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3849 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3850 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3851 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3852 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3853 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3854 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3855 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3856 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3857 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3858 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3859 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3860 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3861 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3862 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3863 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3864 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3865 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3866 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3867 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3869 /* SPE binary operations expecting a 5-bit unsigned literal. */
3870 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3872 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3873 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3874 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3875 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3876 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3877 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3878 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3879 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3880 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3881 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3882 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3883 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3884 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3885 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3886 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3887 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3888 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
3889 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
3890 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
3891 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
3892 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
3893 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
3894 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
3895 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
3896 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
3897 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
3899 /* Place-holder. Leave as last binary SPE builtin. */
3900 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
3903 /* AltiVec predicates. */
3905 struct builtin_description_predicates
3907 const unsigned int mask; /* target_flags bit(s) required for this builtin.  */
3908 const enum insn_code icode; /* insn pattern implementing the predicate.  */
/* NOTE(review): entries in bdesc_altivec_preds below also carry an
   opcode string ("*vcmpbfp." etc.); the matching field declaration is
   not visible in this excerpt of the file.  */
3910 const char *const name; /* user-visible builtin function name.  */
3911 const enum rs6000_builtins code; /* builtin enum code.  */
3914 static const struct builtin_description_predicates bdesc_altivec_preds[] =
/* Each entry: { mask, insn code, record-form opcode string, builtin
   name, builtin enum code }.  */
3916 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3917 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3918 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3919 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3920 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3921 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3922 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3923 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3924 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3925 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3926 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3927 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3928 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3931 /* SPE predicates. */
3932 static struct builtin_description bdesc_spe_predicates[] =
/* Order matters: code elsewhere iterates from the first to the last
   entry (see the place-holder comments).  Zero mask, like the other
   SPE tables.  */
3934 /* Place-holder. Leave as first. */
3935 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
3936 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
3937 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
3938 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
3939 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
3940 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
3941 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
3942 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
3943 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
3944 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
3945 /* Place-holder. Leave as last. */
3946 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
3949 /* SPE evsel predicates. */
3950 static struct builtin_description bdesc_spe_evsel[] =
/* evsel variants of the compare builtins above; same ordering
   constraints (first/last place-holders must stay in place).  */
3952 /* Place-holder. Leave as first. */
3953 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
3954 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
3955 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
3956 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
3957 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
3958 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
3959 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
3960 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
3961 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
3962 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
3963 /* Place-holder. Leave as last. */
3964 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
3967 /* ABS* operations. */
3969 static const struct builtin_description bdesc_abs[] =
/* Unary AltiVec absolute-value builtins: plain (abs_*) and
   saturating (abss_*) variants; same entry layout as bdesc_3arg.  */
3971 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3972 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3973 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3974 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3975 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3976 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3977 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3980 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Table of one-argument builtins, expanded generically by
   rs6000_expand_unop_builtin.  */
3983 static struct builtin_description bdesc_1arg[] =
3985 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3986 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3987 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3988 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3989 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3990 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3991 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3992 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3993 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3994 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3995 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3996 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3997 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3998 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3999 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4000 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4001 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4003 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4004 end with SPE_BUILTIN_EVSUBFUSIAAW.  The enum ordering is relied on
   elsewhere (e.g. by enable_mask_for_builtins), so keep this range
   contiguous.  */
4005 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4006 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4007 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4008 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4009 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4010 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4011 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4012 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4013 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4014 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4015 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4016 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4017 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4018 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4019 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4020 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4021 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4022 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4023 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4024 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4025 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4026 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4027 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4028 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4029 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4030 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4031 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4032 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4033 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4034 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4036 /* Place-holder.  Leave as last unary SPE builtin.  */
4037 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
/* Expand a one-argument builtin: emit insn ICODE on the single argument
   in ARGLIST and return the result rtx.  TARGET is reused when its mode
   and predicate are acceptable; otherwise a fresh pseudo is allocated.
   The vsplti*/evsplat* variants require a 5-bit signed literal operand
   and are rejected with an error otherwise.  */
4041 rs6000_expand_unop_builtin (icode, arglist, target)
4042 enum insn_code icode;
4047 tree arg0 = TREE_VALUE (arglist);
4048 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4049 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4050 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4052 /* If we got invalid arguments bail out before generating bad rtl.  */
4053 if (arg0 == error_mark_node)
4058 /* Only allow 5-bit *signed* literals.  */
4059 case CODE_FOR_altivec_vspltisb:
4060 case CODE_FOR_altivec_vspltish:
4061 case CODE_FOR_altivec_vspltisw:
4062 case CODE_FOR_spe_evsplatfi:
4063 case CODE_FOR_spe_evsplati:
4064 if (GET_CODE (op0) != CONST_INT
4065 || INTVAL (op0) > 0x1f
4066 || INTVAL (op0) < -0x1f)
4068 error ("argument 1 must be a 5-bit signed literal");
/* Reuse TARGET only when the insn's output predicate accepts it.  */
4077 || GET_MODE (target) != tmode
4078 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4079 target = gen_reg_rtx (tmode);
4081 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4082 op0 = copy_to_mode_reg (mode0, op0);
4084 pat = GEN_FCN (icode) (target, op0);
/* Expand an AltiVec ABS/ABSS builtin: emit insn ICODE on the single
   argument in ARGLIST, returning the result in TARGET.  The abs
   patterns take two extra scratch registers (in the input mode), which
   are allocated here and passed to the generator.  */
4093 altivec_expand_abs_builtin (icode, arglist, target)
4094 enum insn_code icode;
4098 rtx pat, scratch1, scratch2;
4099 tree arg0 = TREE_VALUE (arglist);
4100 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4101 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4102 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4104 /* If we have invalid arguments, bail out before generating bad rtl.  */
4105 if (arg0 == error_mark_node)
4109 || GET_MODE (target) != tmode
4110 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4111 target = gen_reg_rtx (tmode);
4113 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4114 op0 = copy_to_mode_reg (mode0, op0);
/* The abs expanders need two scratch vector registers.  */
4116 scratch1 = gen_reg_rtx (mode0);
4117 scratch2 = gen_reg_rtx (mode0);
4119 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
/* Expand a two-argument builtin: emit insn ICODE on the two arguments
   in ARGLIST and return the result rtx (in TARGET when suitable).
   For the listed vector-splat, convert, shift and load variants the
   second argument must be a 5-bit unsigned literal, checked on the
   tree node before any rtl is generated.  */
4128 rs6000_expand_binop_builtin (icode, arglist, target)
4129 enum insn_code icode;
4134 tree arg0 = TREE_VALUE (arglist);
4135 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4136 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4137 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4138 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4139 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4140 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4142 /* If we got invalid arguments bail out before generating bad rtl.  */
4143 if (arg0 == error_mark_node || arg1 == error_mark_node)
4148 /* Only allow 5-bit unsigned literals.  */
4149 case CODE_FOR_altivec_vcfux:
4150 case CODE_FOR_altivec_vcfsx:
4151 case CODE_FOR_altivec_vctsxs:
4152 case CODE_FOR_altivec_vctuxs:
4153 case CODE_FOR_altivec_vspltb:
4154 case CODE_FOR_altivec_vsplth:
4155 case CODE_FOR_altivec_vspltw:
4156 case CODE_FOR_spe_evaddiw:
4157 case CODE_FOR_spe_evldd:
4158 case CODE_FOR_spe_evldh:
4159 case CODE_FOR_spe_evldw:
4160 case CODE_FOR_spe_evlhhesplat:
4161 case CODE_FOR_spe_evlhhossplat:
4162 case CODE_FOR_spe_evlhhousplat:
4163 case CODE_FOR_spe_evlwhe:
4164 case CODE_FOR_spe_evlwhos:
4165 case CODE_FOR_spe_evlwhou:
4166 case CODE_FOR_spe_evlwhsplat:
4167 case CODE_FOR_spe_evlwwsplat:
4168 case CODE_FOR_spe_evrlwi:
4169 case CODE_FOR_spe_evslwi:
4170 case CODE_FOR_spe_evsrwis:
4171 case CODE_FOR_spe_evsrwiu:
4172 if (TREE_CODE (arg1) != INTEGER_CST
4173 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4175 error ("argument 2 must be a 5-bit unsigned literal");
/* Reuse TARGET only when the insn's output predicate accepts it.  */
4184 || GET_MODE (target) != tmode
4185 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4186 target = gen_reg_rtx (tmode);
4188 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4189 op0 = copy_to_mode_reg (mode0, op0);
4190 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4191 op1 = copy_to_mode_reg (mode1, op1);
4193 pat = GEN_FCN (icode) (target, op0, op1);
/* Expand an AltiVec predicate builtin (vec_any_* / vec_all_*).  The
   first argument selects which CR6 bit encodes the answer (the
   "cr6_form"); the remaining two are the vectors to compare.  Emits
   the compare insn ICODE (tagged with OPCODE) into a scratch, then a
   cr6-test insn that extracts the requested bit into TARGET (SImode).  */
4202 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4203 enum insn_code icode;
4209 tree cr6_form = TREE_VALUE (arglist);
4210 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4211 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4212 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4213 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4214 enum machine_mode tmode = SImode;
4215 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4216 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* The selector must be a compile-time constant.  */
4219 if (TREE_CODE (cr6_form) != INTEGER_CST)
4221 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4225 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4230 /* If we have invalid arguments, bail out before generating bad rtl.  */
4231 if (arg0 == error_mark_node || arg1 == error_mark_node)
4235 || GET_MODE (target) != tmode
4236 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4237 target = gen_reg_rtx (tmode);
4239 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4240 op0 = copy_to_mode_reg (mode0, op0);
4241 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4242 op1 = copy_to_mode_reg (mode1, op1);
4244 scratch = gen_reg_rtx (mode0);
/* The OPCODE symbol ref distinguishes which comparison this is, since
   several predicates share one insn pattern.  */
4246 pat = GEN_FCN (icode) (scratch, op0, op1,
4247 gen_rtx (SYMBOL_REF, Pmode, opcode));
4252 /* The vec_any* and vec_all* predicates use the same opcodes for two
4253 different operations, but the bits in CR6 will be different
4254 depending on what information we want.  So we have to play tricks
4255 with CR6 to get the right bits out.
4257 If you think this is disgusting, look at the specs for the
4258 AltiVec predicates.  */
4260 switch (cr6_form_int)
4263 emit_insn (gen_cr6_test_for_zero (target));
4266 emit_insn (gen_cr6_test_for_zero_reverse (target));
4269 emit_insn (gen_cr6_test_for_lt (target));
4272 emit_insn (gen_cr6_test_for_lt_reverse (target));
4275 error ("argument 1 of __builtin_altivec_predicate is out of range");
/* Expand a vector-store builtin (stvx family, also reused for SPE
   evst*): ARGLIST is (value, base, offset/index).  Note the operand
   permutation: the insn pattern is (mem-operands..., value), so op0
   (the value) is passed last to GEN_FCN.  Returns no value to the
   caller's target.  */
4283 altivec_expand_stv_builtin (icode, arglist)
4284 enum insn_code icode;
4287 tree arg0 = TREE_VALUE (arglist);
4288 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4289 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4290 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4291 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4292 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4294 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4295 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4296 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4298 /* Invalid arguments.  Bail before doing anything stoopid!  */
4299 if (arg0 == error_mark_node
4300 || arg1 == error_mark_node
4301 || arg2 == error_mark_node)
/* arg0 (the stored value) corresponds to insn operand 2; arg1/arg2 to
   operands 0 and 1.  */
4304 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4305 op0 = copy_to_mode_reg (mode2, op0);
4306 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4307 op1 = copy_to_mode_reg (mode0, op1);
4308 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4309 op2 = copy_to_mode_reg (mode1, op2);
4311 pat = GEN_FCN (icode) (op1, op2, op0);
/* Expand a three-argument builtin: emit insn ICODE on the three
   arguments in ARGLIST and return the result rtx (in TARGET when
   suitable).  For the vsldoi variants the third argument must be a
   4-bit unsigned literal, checked on the tree node up front.  */
4318 rs6000_expand_ternop_builtin (icode, arglist, target)
4319 enum insn_code icode;
4324 tree arg0 = TREE_VALUE (arglist);
4325 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4326 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4327 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4328 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4329 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4330 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4331 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4332 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4333 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4335 /* If we got invalid arguments bail out before generating bad rtl.  */
4336 if (arg0 == error_mark_node
4337 || arg1 == error_mark_node
4338 || arg2 == error_mark_node)
4343 /* Only allow 4-bit unsigned literals.  */
4344 case CODE_FOR_altivec_vsldoi_4sf:
4345 case CODE_FOR_altivec_vsldoi_4si:
4346 case CODE_FOR_altivec_vsldoi_8hi:
4347 case CODE_FOR_altivec_vsldoi_16qi:
4348 if (TREE_CODE (arg2) != INTEGER_CST
4349 || TREE_INT_CST_LOW (arg2) & ~0xf)
4351 error ("argument 3 must be a 4-bit unsigned literal");
/* Reuse TARGET only when the insn's output predicate accepts it.  */
4360 || GET_MODE (target) != tmode
4361 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4362 target = gen_reg_rtx (tmode);
4364 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4365 op0 = copy_to_mode_reg (mode0, op0);
4366 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4367 op1 = copy_to_mode_reg (mode1, op1);
4368 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4369 op2 = copy_to_mode_reg (mode2, op2);
4371 pat = GEN_FCN (icode) (target, op0, op1, op2);
4379 /* Expand the lvx builtins.  EXP is the CALL_EXPR; *EXPANDEDP is set
   when one of the LD_INTERNAL builtins is recognized and expanded here,
   and the loaded value is returned (in TARGET when suitable).  */
4381 altivec_expand_ld_builtin (exp, target, expandedp)
4386 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4387 tree arglist = TREE_OPERAND (exp, 1);
4388 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4390 enum machine_mode tmode, mode0;
4392 enum insn_code icode;
/* Map the builtin code to the mode-specific lvx insn.  */
4396 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4397 icode = CODE_FOR_altivec_lvx_16qi;
4399 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4400 icode = CODE_FOR_altivec_lvx_8hi;
4402 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4403 icode = CODE_FOR_altivec_lvx_4si;
4405 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4406 icode = CODE_FOR_altivec_lvx_4sf;
4415 arg0 = TREE_VALUE (arglist);
4416 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4417 tmode = insn_data[icode].operand[0].mode;
4418 mode0 = insn_data[icode].operand[1].mode;
4421 || GET_MODE (target) != tmode
4422 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4423 target = gen_reg_rtx (tmode);
/* The argument is an address: wrap it in a MEM if the insn's memory
   predicate rejects the raw rtx.  */
4425 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4426 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4428 pat = GEN_FCN (icode) (target, op0);
4435 /* Expand the stvx builtins.  EXP is the CALL_EXPR; *EXPANDEDP is set
   when one of the ST_INTERNAL builtins is recognized and expanded here.
   TARGET is unused (stores produce no value).  */
4437 altivec_expand_st_builtin (exp, target, expandedp)
4439 rtx target ATTRIBUTE_UNUSED;
4442 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4443 tree arglist = TREE_OPERAND (exp, 1);
4444 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4446 enum machine_mode mode0, mode1;
4448 enum insn_code icode;
/* Map the builtin code to the mode-specific stvx insn.  */
4452 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4453 icode = CODE_FOR_altivec_stvx_16qi;
4455 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4456 icode = CODE_FOR_altivec_stvx_8hi;
4458 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4459 icode = CODE_FOR_altivec_stvx_4si;
4461 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4462 icode = CODE_FOR_altivec_stvx_4sf;
4469 arg0 = TREE_VALUE (arglist);
4470 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4471 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4472 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4473 mode0 = insn_data[icode].operand[0].mode;
4474 mode1 = insn_data[icode].operand[1].mode;
/* arg0 is the destination address (wrapped in a MEM when needed);
   arg1 is the vector value to store.  */
4476 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4477 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4478 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4479 op1 = copy_to_mode_reg (mode1, op1);
4481 pat = GEN_FCN (icode) (op0, op1);
4489 /* Expand the dst builtins (data-stream touch).  EXP is the CALL_EXPR;
   *EXPANDEDP is set when the builtin is found in bdesc_dst and expanded.
   TARGET is unused (dst insns produce no value).  The third argument
   selects the stream tag and must be a 2-bit unsigned literal.  */
4491 altivec_expand_dst_builtin (exp, target, expandedp)
4493 rtx target ATTRIBUTE_UNUSED;
4496 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4497 tree arglist = TREE_OPERAND (exp, 1);
4498 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4499 tree arg0, arg1, arg2;
4500 enum machine_mode mode0, mode1, mode2;
4501 rtx pat, op0, op1, op2;
4502 struct builtin_description *d;
4507 /* Handle DST variants.  */
4508 d = (struct builtin_description *) bdesc_dst;
4509 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4510 if (d->code == fcode)
4512 arg0 = TREE_VALUE (arglist);
4513 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4514 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4515 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4516 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4517 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4518 mode0 = insn_data[d->icode].operand[0].mode;
4519 mode1 = insn_data[d->icode].operand[1].mode;
4520 mode2 = insn_data[d->icode].operand[2].mode;
4522 /* Invalid arguments, bail out before generating bad rtl.  */
4523 if (arg0 == error_mark_node
4524 || arg1 == error_mark_node
4525 || arg2 == error_mark_node)
/* The stream selector must be a constant in 0..3.  */
4528 if (TREE_CODE (arg2) != INTEGER_CST
4529 || TREE_INT_CST_LOW (arg2) & ~0x3)
4531 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4535 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4536 op0 = copy_to_mode_reg (mode0, op0);
4537 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4538 op1 = copy_to_mode_reg (mode1, op1);
4540 pat = GEN_FCN (d->icode) (op0, op1, op2);
4551 /* Expand the builtin in EXP and store the result in TARGET.  Store
4552 true in *EXPANDEDP if we found a builtin to expand.
   Top-level AltiVec dispatcher: tries the ld/st/dst sub-expanders
   first, then handles the special-cased builtins inline, then falls
   through to the abs and predicate tables, and finally the lv*
   loads (which are expanded as binops).  */
4554 altivec_expand_builtin (exp, target, expandedp)
4559 struct builtin_description *d;
4560 struct builtin_description_predicates *dp;
4562 enum insn_code icode;
4563 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4564 tree arglist = TREE_OPERAND (exp, 1);
4567 enum machine_mode tmode, mode0;
4568 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* Delegate to the specialized expanders; each sets *expandedp on
   success.  */
4570 target = altivec_expand_ld_builtin (exp, target, expandedp);
4574 target = altivec_expand_st_builtin (exp, target, expandedp);
4578 target = altivec_expand_dst_builtin (exp, target, expandedp);
/* Stores: no result value, expanded via the stv helper.  */
4586 case ALTIVEC_BUILTIN_STVX:
4587 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4588 case ALTIVEC_BUILTIN_STVEBX:
4589 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4590 case ALTIVEC_BUILTIN_STVEHX:
4591 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4592 case ALTIVEC_BUILTIN_STVEWX:
4593 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4594 case ALTIVEC_BUILTIN_STVXL:
4595 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
/* Read the vector status and control register.  */
4597 case ALTIVEC_BUILTIN_MFVSCR:
4598 icode = CODE_FOR_altivec_mfvscr;
4599 tmode = insn_data[icode].operand[0].mode;
4602 || GET_MODE (target) != tmode
4603 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4604 target = gen_reg_rtx (tmode);
4606 pat = GEN_FCN (icode) (target);
/* Write the vector status and control register.  */
4612 case ALTIVEC_BUILTIN_MTVSCR:
4613 icode = CODE_FOR_altivec_mtvscr;
4614 arg0 = TREE_VALUE (arglist);
4615 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4616 mode0 = insn_data[icode].operand[0].mode;
4618 /* If we got invalid arguments bail out before generating bad rtl.  */
4619 if (arg0 == error_mark_node)
4622 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4623 op0 = copy_to_mode_reg (mode0, op0);
4625 pat = GEN_FCN (icode) (op0);
4630 case ALTIVEC_BUILTIN_DSSALL:
4631 emit_insn (gen_altivec_dssall ());
/* Data-stream stop: the tag must be a 2-bit unsigned literal.  */
4634 case ALTIVEC_BUILTIN_DSS:
4635 icode = CODE_FOR_altivec_dss;
4636 arg0 = TREE_VALUE (arglist);
4637 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4638 mode0 = insn_data[icode].operand[0].mode;
4640 /* If we got invalid arguments bail out before generating bad rtl.  */
4641 if (arg0 == error_mark_node)
4644 if (TREE_CODE (arg0) != INTEGER_CST
4645 || TREE_INT_CST_LOW (arg0) & ~0x3)
4647 error ("argument to dss must be a 2-bit unsigned literal");
4651 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4652 op0 = copy_to_mode_reg (mode0, op0);
4654 emit_insn (gen_altivec_dss (op0));
4658 /* Expand abs* operations.  */
4659 d = (struct builtin_description *) bdesc_abs;
4660 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4661 if (d->code == fcode)
4662 return altivec_expand_abs_builtin (d->icode, arglist, target);
4664 /* Expand the AltiVec predicates.  */
4665 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4666 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4667 if (dp->code == fcode)
4668 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4670 /* LV* are funky.  We initialized them differently.  */
4673 case ALTIVEC_BUILTIN_LVSL:
4674 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4676 case ALTIVEC_BUILTIN_LVSR:
4677 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4679 case ALTIVEC_BUILTIN_LVEBX:
4680 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4682 case ALTIVEC_BUILTIN_LVEHX:
4683 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4685 case ALTIVEC_BUILTIN_LVEWX:
4686 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4688 case ALTIVEC_BUILTIN_LVXL:
4689 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4691 case ALTIVEC_BUILTIN_LVX:
4692 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4703 /* Binops that need to be initialized manually, but can be expanded
4704 automagically by rs6000_expand_binop_builtin.  These are the SPE
   indexed/immediate-offset load builtins.  */
4705 static struct builtin_description bdesc_2arg_spe[] =
4707 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
4708 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
4709 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
4710 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
4711 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
4712 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
4713 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
4714 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
4715 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
4716 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
4717 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
4718 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
4719 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
4720 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
4721 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
4722 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
4723 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
4724 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
4725 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
4726 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
4727 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
4728 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
4731 /* Expand the builtin in EXP and store the result in TARGET.  Store
4732 true in *EXPANDEDP if we found a builtin to expand.
4734 This expands the SPE builtins that are not simple unary and binary
   operations: the stores, the predicates, the evsel family, and the
   SPEFSCR move builtins.  */
4737 spe_expand_builtin (exp, target, expandedp)
4742 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4743 tree arglist = TREE_OPERAND (exp, 1);
4745 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4746 enum insn_code icode;
4747 enum machine_mode tmode, mode0;
4749 struct builtin_description *d;
4754 /* Syntax check for a 5-bit unsigned immediate.  */
4757 case SPE_BUILTIN_EVSTDD:
4758 case SPE_BUILTIN_EVSTDH:
4759 case SPE_BUILTIN_EVSTDW:
4760 case SPE_BUILTIN_EVSTWHE:
4761 case SPE_BUILTIN_EVSTWHO:
4762 case SPE_BUILTIN_EVSTWWE:
4763 case SPE_BUILTIN_EVSTWWO:
4764 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4765 if (TREE_CODE (arg1) != INTEGER_CST
4766 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4768 error ("argument 2 must be a 5-bit unsigned literal");
/* Table-driven expansion: binops, then predicates, then evsel.  */
4776 d = (struct builtin_description *) bdesc_2arg_spe;
4777 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
4778 if (d->code == fcode)
4779 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4781 d = (struct builtin_description *) bdesc_spe_predicates;
4782 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
4783 if (d->code == fcode)
4784 return spe_expand_predicate_builtin (d->icode, arglist, target);
4786 d = (struct builtin_description *) bdesc_spe_evsel;
4787 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
4788 if (d->code == fcode)
4789 return spe_expand_evsel_builtin (d->icode, arglist, target);
/* SPE stores share the AltiVec stv expander (same operand shape).  */
4793 case SPE_BUILTIN_EVSTDDX:
4794 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
4795 case SPE_BUILTIN_EVSTDHX:
4796 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
4797 case SPE_BUILTIN_EVSTDWX:
4798 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
4799 case SPE_BUILTIN_EVSTWHEX:
4800 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
4801 case SPE_BUILTIN_EVSTWHOX:
4802 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
4803 case SPE_BUILTIN_EVSTWWEX:
4804 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
4805 case SPE_BUILTIN_EVSTWWOX:
4806 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
4807 case SPE_BUILTIN_EVSTDD:
4808 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
4809 case SPE_BUILTIN_EVSTDH:
4810 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
4811 case SPE_BUILTIN_EVSTDW:
4812 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
4813 case SPE_BUILTIN_EVSTWHE:
4814 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
4815 case SPE_BUILTIN_EVSTWHO:
4816 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
4817 case SPE_BUILTIN_EVSTWWE:
4818 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
4819 case SPE_BUILTIN_EVSTWWO:
4820 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
/* Read the SPE floating-point status and control register.  */
4821 case SPE_BUILTIN_MFSPEFSCR:
4822 icode = CODE_FOR_spe_mfspefscr;
4823 tmode = insn_data[icode].operand[0].mode;
4826 || GET_MODE (target) != tmode
4827 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4828 target = gen_reg_rtx (tmode);
4830 pat = GEN_FCN (icode) (target);
/* Write the SPE floating-point status and control register.  */
4835 case SPE_BUILTIN_MTSPEFSCR:
4836 icode = CODE_FOR_spe_mtspefscr;
4837 arg0 = TREE_VALUE (arglist);
4838 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4839 mode0 = insn_data[icode].operand[0].mode;
4841 if (arg0 == error_mark_node)
4844 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4845 op0 = copy_to_mode_reg (mode0, op0);
4847 pat = GEN_FCN (icode) (op0);
/* Expand an SPE predicate builtin.  The first argument selects the
   variant (all/any/upper/lower); the remaining two are the vectors to
   compare.  One compare insn ICODE is emitted into a CCmode scratch,
   then the requested CR bit is extracted into TARGET (SImode).  */
4860 spe_expand_predicate_builtin (icode, arglist, target)
4861 enum insn_code icode;
4865 rtx pat, scratch, tmp;
4866 tree form = TREE_VALUE (arglist);
4867 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4868 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4869 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4870 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4871 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4872 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* The variant selector must be a compile-time constant.  */
4876 if (TREE_CODE (form) != INTEGER_CST)
4878 error ("argument 1 of __builtin_spe_predicate must be a constant");
4882 form_int = TREE_INT_CST_LOW (form);
4887 if (arg0 == error_mark_node || arg1 == error_mark_node)
4891 || GET_MODE (target) != SImode
4892 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
4893 target = gen_reg_rtx (SImode);
4895 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4896 op0 = copy_to_mode_reg (mode0, op0);
4897 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4898 op1 = copy_to_mode_reg (mode1, op1);
4900 scratch = gen_reg_rtx (CCmode);
4902 pat = GEN_FCN (icode) (scratch, op0, op1);
4907 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4908 _lower_.  We use one compare, but look in different bits of the
4909 CR for each variant.
4911 There are 2 elements in each SPE simd type (upper/lower).  The CR
4912 bits are set as follows:
4914 BIT0 | BIT 1 | BIT 2 | BIT 3
4915 U | L | (U | L) | (U & L)
4917 So, for an "all" relationship, BIT 3 would be set.
4918 For an "any" relationship, BIT 2 would be set.  Etc.
4920 Following traditional nomenclature, these bits map to:
4922 BIT0 | BIT 1 | BIT 2 | BIT 3
4925 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
4930 /* All variant.  OV bit.  */
4932 /* We need to get to the OV bit, which is the ORDERED bit.  We
4933 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
4934 that's ugly and will trigger a validate_condition_mode abort.
4935 So let's just use another pattern.  */
4936 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
4938 /* Any variant.  EQ bit.  */
4942 /* Upper variant.  LT bit.  */
4946 /* Lower variant.  GT bit.  */
4951 error ("argument 1 of __builtin_spe_predicate is out of range");
/* Materialize the chosen CR-bit comparison as an SImode value.  */
4955 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
4956 emit_move_insn (target, tmp);
4961 /* The evsel builtins look like this:
4963 e = __builtin_spe_evsel_OP (a, b, c, d);
4967 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
4968 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
   ICODE is the compare insn for OP; the select itself is emitted via
   the spe_evsel / spe_evsel_fs patterns on the resulting CC value.  */
4972 spe_expand_evsel_builtin (icode, arglist, target)
4973 enum insn_code icode;
4978 tree arg0 = TREE_VALUE (arglist);
4979 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4980 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4981 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
4982 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4983 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4984 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4985 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
4986 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4987 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4992 if (arg0 == error_mark_node || arg1 == error_mark_node
4993 || arg2 == error_mark_node || arg3 == error_mark_node)
4997 || GET_MODE (target) != mode0
4998 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
4999 target = gen_reg_rtx (mode0);
5001 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5002 op0 = copy_to_mode_reg (mode0, op0);
5003 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5004 op1 = copy_to_mode_reg (mode0, op1);
5005 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5006 op2 = copy_to_mode_reg (mode0, op2);
5007 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5008 op3 = copy_to_mode_reg (mode0, op3);
5010 /* Generate the compare.  */
5011 scratch = gen_reg_rtx (CCmode);
5012 pat = GEN_FCN (icode) (scratch, op0, op1);
/* Integer vs. floating-point SPE vectors use distinct select insns.  */
5017 if (mode0 == V2SImode)
5018 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5020 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5025 /* Expand an expression EXP that calls a built-in function,
5026 with result going to TARGET if that's convenient
5027 (and in mode MODE if that's convenient).
5028 SUBTARGET may be used as the target for computing one of EXP's operands.
5029 IGNORE is nonzero if the value is to be ignored.
   Top-level dispatcher: first the AltiVec and SPE special-case
   expanders, then the generic unary/binary/ternary tables.  */
5032 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5035 rtx subtarget ATTRIBUTE_UNUSED;
5036 enum machine_mode mode ATTRIBUTE_UNUSED;
5037 int ignore ATTRIBUTE_UNUSED;
5039 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5040 tree arglist = TREE_OPERAND (exp, 1);
5041 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5042 struct builtin_description *d;
/* The target-specific expanders report success via `success'.  */
5049 ret = altivec_expand_builtin (exp, target, &success);
5056 ret = spe_expand_builtin (exp, target, &success);
5062 /* Handle simple unary operations.  */
5063 d = (struct builtin_description *) bdesc_1arg;
5064 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5065 if (d->code == fcode)
5066 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5068 /* Handle simple binary operations.  */
5069 d = (struct builtin_description *) bdesc_2arg;
5070 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5071 if (d->code == fcode)
5072 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5074 /* Handle simple ternary operations.  */
5075 d = (struct builtin_description *) bdesc_3arg;
5076 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5077 if (d->code == fcode)
5078 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
/* Register all rs6000 builtins: the SPE and AltiVec families plus the
   builtins common to both.  */
5085 rs6000_init_builtins ()
5088 spe_init_builtins ();
5090 altivec_init_builtins ();
5091 rs6000_common_init_builtins ();
5094 /* Search through a set of builtins and enable the mask bits.
5095 DESC is an array of builtins.
5096 SIZE is the total number of builtins.
5097 START is the builtin enum at which to start.
5098 END is the builtin enum at which to end.
   The entries from START through END must be contiguous in DESC; each
   entry in that range has its mask set to the current target_flags.  */
5100 enable_mask_for_builtins (desc, size, start, end)
5101 struct builtin_description *desc;
5103 enum rs6000_builtins start, end;
/* Find the first entry in the range...  */
5107 for (i = 0; i < size; ++i)
5108 if (desc[i].code == start)
/* ...then flag every entry up to and including END.  */
5114 for (; i < size; ++i)
5116 /* Flip all the bits on.  */
5117 desc[i].mask = target_flags;
5118 if (desc[i].code == end)
/* Register the SPE builtin functions: build the tree function types
   (via tree_cons chains), enable the mask bits for the shared 1-arg /
   2-arg builtins and the SPE predicate/evsel tables, define the
   irregular load/store builtins, and finally the predicate and evsel
   builtins whose types depend on the insn's operand-1 mode.
   NOTE(review): this extract elides many original lines (locals,
   braces, switch case labels, trailing tree_cons/endlink arguments);
   the visible lines are not contiguous.  */
5124 spe_init_builtins (void)
5126 tree endlink = void_list_node;
5127 tree puint_type_node = build_pointer_type (unsigned_type_node);
5128 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5129 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5130 struct builtin_description *d;
5133 tree v2si_ftype_4_v2si
5134 = build_function_type
5136 tree_cons (NULL_TREE, V2SI_type_node,
5137 tree_cons (NULL_TREE, V2SI_type_node,
5138 tree_cons (NULL_TREE, V2SI_type_node,
5139 tree_cons (NULL_TREE, V2SI_type_node,
5142 tree v2sf_ftype_4_v2sf
5143 = build_function_type
5145 tree_cons (NULL_TREE, V2SF_type_node,
5146 tree_cons (NULL_TREE, V2SF_type_node,
5147 tree_cons (NULL_TREE, V2SF_type_node,
5148 tree_cons (NULL_TREE, V2SF_type_node,
5151 tree int_ftype_int_v2si_v2si
5152 = build_function_type
5154 tree_cons (NULL_TREE, integer_type_node,
5155 tree_cons (NULL_TREE, V2SI_type_node,
5156 tree_cons (NULL_TREE, V2SI_type_node,
5159 tree int_ftype_int_v2sf_v2sf
5160 = build_function_type
5162 tree_cons (NULL_TREE, integer_type_node,
5163 tree_cons (NULL_TREE, V2SF_type_node,
5164 tree_cons (NULL_TREE, V2SF_type_node,
5167 tree void_ftype_v2si_puint_int
5168 = build_function_type (void_type_node,
5169 tree_cons (NULL_TREE, V2SI_type_node,
5170 tree_cons (NULL_TREE, puint_type_node,
5171 tree_cons (NULL_TREE,
5175 tree void_ftype_v2si_puint_char
5176 = build_function_type (void_type_node,
5177 tree_cons (NULL_TREE, V2SI_type_node,
5178 tree_cons (NULL_TREE, puint_type_node,
5179 tree_cons (NULL_TREE,
5183 tree void_ftype_v2si_pv2si_int
5184 = build_function_type (void_type_node,
5185 tree_cons (NULL_TREE, V2SI_type_node,
5186 tree_cons (NULL_TREE, pv2si_type_node,
5187 tree_cons (NULL_TREE,
5191 tree void_ftype_v2si_pv2si_char
5192 = build_function_type (void_type_node,
5193 tree_cons (NULL_TREE, V2SI_type_node,
5194 tree_cons (NULL_TREE, pv2si_type_node,
5195 tree_cons (NULL_TREE,
5200 = build_function_type (void_type_node,
5201 tree_cons (NULL_TREE, integer_type_node, endlink));
5204 = build_function_type (integer_type_node,
5205 tree_cons (NULL_TREE, void_type_node, endlink));
5207 tree v2si_ftype_pv2si_int
5208 = build_function_type (V2SI_type_node,
5209 tree_cons (NULL_TREE, pv2si_type_node,
5210 tree_cons (NULL_TREE, integer_type_node,
5213 tree v2si_ftype_puint_int
5214 = build_function_type (V2SI_type_node,
5215 tree_cons (NULL_TREE, puint_type_node,
5216 tree_cons (NULL_TREE, integer_type_node,
5219 tree v2si_ftype_pushort_int
5220 = build_function_type (V2SI_type_node,
5221 tree_cons (NULL_TREE, pushort_type_node,
5222 tree_cons (NULL_TREE, integer_type_node,
5225 /* The initialization of the simple binary and unary builtins is
5226 done in rs6000_common_init_builtins, but we have to enable the
5227 mask bits here manually because we have run out of `target_flags'
5228 bits. We really need to redesign this mask business. */
5230 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5231 ARRAY_SIZE (bdesc_2arg),
5234 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5235 ARRAY_SIZE (bdesc_1arg),
5237 SPE_BUILTIN_EVSUBFUSIAAW);
5238 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5239 ARRAY_SIZE (bdesc_spe_predicates),
5240 SPE_BUILTIN_EVCMPEQ,
5241 SPE_BUILTIN_EVFSTSTLT);
5242 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5243 ARRAY_SIZE (bdesc_spe_evsel),
5244 SPE_BUILTIN_EVSEL_CMPGTS,
5245 SPE_BUILTIN_EVSEL_FSTSTEQ);
5247 /* Initialize irregular SPE builtins. */
5249 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5250 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5251 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5252 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5253 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5254 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5255 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5256 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5257 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5258 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5259 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5260 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5261 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5262 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5263 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5264 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5267 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5268 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5269 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5270 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5271 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5272 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5273 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5274 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5275 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5276 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5277 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5278 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5279 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5280 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5281 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5282 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5283 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5284 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5285 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5286 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5287 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5288 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
/* Predicates: choose the function type from the insn's operand-1 mode
   (V2SI vs. V2SF).  */
5291 d = (struct builtin_description *) bdesc_spe_predicates;
5292 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5296 switch (insn_data[d->icode].operand[1].mode)
5299 type = int_ftype_int_v2si_v2si;
5302 type = int_ftype_int_v2sf_v2sf;
5308 def_builtin (d->mask, d->name, type, d->code);
5311 /* Evsel predicates. */
5312 d = (struct builtin_description *) bdesc_spe_evsel;
5313 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5317 switch (insn_data[d->icode].operand[1].mode)
5320 type = v2si_ftype_4_v2si;
5323 type = v2sf_ftype_4_v2sf;
5329 def_builtin (d->mask, d->name, type, d->code);
/* Register the AltiVec builtin functions: the ld/st "internal"
   builtins, mtvscr/mfvscr, data-stream controls (dss/dssall), the
   lvsl/lvsr and element load/store builtins, the DST variants, the
   predicate builtins, and the abs* operators — the last two choosing
   their function types from the insn's operand modes.
   NOTE(review): locals, braces and switch case labels are elided in
   this extract; the visible lines are not contiguous.  */
5334 altivec_init_builtins (void)
5336 struct builtin_description *d;
5337 struct builtin_description_predicates *dp;
5339 tree pfloat_type_node = build_pointer_type (float_type_node);
5340 tree pint_type_node = build_pointer_type (integer_type_node);
5341 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5342 tree pchar_type_node = build_pointer_type (char_type_node);
5344 tree pvoid_type_node = build_pointer_type (void_type_node);
5346 tree int_ftype_int_v4si_v4si
5347 = build_function_type_list (integer_type_node,
5348 integer_type_node, V4SI_type_node,
5349 V4SI_type_node, NULL_TREE);
5350 tree v4sf_ftype_pfloat
5351 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
5352 tree void_ftype_pfloat_v4sf
5353 = build_function_type_list (void_type_node,
5354 pfloat_type_node, V4SF_type_node, NULL_TREE);
5355 tree v4si_ftype_pint
/* NOTE(review): the next line of the extract carries two declarations
   jammed together (v4si_ftype_pint's initializer and the start of
   void_ftype_pint_v4si); likely an extraction artifact — verify against
   the full source.  */
5356 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE); tree void_ftype_pint_v4si
5357 = build_function_type_list (void_type_node,
5358 pint_type_node, V4SI_type_node, NULL_TREE);
5359 tree v8hi_ftype_pshort
5360 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
5361 tree void_ftype_pshort_v8hi
5362 = build_function_type_list (void_type_node,
5363 pshort_type_node, V8HI_type_node, NULL_TREE);
5364 tree v16qi_ftype_pchar
5365 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
5366 tree void_ftype_pchar_v16qi
5367 = build_function_type_list (void_type_node,
5368 pchar_type_node, V16QI_type_node, NULL_TREE);
5369 tree void_ftype_v4si
5370 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5371 tree v8hi_ftype_void
5372 = build_function_type (V8HI_type_node, void_list_node);
5373 tree void_ftype_void
5374 = build_function_type (void_type_node, void_list_node);
5376 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5377 tree v16qi_ftype_int_pvoid
5378 = build_function_type_list (V16QI_type_node,
5379 integer_type_node, pvoid_type_node, NULL_TREE);
5380 tree v8hi_ftype_int_pvoid
5381 = build_function_type_list (V8HI_type_node,
5382 integer_type_node, pvoid_type_node, NULL_TREE);
5383 tree v4si_ftype_int_pvoid
5384 = build_function_type_list (V4SI_type_node,
5385 integer_type_node, pvoid_type_node, NULL_TREE);
5386 tree void_ftype_v4si_int_pvoid
5387 = build_function_type_list (void_type_node,
5388 V4SI_type_node, integer_type_node,
5389 pvoid_type_node, NULL_TREE);
5390 tree void_ftype_v16qi_int_pvoid
5391 = build_function_type_list (void_type_node,
5392 V16QI_type_node, integer_type_node,
5393 pvoid_type_node, NULL_TREE);
5394 tree void_ftype_v8hi_int_pvoid
5395 = build_function_type_list (void_type_node,
5396 V8HI_type_node, integer_type_node,
5397 pvoid_type_node, NULL_TREE);
5398 tree int_ftype_int_v8hi_v8hi
5399 = build_function_type_list (integer_type_node,
5400 integer_type_node, V8HI_type_node,
5401 V8HI_type_node, NULL_TREE);
5402 tree int_ftype_int_v16qi_v16qi
5403 = build_function_type_list (integer_type_node,
5404 integer_type_node, V16QI_type_node,
5405 V16QI_type_node, NULL_TREE);
5406 tree int_ftype_int_v4sf_v4sf
5407 = build_function_type_list (integer_type_node,
5408 integer_type_node, V4SF_type_node,
5409 V4SF_type_node, NULL_TREE);
5410 tree v4si_ftype_v4si
5411 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5412 tree v8hi_ftype_v8hi
5413 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5414 tree v16qi_ftype_v16qi
5415 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5416 tree v4sf_ftype_v4sf
5417 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5418 tree void_ftype_pvoid_int_char
5419 = build_function_type_list (void_type_node,
5420 pvoid_type_node, integer_type_node,
5421 char_type_node, NULL_TREE);
5423 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5424 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5425 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5426 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5427 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5428 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5429 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5430 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5431 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5432 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5433 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5434 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5435 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
5436 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
5437 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
5438 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
5439 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
5440 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
5441 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
5442 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5443 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5444 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5445 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5446 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5448 /* Add the DST variants. */
5449 d = (struct builtin_description *) bdesc_dst;
5450 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5451 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
5453 /* Initialize the predicates. */
5454 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5455 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5457 enum machine_mode mode1;
5460 mode1 = insn_data[dp->icode].operand[1].mode;
5465 type = int_ftype_int_v4si_v4si;
5468 type = int_ftype_int_v8hi_v8hi;
5471 type = int_ftype_int_v16qi_v16qi;
5474 type = int_ftype_int_v4sf_v4sf;
5480 def_builtin (dp->mask, dp->name, type, dp->code);
5483 /* Initialize the abs* operators. */
5484 d = (struct builtin_description *) bdesc_abs;
5485 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5487 enum machine_mode mode0;
5490 mode0 = insn_data[d->icode].operand[0].mode;
5495 type = v4si_ftype_v4si;
5498 type = v8hi_ftype_v8hi;
5501 type = v16qi_ftype_v16qi;
5504 type = v4sf_ftype_v4sf;
5510 def_builtin (d->mask, d->name, type, d->code);
/* Register the builtins shared by SPE and AltiVec: build the function
   types for the simple unary, binary and ternary operators, then walk
   bdesc_3arg, bdesc_2arg and bdesc_1arg, selecting each builtin's
   function type from its insn's operand modes.
   NOTE(review): locals, braces, switch case labels and some argument
   lists are elided in this extract; the visible lines are not
   contiguous.  */
5515 rs6000_common_init_builtins (void)
5517 struct builtin_description *d;
5520 tree v4sf_ftype_v4sf_v4sf_v16qi
5521 = build_function_type_list (V4SF_type_node,
5522 V4SF_type_node, V4SF_type_node,
5523 V16QI_type_node, NULL_TREE);
5524 tree v4si_ftype_v4si_v4si_v16qi
5525 = build_function_type_list (V4SI_type_node,
5526 V4SI_type_node, V4SI_type_node,
5527 V16QI_type_node, NULL_TREE);
5528 tree v8hi_ftype_v8hi_v8hi_v16qi
5529 = build_function_type_list (V8HI_type_node,
5530 V8HI_type_node, V8HI_type_node,
5531 V16QI_type_node, NULL_TREE);
5532 tree v16qi_ftype_v16qi_v16qi_v16qi
5533 = build_function_type_list (V16QI_type_node,
5534 V16QI_type_node, V16QI_type_node,
5535 V16QI_type_node, NULL_TREE);
5536 tree v4si_ftype_char
5537 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5538 tree v8hi_ftype_char
5539 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5540 tree v16qi_ftype_char
5541 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5542 tree v8hi_ftype_v16qi
5543 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5544 tree v4sf_ftype_v4sf
5545 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5547 tree v2si_ftype_v2si_v2si
5548 = build_function_type_list (V2SI_type_node,
5549 V2SI_type_node, V2SI_type_node, NULL_TREE);
5551 tree v2sf_ftype_v2sf_v2sf
5552 = build_function_type_list (V2SF_type_node,
5553 V2SF_type_node, V2SF_type_node, NULL_TREE);
5555 tree v2si_ftype_int_int
5556 = build_function_type_list (V2SI_type_node,
5557 integer_type_node, integer_type_node,
5560 tree v2si_ftype_v2si
5561 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5563 tree v2sf_ftype_v2sf
5564 = build_function_type_list (V2SF_type_node,
5565 V2SF_type_node, NULL_TREE);
5567 tree v2sf_ftype_v2si
5568 = build_function_type_list (V2SF_type_node,
5569 V2SI_type_node, NULL_TREE);
5571 tree v2si_ftype_v2sf
5572 = build_function_type_list (V2SI_type_node,
5573 V2SF_type_node, NULL_TREE);
5575 tree v2si_ftype_v2si_char
5576 = build_function_type_list (V2SI_type_node,
5577 V2SI_type_node, char_type_node, NULL_TREE);
5579 tree v2si_ftype_int_char
5580 = build_function_type_list (V2SI_type_node,
5581 integer_type_node, char_type_node, NULL_TREE);
5583 tree v2si_ftype_char
5584 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5586 tree int_ftype_int_int
5587 = build_function_type_list (integer_type_node,
5588 integer_type_node, integer_type_node,
5591 tree v4si_ftype_v4si_v4si
5592 = build_function_type_list (V4SI_type_node,
5593 V4SI_type_node, V4SI_type_node, NULL_TREE);
5594 tree v4sf_ftype_v4si_char
5595 = build_function_type_list (V4SF_type_node,
5596 V4SI_type_node, char_type_node, NULL_TREE);
5597 tree v4si_ftype_v4sf_char
5598 = build_function_type_list (V4SI_type_node,
5599 V4SF_type_node, char_type_node, NULL_TREE);
5600 tree v4si_ftype_v4si_char
5601 = build_function_type_list (V4SI_type_node,
5602 V4SI_type_node, char_type_node, NULL_TREE);
5603 tree v8hi_ftype_v8hi_char
5604 = build_function_type_list (V8HI_type_node,
5605 V8HI_type_node, char_type_node, NULL_TREE);
5606 tree v16qi_ftype_v16qi_char
5607 = build_function_type_list (V16QI_type_node,
5608 V16QI_type_node, char_type_node, NULL_TREE);
5609 tree v16qi_ftype_v16qi_v16qi_char
5610 = build_function_type_list (V16QI_type_node,
5611 V16QI_type_node, V16QI_type_node,
5612 char_type_node, NULL_TREE);
5613 tree v8hi_ftype_v8hi_v8hi_char
5614 = build_function_type_list (V8HI_type_node,
5615 V8HI_type_node, V8HI_type_node,
5616 char_type_node, NULL_TREE);
5617 tree v4si_ftype_v4si_v4si_char
5618 = build_function_type_list (V4SI_type_node,
5619 V4SI_type_node, V4SI_type_node,
5620 char_type_node, NULL_TREE);
5621 tree v4sf_ftype_v4sf_v4sf_char
5622 = build_function_type_list (V4SF_type_node,
5623 V4SF_type_node, V4SF_type_node,
5624 char_type_node, NULL_TREE);
5625 tree v4sf_ftype_v4sf_v4sf
5626 = build_function_type_list (V4SF_type_node,
5627 V4SF_type_node, V4SF_type_node, NULL_TREE);
5628 tree v4sf_ftype_v4sf_v4sf_v4si
5629 = build_function_type_list (V4SF_type_node,
5630 V4SF_type_node, V4SF_type_node,
5631 V4SI_type_node, NULL_TREE);
5632 tree v4sf_ftype_v4sf_v4sf_v4sf
5633 = build_function_type_list (V4SF_type_node,
5634 V4SF_type_node, V4SF_type_node,
5635 V4SF_type_node, NULL_TREE);
5636 tree v4si_ftype_v4si_v4si_v4si
5637 = build_function_type_list (V4SI_type_node,
5638 V4SI_type_node, V4SI_type_node,
5639 V4SI_type_node, NULL_TREE);
5640 tree v8hi_ftype_v8hi_v8hi
5641 = build_function_type_list (V8HI_type_node,
5642 V8HI_type_node, V8HI_type_node, NULL_TREE);
5643 tree v8hi_ftype_v8hi_v8hi_v8hi
5644 = build_function_type_list (V8HI_type_node,
5645 V8HI_type_node, V8HI_type_node,
5646 V8HI_type_node, NULL_TREE);
5647 tree v4si_ftype_v8hi_v8hi_v4si
5648 = build_function_type_list (V4SI_type_node,
5649 V8HI_type_node, V8HI_type_node,
5650 V4SI_type_node, NULL_TREE);
5651 tree v4si_ftype_v16qi_v16qi_v4si
5652 = build_function_type_list (V4SI_type_node,
5653 V16QI_type_node, V16QI_type_node,
5654 V4SI_type_node, NULL_TREE);
5655 tree v16qi_ftype_v16qi_v16qi
5656 = build_function_type_list (V16QI_type_node,
5657 V16QI_type_node, V16QI_type_node, NULL_TREE);
5658 tree v4si_ftype_v4sf_v4sf
5659 = build_function_type_list (V4SI_type_node,
5660 V4SF_type_node, V4SF_type_node, NULL_TREE);
5661 tree v8hi_ftype_v16qi_v16qi
5662 = build_function_type_list (V8HI_type_node,
5663 V16QI_type_node, V16QI_type_node, NULL_TREE);
5664 tree v4si_ftype_v8hi_v8hi
5665 = build_function_type_list (V4SI_type_node,
5666 V8HI_type_node, V8HI_type_node, NULL_TREE);
5667 tree v8hi_ftype_v4si_v4si
5668 = build_function_type_list (V8HI_type_node,
5669 V4SI_type_node, V4SI_type_node, NULL_TREE);
5670 tree v16qi_ftype_v8hi_v8hi
5671 = build_function_type_list (V16QI_type_node,
5672 V8HI_type_node, V8HI_type_node, NULL_TREE);
5673 tree v4si_ftype_v16qi_v4si
5674 = build_function_type_list (V4SI_type_node,
5675 V16QI_type_node, V4SI_type_node, NULL_TREE);
5676 tree v4si_ftype_v16qi_v16qi
5677 = build_function_type_list (V4SI_type_node,
5678 V16QI_type_node, V16QI_type_node, NULL_TREE);
5679 tree v4si_ftype_v8hi_v4si
5680 = build_function_type_list (V4SI_type_node,
5681 V8HI_type_node, V4SI_type_node, NULL_TREE);
5682 tree v4si_ftype_v8hi
5683 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5684 tree int_ftype_v4si_v4si
5685 = build_function_type_list (integer_type_node,
5686 V4SI_type_node, V4SI_type_node, NULL_TREE);
5687 tree int_ftype_v4sf_v4sf
5688 = build_function_type_list (integer_type_node,
5689 V4SF_type_node, V4SF_type_node, NULL_TREE);
5690 tree int_ftype_v16qi_v16qi
5691 = build_function_type_list (integer_type_node,
5692 V16QI_type_node, V16QI_type_node, NULL_TREE);
5693 tree int_ftype_v8hi_v8hi
5694 = build_function_type_list (integer_type_node,
5695 V8HI_type_node, V8HI_type_node, NULL_TREE);
5697 /* Add the simple ternary operators. */
5698 d = (struct builtin_description *) bdesc_3arg;
5699 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5702 enum machine_mode mode0, mode1, mode2, mode3;
5708 mode0 = insn_data[d->icode].operand[0].mode;
5709 mode1 = insn_data[d->icode].operand[1].mode;
5710 mode2 = insn_data[d->icode].operand[2].mode;
5711 mode3 = insn_data[d->icode].operand[3].mode;
5713 /* When all four are of the same mode. */
5714 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5719 type = v4si_ftype_v4si_v4si_v4si;
5722 type = v4sf_ftype_v4sf_v4sf_v4sf;
5725 type = v8hi_ftype_v8hi_v8hi_v8hi;
5728 type = v16qi_ftype_v16qi_v16qi_v16qi;
5734 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5739 type = v4si_ftype_v4si_v4si_v16qi;
5742 type = v4sf_ftype_v4sf_v4sf_v16qi;
5745 type = v8hi_ftype_v8hi_v8hi_v16qi;
5748 type = v16qi_ftype_v16qi_v16qi_v16qi;
5754 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5755 && mode3 == V4SImode)
5756 type = v4si_ftype_v16qi_v16qi_v4si;
5757 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5758 && mode3 == V4SImode)
5759 type = v4si_ftype_v8hi_v8hi_v4si;
5760 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5761 && mode3 == V4SImode)
5762 type = v4sf_ftype_v4sf_v4sf_v4si;
5764 /* vchar, vchar, vchar, 4 bit literal. */
5765 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5767 type = v16qi_ftype_v16qi_v16qi_char;
5769 /* vshort, vshort, vshort, 4 bit literal. */
5770 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5772 type = v8hi_ftype_v8hi_v8hi_char;
5774 /* vint, vint, vint, 4 bit literal. */
5775 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5777 type = v4si_ftype_v4si_v4si_char;
5779 /* vfloat, vfloat, vfloat, 4 bit literal. */
5780 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5782 type = v4sf_ftype_v4sf_v4sf_char;
5787 def_builtin (d->mask, d->name, type, d->code);
5790 /* Add the simple binary operators. */
5791 d = (struct builtin_description *) bdesc_2arg;
5792 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5794 enum machine_mode mode0, mode1, mode2;
5800 mode0 = insn_data[d->icode].operand[0].mode;
5801 mode1 = insn_data[d->icode].operand[1].mode;
5802 mode2 = insn_data[d->icode].operand[2].mode;
5804 /* When all three operands are of the same mode. */
5805 if (mode0 == mode1 && mode1 == mode2)
5810 type = v4sf_ftype_v4sf_v4sf;
5813 type = v4si_ftype_v4si_v4si;
5816 type = v16qi_ftype_v16qi_v16qi;
5819 type = v8hi_ftype_v8hi_v8hi;
5822 type = v2si_ftype_v2si_v2si;
5825 type = v2sf_ftype_v2sf_v2sf;
5828 type = int_ftype_int_int;
5835 /* A few other combos we really don't want to do manually. */
5837 /* vint, vfloat, vfloat. */
5838 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5839 type = v4si_ftype_v4sf_v4sf;
5841 /* vshort, vchar, vchar. */
5842 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
5843 type = v8hi_ftype_v16qi_v16qi;
5845 /* vint, vshort, vshort. */
5846 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
5847 type = v4si_ftype_v8hi_v8hi;
5849 /* vshort, vint, vint. */
5850 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
5851 type = v8hi_ftype_v4si_v4si;
5853 /* vchar, vshort, vshort. */
5854 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
5855 type = v16qi_ftype_v8hi_v8hi;
5857 /* vint, vchar, vint. */
5858 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
5859 type = v4si_ftype_v16qi_v4si;
5861 /* vint, vchar, vchar. */
5862 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
5863 type = v4si_ftype_v16qi_v16qi;
5865 /* vint, vshort, vint. */
5866 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
5867 type = v4si_ftype_v8hi_v4si;
5869 /* vint, vint, 5 bit literal. */
5870 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
5871 type = v4si_ftype_v4si_char;
5873 /* vshort, vshort, 5 bit literal. */
5874 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
5875 type = v8hi_ftype_v8hi_char;
5877 /* vchar, vchar, 5 bit literal. */
5878 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
5879 type = v16qi_ftype_v16qi_char;
5881 /* vfloat, vint, 5 bit literal. */
5882 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
5883 type = v4sf_ftype_v4si_char;
5885 /* vint, vfloat, 5 bit literal. */
5886 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
5887 type = v4si_ftype_v4sf_char;
5889 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
5890 type = v2si_ftype_int_int;
5892 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
5893 type = v2si_ftype_v2si_char;
5895 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
5896 type = v2si_ftype_int_char;
/* int, [vector], [vector] — predicate-style results.  */
5899 else if (mode0 == SImode)
5904 type = int_ftype_v4si_v4si;
5907 type = int_ftype_v4sf_v4sf;
5910 type = int_ftype_v16qi_v16qi;
5913 type = int_ftype_v8hi_v8hi;
5923 def_builtin (d->mask, d->name, type, d->code);
5926 /* Add the simple unary operators. */
5927 d = (struct builtin_description *) bdesc_1arg;
5928 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5930 enum machine_mode mode0, mode1;
5936 mode0 = insn_data[d->icode].operand[0].mode;
5937 mode1 = insn_data[d->icode].operand[1].mode;
5939 if (mode0 == V4SImode && mode1 == QImode)
5940 type = v4si_ftype_char;
5941 else if (mode0 == V8HImode && mode1 == QImode)
5942 type = v8hi_ftype_char;
5943 else if (mode0 == V16QImode && mode1 == QImode)
5944 type = v16qi_ftype_char;
5945 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5946 type = v4sf_ftype_v4sf;
5947 else if (mode0 == V8HImode && mode1 == V16QImode)
5948 type = v8hi_ftype_v16qi;
5949 else if (mode0 == V4SImode && mode1 == V8HImode)
5950 type = v4si_ftype_v8hi;
5951 else if (mode0 == V2SImode && mode1 == V2SImode)
5952 type = v2si_ftype_v2si;
5953 else if (mode0 == V2SFmode && mode1 == V2SFmode)
5954 type = v2sf_ftype_v2sf;
5955 else if (mode0 == V2SFmode && mode1 == V2SImode)
5956 type = v2sf_ftype_v2si;
5957 else if (mode0 == V2SImode && mode1 == V2SFmode)
5958 type = v2si_ftype_v2sf;
5959 else if (mode0 == V2SImode && mode1 == QImode)
5960 type = v2si_ftype_char;
5964 def_builtin (d->mask, d->name, type, d->code);
5968 /* Generate a memory reference for expand_block_move, copying volatile,
5969 and other bits from an original memory reference. */
/* NOTE(review): the remaining K&R parameter declarations (ADDR,
   ORIG_MEM) and the return of MEM are elided in this extract.  */
5972 expand_block_move_mem (mode, addr, orig_mem)
5973 enum machine_mode mode;
5977 rtx mem = gen_rtx_MEM (mode, addr);
5979 MEM_COPY_ATTRIBUTES (mem, orig_mem);
5983 /* Expand a block move operation, and return 1 if successful. Return 0
5984 if we should let the compiler generate normal code.
5986 operands[0] is the destination
5987 operands[1] is the source
5988 operands[2] is the length
5989 operands[3] is the alignment */
/* NOTE(review): this excerpt elides many original lines (embedded
   line numbers at left are non-contiguous); surviving code is kept
   byte-identical.  Do not rewrite from this fragment alone.  */
5991 #define MAX_MOVE_REG 4
/* Expand a fixed-size block move.  Uses PowerPC string instructions
   (movstrsi_*reg patterns) when TARGET_STRING, otherwise discrete
   loads/stores buffered in `stores[]' so loads precede stores.  */
5994 expand_block_move (operands)
5997 rtx orig_dest = operands[0];
5998 rtx orig_src = operands[1];
5999 rtx bytes_rtx = operands[2];
6000 rtx align_rtx = operands[3];
6001 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6012 rtx stores[MAX_MOVE_REG];
6015 /* If this is not a fixed size move, just call memcpy */
6019 /* If this is not a fixed size alignment, abort */
6020 if (GET_CODE (align_rtx) != CONST_INT)
6022 align = INTVAL (align_rtx);
6024 /* Anything to move? */
6025 bytes = INTVAL (bytes_rtx);
6029 /* store_one_arg depends on expand_block_move to handle at least the size of
6030 reg_parm_stack_space. */
6031 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6034 /* Move the address into scratch registers. */
6035 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
6036 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
6038 if (TARGET_STRING) /* string instructions are available */
6040 for ( ; bytes > 0; bytes -= move_bytes)
6042 if (bytes > 24 /* move up to 32 bytes at a time */
6050 && ! fixed_regs[12])
6052 move_bytes = (bytes > 32) ? 32 : bytes;
6053 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
6056 expand_block_move_mem (BLKmode,
6059 GEN_INT ((move_bytes == 32)
6063 else if (bytes > 16 /* move up to 24 bytes at a time */
6069 && ! fixed_regs[10])
6071 move_bytes = (bytes > 24) ? 24 : bytes;
6072 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
6075 expand_block_move_mem (BLKmode,
6078 GEN_INT (move_bytes),
6081 else if (bytes > 8 /* move up to 16 bytes at a time */
6087 move_bytes = (bytes > 16) ? 16 : bytes;
6088 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
6091 expand_block_move_mem (BLKmode,
6094 GEN_INT (move_bytes),
6097 else if (bytes >= 8 && TARGET_POWERPC64
6098 /* 64-bit loads and stores require word-aligned
6100 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6103 tmp_reg = gen_reg_rtx (DImode);
6104 emit_move_insn (tmp_reg,
6105 expand_block_move_mem (DImode,
6106 src_reg, orig_src));
6107 emit_move_insn (expand_block_move_mem (DImode,
6108 dest_reg, orig_dest),
6111 else if (bytes > 4 && !TARGET_POWERPC64)
6112 { /* move up to 8 bytes at a time */
6113 move_bytes = (bytes > 8) ? 8 : bytes;
6114 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
6117 expand_block_move_mem (BLKmode,
6120 GEN_INT (move_bytes),
6123 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6124 { /* move 4 bytes */
6126 tmp_reg = gen_reg_rtx (SImode);
6127 emit_move_insn (tmp_reg,
6128 expand_block_move_mem (SImode,
6129 src_reg, orig_src));
6130 emit_move_insn (expand_block_move_mem (SImode,
6131 dest_reg, orig_dest),
6134 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6135 { /* move 2 bytes */
6137 tmp_reg = gen_reg_rtx (HImode);
6138 emit_move_insn (tmp_reg,
6139 expand_block_move_mem (HImode,
6140 src_reg, orig_src));
6141 emit_move_insn (expand_block_move_mem (HImode,
6142 dest_reg, orig_dest),
6145 else if (bytes == 1) /* move 1 byte */
6148 tmp_reg = gen_reg_rtx (QImode);
6149 emit_move_insn (tmp_reg,
6150 expand_block_move_mem (QImode,
6151 src_reg, orig_src));
6152 emit_move_insn (expand_block_move_mem (QImode,
6153 dest_reg, orig_dest),
6157 { /* move up to 4 bytes at a time */
6158 move_bytes = (bytes > 4) ? 4 : bytes;
6159 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
6162 expand_block_move_mem (BLKmode,
6165 GEN_INT (move_bytes),
/* Advance the scratch address registers past the chunk just moved;
   addsi3 vs adddi3 picked by pointer width.  */
6169 if (bytes > move_bytes)
6171 if (! TARGET_POWERPC64)
6173 emit_insn (gen_addsi3 (src_reg, src_reg,
6174 GEN_INT (move_bytes)));
6175 emit_insn (gen_addsi3 (dest_reg, dest_reg,
6176 GEN_INT (move_bytes)));
6180 emit_insn (gen_adddi3 (src_reg, src_reg,
6181 GEN_INT (move_bytes)));
6182 emit_insn (gen_adddi3 (dest_reg, dest_reg,
6183 GEN_INT (move_bytes)));
6189 else /* string instructions not available */
6191 num_reg = offset = 0;
6192 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
6194 /* Calculate the correct offset for src/dest */
6198 dest_addr = dest_reg;
6202 src_addr = plus_constant (src_reg, offset);
6203 dest_addr = plus_constant (dest_reg, offset);
6206 /* Generate the appropriate load and store, saving the stores
6208 if (bytes >= 8 && TARGET_POWERPC64
6209 /* 64-bit loads and stores require word-aligned
6211 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6214 tmp_reg = gen_reg_rtx (DImode);
6215 emit_insn (gen_movdi (tmp_reg,
6216 expand_block_move_mem (DImode,
6219 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
6224 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6227 tmp_reg = gen_reg_rtx (SImode);
6228 emit_insn (gen_movsi (tmp_reg,
6229 expand_block_move_mem (SImode,
6232 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
6237 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6240 tmp_reg = gen_reg_rtx (HImode);
6241 emit_insn (gen_movhi (tmp_reg,
6242 expand_block_move_mem (HImode,
6245 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
6253 tmp_reg = gen_reg_rtx (QImode);
6254 emit_insn (gen_movqi (tmp_reg,
6255 expand_block_move_mem (QImode,
6258 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
/* Flush buffered stores once MAX_MOVE_REG loads are outstanding,
   and again after the loop for any remainder.  */
6264 if (num_reg >= MAX_MOVE_REG)
6266 for (i = 0; i < num_reg; i++)
6267 emit_insn (stores[i]);
6272 for (i = 0; i < num_reg; i++)
6273 emit_insn (stores[i]);
6280 /* Return 1 if OP is a load multiple operation. It is known to be a
6281 PARALLEL and the first section will be tested. */
/* NOTE(review): interior lines are elided in this excerpt; code kept
   verbatim.  Each vector element i must load SImode register
   dest_regno+i from src_addr + 4*i.  */
6284 load_multiple_operation (op, mode)
6286 enum machine_mode mode ATTRIBUTE_UNUSED;
6288 int count = XVECLEN (op, 0);
6289 unsigned int dest_regno;
6293 /* Perform a quick check so we don't blow up below. */
6295 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6296 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6297 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6300 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6301 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6303 for (i = 1; i < count; i++)
6305 rtx elt = XVECEXP (op, 0, i);
6307 if (GET_CODE (elt) != SET
6308 || GET_CODE (SET_DEST (elt)) != REG
6309 || GET_MODE (SET_DEST (elt)) != SImode
6310 || REGNO (SET_DEST (elt)) != dest_regno + i
6311 || GET_CODE (SET_SRC (elt)) != MEM
6312 || GET_MODE (SET_SRC (elt)) != SImode
6313 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6314 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6315 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6316 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6323 /* Similar, but tests for store multiple. Here, the second vector element
6324 is a CLOBBER. It will be tested later. */
/* NOTE(review): interior lines elided; code kept verbatim.  Mirror of
   load_multiple_operation: element i+1 (skipping the CLOBBER) must
   store SImode register src_regno+i at dest_addr + 4*i.  */
6327 store_multiple_operation (op, mode)
6329 enum machine_mode mode ATTRIBUTE_UNUSED;
6331 int count = XVECLEN (op, 0) - 1;
6332 unsigned int src_regno;
6336 /* Perform a quick check so we don't blow up below. */
6338 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6339 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6340 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6343 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6344 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6346 for (i = 1; i < count; i++)
6348 rtx elt = XVECEXP (op, 0, i + 1);
6350 if (GET_CODE (elt) != SET
6351 || GET_CODE (SET_SRC (elt)) != REG
6352 || GET_MODE (SET_SRC (elt)) != SImode
6353 || REGNO (SET_SRC (elt)) != src_regno + i
6354 || GET_CODE (SET_DEST (elt)) != MEM
6355 || GET_MODE (SET_DEST (elt)) != SImode
6356 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6357 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6358 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6359 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6366 /* Return 1 for a parallel vrsave operation. */
/* NOTE(review): interior lines elided; code kept verbatim.  First
   element must SET a REG from an UNSPEC_VOLATILE, and either side must
   be VRSAVE_REGNO; remaining elements may only be SET or CLOBBER.  */
6369 vrsave_operation (op, mode)
6371 enum machine_mode mode ATTRIBUTE_UNUSED;
6373 int count = XVECLEN (op, 0);
6374 unsigned int dest_regno, src_regno;
6378 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6379 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6380 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE
6383 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6384 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6386 if (dest_regno != VRSAVE_REGNO
6387 && src_regno != VRSAVE_REGNO)
6390 for (i = 1; i < count; i++)
6392 rtx elt = XVECEXP (op, 0, i);
6394 if (GET_CODE (elt) != CLOBBER
6395 && GET_CODE (elt) != SET)
6402 /* Return 1 for an PARALLEL suitable for mtcrf. */
/* NOTE(review): interior lines elided; code kept verbatim.  Every
   element must set one CR field from UNSPEC 20 over the same SImode
   GPR with the mask bit matching that CR field's position.  */
6405 mtcrf_operation (op, mode)
6407 enum machine_mode mode ATTRIBUTE_UNUSED;
6409 int count = XVECLEN (op, 0);
6413 /* Perform a quick check so we don't blow up below. */
6415 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6416 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6417 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6419 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6421 if (GET_CODE (src_reg) != REG
6422 || GET_MODE (src_reg) != SImode
6423 || ! INT_REGNO_P (REGNO (src_reg)))
6426 for (i = 0; i < count; i++)
6428 rtx exp = XVECEXP (op, 0, i);
6432 if (GET_CODE (exp) != SET
6433 || GET_CODE (SET_DEST (exp)) != REG
6434 || GET_MODE (SET_DEST (exp)) != CCmode
6435 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6437 unspec = SET_SRC (exp);
6438 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6440 if (GET_CODE (unspec) != UNSPEC
6441 || XINT (unspec, 1) != 20
6442 || XVECLEN (unspec, 0) != 2
6443 || XVECEXP (unspec, 0, 0) != src_reg
6444 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6445 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6451 /* Return 1 for an PARALLEL suitable for lmw. */
/* NOTE(review): interior lines elided; code kept verbatim.  lmw loads
   registers dest_regno..31, so count must be 32 - dest_regno; the base
   address may be indirect (reg, not r0) or reg+offset, and element i
   must load from base + offset + 4*i.  */
6454 lmw_operation (op, mode)
6456 enum machine_mode mode ATTRIBUTE_UNUSED;
6458 int count = XVECLEN (op, 0);
6459 unsigned int dest_regno;
6461 unsigned int base_regno;
6462 HOST_WIDE_INT offset;
6465 /* Perform a quick check so we don't blow up below. */
6467 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6468 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6469 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6472 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6473 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6476 || count != 32 - (int) dest_regno)
6479 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6482 base_regno = REGNO (src_addr);
6483 if (base_regno == 0)
6486 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6488 offset = INTVAL (XEXP (src_addr, 1));
6489 base_regno = REGNO (XEXP (src_addr, 0));
6494 for (i = 0; i < count; i++)
6496 rtx elt = XVECEXP (op, 0, i);
6499 HOST_WIDE_INT newoffset;
6501 if (GET_CODE (elt) != SET
6502 || GET_CODE (SET_DEST (elt)) != REG
6503 || GET_MODE (SET_DEST (elt)) != SImode
6504 || REGNO (SET_DEST (elt)) != dest_regno + i
6505 || GET_CODE (SET_SRC (elt)) != MEM
6506 || GET_MODE (SET_SRC (elt)) != SImode)
6508 newaddr = XEXP (SET_SRC (elt), 0);
6509 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6514 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6516 addr_reg = XEXP (newaddr, 0);
6517 newoffset = INTVAL (XEXP (newaddr, 1));
6521 if (REGNO (addr_reg) != base_regno
6522 || newoffset != offset + 4 * i)
6529 /* Return 1 for an PARALLEL suitable for stmw. */
/* NOTE(review): interior lines elided; code kept verbatim.  Exact
   mirror of lmw_operation with SET_SRC/SET_DEST roles swapped:
   stores registers src_regno..31 at base + offset + 4*i.  */
6532 stmw_operation (op, mode)
6534 enum machine_mode mode ATTRIBUTE_UNUSED;
6536 int count = XVECLEN (op, 0);
6537 unsigned int src_regno;
6539 unsigned int base_regno;
6540 HOST_WIDE_INT offset;
6543 /* Perform a quick check so we don't blow up below. */
6545 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6546 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6547 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6550 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6551 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6554 || count != 32 - (int) src_regno)
6557 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6560 base_regno = REGNO (dest_addr);
6561 if (base_regno == 0)
6564 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6566 offset = INTVAL (XEXP (dest_addr, 1));
6567 base_regno = REGNO (XEXP (dest_addr, 0));
6572 for (i = 0; i < count; i++)
6574 rtx elt = XVECEXP (op, 0, i);
6577 HOST_WIDE_INT newoffset;
6579 if (GET_CODE (elt) != SET
6580 || GET_CODE (SET_SRC (elt)) != REG
6581 || GET_MODE (SET_SRC (elt)) != SImode
6582 || REGNO (SET_SRC (elt)) != src_regno + i
6583 || GET_CODE (SET_DEST (elt)) != MEM
6584 || GET_MODE (SET_DEST (elt)) != SImode)
6586 newaddr = XEXP (SET_DEST (elt), 0);
6587 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6592 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6594 addr_reg = XEXP (newaddr, 0);
6595 newoffset = INTVAL (XEXP (newaddr, 1));
6599 if (REGNO (addr_reg) != base_regno
6600 || newoffset != offset + 4 * i)
6607 /* A validation routine: say whether CODE, a condition code, and MODE
6608 match. The other alternatives either don't make sense or should
6609 never be generated. */
/* NOTE(review): interior lines (the abort calls between checks) are
   elided in this excerpt; code kept verbatim.  */
6612 validate_condition_mode (code, mode)
6614 enum machine_mode mode;
6616 if (GET_RTX_CLASS (code) != '<'
6617 || GET_MODE_CLASS (mode) != MODE_CC)
6620 /* These don't make sense. */
6621 if ((code == GT || code == LT || code == GE || code == LE)
6622 && mode == CCUNSmode)
6625 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6626 && mode != CCUNSmode)
6629 if (mode != CCFPmode
6630 && (code == ORDERED || code == UNORDERED
6631 || code == UNEQ || code == LTGT
6632 || code == UNGT || code == UNLT
6633 || code == UNGE || code == UNLE))
6636 /* These should never be generated except for
6637 flag_unsafe_math_optimizations. */
6638 if (mode == CCFPmode
6639 && ! flag_unsafe_math_optimizations
6640 && (code == LE || code == GE
6641 || code == UNEQ || code == LTGT
6642 || code == UNGT || code == UNLT))
6645 /* These are invalid; the information is not there. */
6646 if (mode == CCEQmode
6647 && code != EQ && code != NE)
6651 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6652 We only check the opcode against the mode of the CC value here. */
/* NOTE(review): interior lines elided; code kept verbatim.  */
6655 branch_comparison_operator (op, mode)
6657 enum machine_mode mode ATTRIBUTE_UNUSED;
6659 enum rtx_code code = GET_CODE (op);
6660 enum machine_mode cc_mode;
6662 if (GET_RTX_CLASS (code) != '<')
6665 cc_mode = GET_MODE (XEXP (op, 0));
6666 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
/* Aborts (rather than returning 0) on a code/mode mismatch.  */
6669 validate_condition_mode (code, cc_mode);
6674 /* Return 1 if OP is a comparison operation that is valid for a branch
6675 insn and which is true if the corresponding bit in the CC register
/* NOTE(review): tail of the comment and interior lines elided; code
   kept verbatim.  */
6679 branch_positive_comparison_operator (op, mode)
6681 enum machine_mode mode;
6685 if (! branch_comparison_operator (op, mode))
6688 code = GET_CODE (op);
/* NE counts as "positive" only for SPE soft-FP comparisons.  */
6689 return (code == EQ || code == LT || code == GT
6690 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6691 || code == LTU || code == GTU
6692 || code == UNORDERED);
6695 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6696 We check the opcode against the mode of the CC value and disallow EQ or
6697 NE comparisons for integers. */
/* NOTE(review): interior lines elided; code kept verbatim.  */
6700 scc_comparison_operator (op, mode)
6702 enum machine_mode mode;
6704 enum rtx_code code = GET_CODE (op);
6705 enum machine_mode cc_mode;
6707 if (GET_MODE (op) != mode && mode != VOIDmode)
6710 if (GET_RTX_CLASS (code) != '<')
6713 cc_mode = GET_MODE (XEXP (op, 0));
6714 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6717 validate_condition_mode (code, cc_mode);
6719 if (code == NE && cc_mode != CCFPmode)
/* Predicate: OP is any comparison rtx usable in a trap insn,
   optionally mode-checked.  (NOTE(review): header lines elided.)  */
6726 trap_comparison_operator (op, mode)
6728 enum machine_mode mode;
6730 if (mode != VOIDmode && mode != GET_MODE (op))
6732 return GET_RTX_CLASS (GET_CODE (op)) == '<';
/* Predicate: OP is AND, IOR or XOR.  (NOTE(review): header elided.)  */
6736 boolean_operator (op, mode)
6738 enum machine_mode mode ATTRIBUTE_UNUSED;
6740 enum rtx_code code = GET_CODE (op);
6741 return (code == AND || code == IOR || code == XOR);
/* Predicate: OP is IOR or XOR (no AND).  (NOTE(review): header elided.)  */
6745 boolean_or_operator (op, mode)
6747 enum machine_mode mode ATTRIBUTE_UNUSED;
6749 enum rtx_code code = GET_CODE (op);
6750 return (code == IOR || code == XOR);
/* Predicate: OP is a signed/unsigned min or max.  (NOTE(review):
   header elided.)  */
6754 min_max_operator (op, mode)
6756 enum machine_mode mode ATTRIBUTE_UNUSED;
6758 enum rtx_code code = GET_CODE (op);
6759 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6762 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6763 mask required to convert the result of a rotate insn into a shift
6764 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
/* NOTE(review): parameter declaration lines elided; code kept verbatim.  */
6767 includes_lshift_p (shiftop, andop)
6771 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6773 shift_mask <<= INTVAL (shiftop);
/* Mask to 32 bits: this is an SImode test on a HOST_WIDE_INT host.  */
6775 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6778 /* Similar, but for right shift. */
/* NOTE(review): parameter declaration lines elided; code kept verbatim.  */
6781 includes_rshift_p (shiftop, andop)
6785 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6787 shift_mask >>= INTVAL (shiftop);
6789 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6792 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6793 to perform a left shift. It must have exactly SHIFTOP least
6794 signifigant 0's, then one or more 1's, then zero or more 0's. */
/* NOTE(review): many interior lines elided (lsb computations, early
   returns); code kept verbatim.  The CONST_DOUBLE arm handles 64-bit
   masks on hosts where HOST_WIDE_INT is only 32 bits.  */
6797 includes_rldic_lshift_p (shiftop, andop)
6801 if (GET_CODE (andop) == CONST_INT)
6803 HOST_WIDE_INT c, lsb, shift_mask;
6806 if (c == 0 || c == ~0)
6810 shift_mask <<= INTVAL (shiftop);
6812 /* Find the least signifigant one bit. */
6815 /* It must coincide with the LSB of the shift mask. */
6816 if (-lsb != shift_mask)
6819 /* Invert to look for the next transition (if any). */
6822 /* Remove the low group of ones (originally low group of zeros). */
6825 /* Again find the lsb, and check we have all 1's above. */
6829 else if (GET_CODE (andop) == CONST_DOUBLE
6830 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6832 HOST_WIDE_INT low, high, lsb;
6833 HOST_WIDE_INT shift_mask_low, shift_mask_high;
6835 low = CONST_DOUBLE_LOW (andop);
6836 if (HOST_BITS_PER_WIDE_INT < 64)
6837 high = CONST_DOUBLE_HIGH (andop);
6839 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
6840 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
6843 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6845 shift_mask_high = ~0;
6846 if (INTVAL (shiftop) > 32)
6847 shift_mask_high <<= INTVAL (shiftop) - 32;
6851 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
6858 return high == -lsb;
6861 shift_mask_low = ~0;
6862 shift_mask_low <<= INTVAL (shiftop);
6866 if (-lsb != shift_mask_low)
6869 if (HOST_BITS_PER_WIDE_INT < 64)
6874 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6877 return high == -lsb;
6881 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
6887 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6888 to perform a left shift. It must have SHIFTOP or more least
6889 signifigant 0's, with the remainder of the word 1's. */
/* NOTE(review): interior lines elided; code kept verbatim.  Structure
   parallels includes_rldic_lshift_p, but the mask must run all the
   way to the top of the word.  */
6892 includes_rldicr_lshift_p (shiftop, andop)
6896 if (GET_CODE (andop) == CONST_INT)
6898 HOST_WIDE_INT c, lsb, shift_mask;
6901 shift_mask <<= INTVAL (shiftop);
6904 /* Find the least signifigant one bit. */
6907 /* It must be covered by the shift mask.
6908 This test also rejects c == 0. */
6909 if ((lsb & shift_mask) == 0)
6912 /* Check we have all 1's above the transition, and reject all 1's. */
6913 return c == -lsb && lsb != 1;
6915 else if (GET_CODE (andop) == CONST_DOUBLE
6916 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6918 HOST_WIDE_INT low, lsb, shift_mask_low;
6920 low = CONST_DOUBLE_LOW (andop);
6922 if (HOST_BITS_PER_WIDE_INT < 64)
6924 HOST_WIDE_INT high, shift_mask_high;
6926 high = CONST_DOUBLE_HIGH (andop);
6930 shift_mask_high = ~0;
6931 if (INTVAL (shiftop) > 32)
6932 shift_mask_high <<= INTVAL (shiftop) - 32;
6936 if ((lsb & shift_mask_high) == 0)
6939 return high == -lsb;
6945 shift_mask_low = ~0;
6946 shift_mask_low <<= INTVAL (shiftop);
6950 if ((lsb & shift_mask_low) == 0)
6953 return low == -lsb && lsb != 1;
6959 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6960 for lfq and stfq insns.
6962 Note reg1 and reg2 *must* be hard registers. To be sure we will
6963 abort if we are passed pseudo registers. */
/* NOTE(review): interior lines elided; code kept verbatim.  */
6966 registers_ok_for_quad_peep (reg1, reg2)
6969 /* We might have been passed a SUBREG. */
6970 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6973 return (REGNO (reg1) == REGNO (reg2) - 1);
6976 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6977 addr1 and addr2 must be in consecutive memory locations
6978 (addr2 == addr1 + 8). */
/* NOTE(review): interior lines elided; code kept verbatim.  */
6981 addrs_ok_for_quad_peep (addr1, addr2)
6988 /* Extract an offset (if used) from the first addr. */
6989 if (GET_CODE (addr1) == PLUS)
6991 /* If not a REG, return zero. */
6992 if (GET_CODE (XEXP (addr1, 0)) != REG)
6996 reg1 = REGNO (XEXP (addr1, 0));
6997 /* The offset must be constant! */
6998 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7000 offset1 = INTVAL (XEXP (addr1, 1));
7003 else if (GET_CODE (addr1) != REG)
7007 reg1 = REGNO (addr1);
7008 /* This was a simple (mem (reg)) expression. Offset is 0. */
7012 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7013 if (GET_CODE (addr2) != PLUS)
7016 if (GET_CODE (XEXP (addr2, 0)) != REG
7017 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7020 if (reg1 != REGNO (XEXP (addr2, 0)))
7023 /* The offset for the second addr must be 8 more than the first addr. */
7024 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7027 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7032 /* Return the register class of a scratch register needed to copy IN into
7033 or out of a register in CLASS in MODE. If it can be done directly,
7034 NO_REGS is returned. */
/* NOTE(review): interior lines (returns, regno init) elided; code kept
   verbatim.  regno ends up as the hard register of IN, or -1 for
   pseudos/non-register operands.  */
7037 secondary_reload_class (class, mode, in)
7038 enum reg_class class;
7039 enum machine_mode mode ATTRIBUTE_UNUSED;
7044 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
7046 /* We cannot copy a symbolic operand directly into anything
7047 other than BASE_REGS for TARGET_ELF. So indicate that a
7048 register from BASE_REGS is needed as an intermediate
7051 On Darwin, pic addresses require a load from memory, which
7052 needs a base register. */
7053 if (class != BASE_REGS
7054 && (GET_CODE (in) == SYMBOL_REF
7055 || GET_CODE (in) == HIGH
7056 || GET_CODE (in) == LABEL_REF
7057 || GET_CODE (in) == CONST))
7061 if (GET_CODE (in) == REG)
7064 if (regno >= FIRST_PSEUDO_REGISTER)
7066 regno = true_regnum (in);
7067 if (regno >= FIRST_PSEUDO_REGISTER)
7071 else if (GET_CODE (in) == SUBREG)
7073 regno = true_regnum (in);
7074 if (regno >= FIRST_PSEUDO_REGISTER)
7080 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7082 if (class == GENERAL_REGS || class == BASE_REGS
7083 || (regno >= 0 && INT_REGNO_P (regno)))
7086 /* Constants, memory, and FP registers can go into FP registers. */
7087 if ((regno == -1 || FP_REGNO_P (regno))
7088 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7091 /* Memory, and AltiVec registers can go into AltiVec registers. */
7092 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7093 && class == ALTIVEC_REGS)
7096 /* We can copy among the CR registers. */
7097 if ((class == CR_REGS || class == CR0_REGS)
7098 && regno >= 0 && CR_REGNO_P (regno))
7101 /* Otherwise, we need GENERAL_REGS. */
7102 return GENERAL_REGS;
7105 /* Given a comparison operation, return the bit number in CCR to test. We
7106 know this is a valid comparison.
7108 SCC_P is 1 if this is for an scc. That means that %D will have been
7109 used instead of %C, so the bits will be in different places.
7111 Return -1 if OP isn't a valid comparison for some reason. */
/* NOTE(review): the function header line itself is elided in this
   excerpt — presumably this is ccr_bit (op, scc_p); verify against the
   full source.  The switch cases map rtx codes to CR bit offsets
   (0=LT, 1=GT, 2=EQ, 3=SO/UN) within the 4-bit CR field.  */
7118 enum rtx_code code = GET_CODE (op);
7119 enum machine_mode cc_mode;
7124 if (GET_RTX_CLASS (code) != '<')
7129 if (GET_CODE (reg) != REG
7130 || ! CR_REGNO_P (REGNO (reg)))
7133 cc_mode = GET_MODE (reg);
7134 cc_regnum = REGNO (reg);
7135 base_bit = 4 * (cc_regnum - CR0_REGNO);
7137 validate_condition_mode (code, cc_mode);
7142 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7143 return base_bit + 1;
7144 return scc_p ? base_bit + 3 : base_bit + 2;
7146 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7147 return base_bit + 1;
7148 return base_bit + 2;
7149 case GT: case GTU: case UNLE:
7150 return base_bit + 1;
7151 case LT: case LTU: case UNGE:
7153 case ORDERED: case UNORDERED:
7154 return base_bit + 3;
7157 /* If scc, we will have done a cror to put the bit in the
7158 unordered position. So test that bit. For integer, this is ! LT
7159 unless this is an scc insn. */
7160 return scc_p ? base_bit + 3 : base_bit;
7163 return scc_p ? base_bit + 3 : base_bit + 1;
7170 /* Return the GOT register. */
7173 rs6000_got_register (value)
7174 rtx value ATTRIBUTE_UNUSED;
7176 /* The second flow pass currently (June 1999) can't update
7177 regs_ever_live without disturbing other parts of the compiler, so
7178 update it here to make the prolog/epilogue code happy. */
7179 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7180 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
/* Side effect: marks the PIC offset table as used by this function.  */
7182 current_function_uses_pic_offset_table = 1;
7184 return pic_offset_table_rtx;
7187 /* Function to init struct machine_function.
7188 This will be called, via a pointer variable,
7189 from push_function_context. */
7191 static struct machine_function *
7192 rs6000_init_machine_status ()
/* GC-allocated and zero-initialized; no explicit free needed.  */
7194 return ggc_alloc_cleared (sizeof (machine_function));
7197 /* These macros test for integers and extract the low-order bits. */
/* NOTE(review): the #define line of the first macro (the VOIDmode
   integer test whose body follows) is elided in this excerpt.  */
7199 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7200 && GET_MODE (X) == VOIDmode)
/* Low HOST_WIDE_INT word of a CONST_INT or CONST_DOUBLE.  */
7202 #define INT_LOWPART(X) \
7203 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
/* NOTE(review): the function header is elided in this excerpt —
   presumably this computes the mask-begin (MB) bit position of a
   32-bit rotate mask, including wrap-around masks; verify against the
   full source.  Code kept verbatim.  */
7210 unsigned long val = INT_LOWPART (op);
7212 /* If the high bit is zero, the value is the first 1 bit we find
7214 if ((val & 0x80000000) == 0)
7216 if ((val & 0xffffffff) == 0)
7220 while (((val <<= 1) & 0x80000000) == 0)
7225 /* If the high bit is set and the low bit is not, or the mask is all
7226 1's, the value is zero. */
7227 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7230 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7233 while (((val >>= 1) & 1) != 0)
/* NOTE(review): function header elided — presumably the companion
   mask-end (ME) extractor, scanning from the low-order end; verify
   against the full source.  Code kept verbatim.  */
7244 unsigned long val = INT_LOWPART (op);
7246 /* If the low bit is zero, the value is the first 1 bit we find from
7250 if ((val & 0xffffffff) == 0)
7254 while (((val >>= 1) & 1) == 0)
7260 /* If the low bit is set and the high bit is not, or the mask is all
7261 1's, the value is 31. */
7262 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7265 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7268 while (((val <<= 1) & 0x80000000) != 0)
7274 /* Print an operand. Recognize special options, documented below. */
/* Small-data relocation name and base register differ between EABI
   and SVR4 sdata; the second pair is the non-V4 fallback (the #if/#else
   lines are elided in this excerpt).  */
7277 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7278 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7280 #define SMALL_DATA_RELOC "sda21"
7281 #define SMALL_DATA_REG 0
7285 print_operand (file, x, code)
7292 unsigned HOST_WIDE_INT uval;
7297 /* Write out an instruction after the call which may be replaced
7298 with glue code by the loader. This depends on the AIX version. */
7299 asm_fprintf (file, RS6000_CALL_GLUE);
7302 /* %a is output_address. */
7305 /* If X is a constant integer whose low-order 5 bits are zero,
7306 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7307 in the AIX assembler where "sri" with a zero shift count
7308 writes a trash instruction. */
7309 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7316 /* If constant, low-order 16 bits of constant, unsigned.
7317 Otherwise, write normally. */
7319 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7321 print_operand (file, x, 0);
7325 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7326 for 64-bit mask direction. */
7327 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7330 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7334 /* There used to be a comment for 'C' reading "This is an
7335 optional cror needed for certain floating-point
7336 comparisons. Otherwise write nothing." */
7338 /* Similar, except that this is for an scc, so we must be able to
7339 encode the test in a single bit that is one. We do the above
7340 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7341 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7342 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7344 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7346 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7348 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7351 else if (GET_CODE (x) == NE)
7353 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7355 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7356 base_bit + 2, base_bit + 2);
7358 else if (TARGET_SPE && TARGET_HARD_FLOAT
7359 && GET_CODE (x) == EQ
7360 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7362 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7364 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7365 base_bit + 1, base_bit + 1);
7370 /* X is a CR register. Print the number of the EQ bit of the CR */
7371 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7372 output_operand_lossage ("invalid %%E value");
7374 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7378 /* X is a CR register. Print the shift count needed to move it
7379 to the high-order four bits. */
7380 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7381 output_operand_lossage ("invalid %%f value");
7383 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7387 /* Similar, but print the count for the rotate in the opposite
7389 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7390 output_operand_lossage ("invalid %%F value");
7392 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7396 /* X is a constant integer. If it is negative, print "m",
7397 otherwise print "z". This is to make a aze or ame insn. */
7398 if (GET_CODE (x) != CONST_INT)
7399 output_operand_lossage ("invalid %%G value");
7400 else if (INTVAL (x) >= 0)
7407 /* If constant, output low-order five bits. Otherwise, write
7410 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7412 print_operand (file, x, 0);
7416 /* If constant, output low-order six bits. Otherwise, write
7419 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7421 print_operand (file, x, 0);
7425 /* Print `i' if this is a constant, else nothing. */
7431 /* Write the bit number in CCR for jump. */
7434 output_operand_lossage ("invalid %%j code");
7436 fprintf (file, "%d", i);
7440 /* Similar, but add one for shift count in rlinm for scc and pass
7441 scc flag to `ccr_bit'. */
7444 output_operand_lossage ("invalid %%J code");
7446 /* If we want bit 31, write a shift count of zero, not 32. */
7447 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7451 /* X must be a constant. Write the 1's complement of the
7454 output_operand_lossage ("invalid %%k value");
7456 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7460 /* X must be a symbolic constant on ELF. Write an
7461 expression suitable for an 'addi' that adds in the low 16
7463 if (GET_CODE (x) != CONST)
7465 print_operand_address (file, x);
7470 if (GET_CODE (XEXP (x, 0)) != PLUS
7471 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7472 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7473 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7474 output_operand_lossage ("invalid %%K value");
7475 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7477 /* For GNU as, there must be a non-alphanumeric character
7478 between 'l' and the number. The '-' is added by
7479 print_operand() already. */
7480 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7482 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7486 /* %l is output_asm_label. */
7489 /* Write second word of DImode or DFmode reference. Works on register
7490 or non-indexed memory only. */
7491 if (GET_CODE (x) == REG)
7492 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7493 else if (GET_CODE (x) == MEM)
7495 /* Handle possible auto-increment. Since it is pre-increment and
7496 we have already done it, we can just use an offset of word. */
7497 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7498 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7499 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7502 output_address (XEXP (adjust_address_nv (x, SImode,
7506 if (small_data_operand (x, GET_MODE (x)))
7507 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7508 reg_names[SMALL_DATA_REG]);
7513 /* MB value for a mask operand. */
7514 if (! mask_operand (x, SImode))
7515 output_operand_lossage ("invalid %%m value");
7517 fprintf (file, "%d", extract_MB (x));
7521 /* ME value for a mask operand. */
7522 if (! mask_operand (x, SImode))
7523 output_operand_lossage ("invalid %%M value");
7525 fprintf (file, "%d", extract_ME (x));
7528 /* %n outputs the negative of its operand. */
7531 /* Write the number of elements in the vector times 4. */
7532 if (GET_CODE (x) != PARALLEL)
7533 output_operand_lossage ("invalid %%N value");
7535 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7539 /* Similar, but subtract 1 first. */
7540 if (GET_CODE (x) != PARALLEL)
7541 output_operand_lossage ("invalid %%O value");
7543 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7547 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7549 || INT_LOWPART (x) < 0
7550 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7551 output_operand_lossage ("invalid %%p value");
7553 fprintf (file, "%d", i);
7557 /* The operand must be an indirect memory reference. The result
7558 is the register number. */
7559 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7560 || REGNO (XEXP (x, 0)) >= 32)
7561 output_operand_lossage ("invalid %%P value");
7563 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7567 /* This outputs the logical code corresponding to a boolean
7568 expression. The expression may have one or both operands
7569 negated (if one, only the first one). For condition register
7570 logical operations, it will also treat the negated
7571 CR codes as NOTs, but not handle NOTs of them. */
7573 const char *const *t = 0;
7575 enum rtx_code code = GET_CODE (x);
7576 static const char * const tbl[3][3] = {
7577 { "and", "andc", "nor" },
7578 { "or", "orc", "nand" },
7579 { "xor", "eqv", "xor" } };
7583 else if (code == IOR)
7585 else if (code == XOR)
7588 output_operand_lossage ("invalid %%q value");
7590 if (GET_CODE (XEXP (x, 0)) != NOT)
7594 if (GET_CODE (XEXP (x, 1)) == NOT)
7605 /* X is a CR register. Print the mask for `mtcrf'. */
7606 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7607 output_operand_lossage ("invalid %%R value");
7609 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7613 /* Low 5 bits of 32 - value */
7615 output_operand_lossage ("invalid %%s value");
7617 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7621 /* PowerPC64 mask position. All 0's is excluded.
7622 CONST_INT 32-bit mask is considered sign-extended so any
7623 transition must occur within the CONST_INT, not on the boundary. */
7624 if (! mask64_operand (x, DImode))
7625 output_operand_lossage ("invalid %%S value");
7627 uval = INT_LOWPART (x);
7629 if (uval & 1) /* Clear Left */
7631 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7634 else /* Clear Right */
7637 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7644 fprintf (file, "%d", i);
7648 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7649 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7652 /* Bit 3 is OV bit. */
7653 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7655 /* If we want bit 31, write a shift count of zero, not 32. */
7656 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7660 /* Print the symbolic name of a branch target register. */
7661 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7662 && REGNO (x) != COUNT_REGISTER_REGNUM))
7663 output_operand_lossage ("invalid %%T value");
7664 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7665 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7667 fputs ("ctr", file);
7671 /* High-order 16 bits of constant for use in unsigned operand. */
7673 output_operand_lossage ("invalid %%u value");
7675 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7676 (INT_LOWPART (x) >> 16) & 0xffff);
7680 /* High-order 16 bits of constant for use in signed operand. */
7682 output_operand_lossage ("invalid %%v value");
7684 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7685 (INT_LOWPART (x) >> 16) & 0xffff);
7689 /* Print `u' if this has an auto-increment or auto-decrement. */
7690 if (GET_CODE (x) == MEM
7691 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7692 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7697 /* Print the trap code for this operand. */
7698 switch (GET_CODE (x))
7701 fputs ("eq", file); /* 4 */
7704 fputs ("ne", file); /* 24 */
7707 fputs ("lt", file); /* 16 */
7710 fputs ("le", file); /* 20 */
7713 fputs ("gt", file); /* 8 */
7716 fputs ("ge", file); /* 12 */
7719 fputs ("llt", file); /* 2 */
7722 fputs ("lle", file); /* 6 */
7725 fputs ("lgt", file); /* 1 */
7728 fputs ("lge", file); /* 5 */
7736 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7739 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7740 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7742 print_operand (file, x, 0);
7746 /* MB value for a PowerPC64 rldic operand. */
7747 val = (GET_CODE (x) == CONST_INT
7748 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7753 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7754 if ((val <<= 1) < 0)
7757 #if HOST_BITS_PER_WIDE_INT == 32
7758 if (GET_CODE (x) == CONST_INT && i >= 0)
7759 i += 32; /* zero-extend high-part was all 0's */
7760 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7762 val = CONST_DOUBLE_LOW (x);
7769 for ( ; i < 64; i++)
7770 if ((val <<= 1) < 0)
7775 fprintf (file, "%d", i + 1);
7779 if (GET_CODE (x) == MEM
7780 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7785 /* Like 'L', for third word of TImode */
7786 if (GET_CODE (x) == REG)
7787 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7788 else if (GET_CODE (x) == MEM)
7790 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7791 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7792 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7794 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7795 if (small_data_operand (x, GET_MODE (x)))
7796 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7797 reg_names[SMALL_DATA_REG]);
7802 /* X is a SYMBOL_REF. Write out the name preceded by a
7803 period and without any trailing data in brackets. Used for function
7804 names. If we are configured for System V (or the embedded ABI) on
7805 the PowerPC, do not emit the period, since those systems do not use
7806 TOCs and the like. */
7807 if (GET_CODE (x) != SYMBOL_REF)
7810 if (XSTR (x, 0)[0] != '.')
7812 switch (DEFAULT_ABI)
7822 case ABI_AIX_NODESC:
7828 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7830 assemble_name (file, XSTR (x, 0));
7835 /* Like 'L', for last word of TImode. */
7836 if (GET_CODE (x) == REG)
7837 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7838 else if (GET_CODE (x) == MEM)
7840 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7841 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7842 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7844 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7845 if (small_data_operand (x, GET_MODE (x)))
7846 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7847 reg_names[SMALL_DATA_REG]);
7851 /* Print AltiVec or SPE memory operand. */
7856 if (GET_CODE (x) != MEM)
7864 if (GET_CODE (tmp) == REG)
7866 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7869 /* Handle [reg+UIMM]. */
7870 else if (GET_CODE (tmp) == PLUS &&
7871 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7875 if (GET_CODE (XEXP (tmp, 0)) != REG)
7878 x = INTVAL (XEXP (tmp, 1));
7879 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7883 /* Fall through. Must be [reg+reg]. */
7885 if (GET_CODE (tmp) == REG)
7886 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7887 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7889 if (REGNO (XEXP (tmp, 0)) == 0)
7890 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7891 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7893 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
7894 reg_names[ REGNO (XEXP (tmp, 1)) ]);
7902 if (GET_CODE (x) == REG)
7903 fprintf (file, "%s", reg_names[REGNO (x)]);
7904 else if (GET_CODE (x) == MEM)
7906 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7907 know the width from the mode. */
7908 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
7909 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
7910 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7911 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
7912 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
7913 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7915 output_address (XEXP (x, 0));
7918 output_addr_const (file, x);
7922 output_operand_lossage ("invalid %%xn code");
7926 /* Print the address of an operand. */

/* Write memory address X to FILE in assembler syntax.  Handles plain
   registers ("0(rN)"), symbolic constants (with small-data annotation),
   reg+reg and reg+const sums, LO_SUM high/low address splits, and
   TOC-relative constant-pool addresses on AIX.
   NOTE(review): this listing is elided (the left-hand original line
   numbers jump), so some statements/braces are missing from view.  */
7929 print_operand_address (file, x)
7933 if (GET_CODE (x) == REG)
7934 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
7935 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
7936 || GET_CODE (x) == LABEL_REF)
7938 output_addr_const (file, x);
7939 if (small_data_operand (x, GET_MODE (x)))
7940 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7941 reg_names[SMALL_DATA_REG]);
7942 else if (TARGET_TOC)
/* reg+reg sum: if the base is r0 swap operands, since r0 as a base
   register means literal zero in PowerPC addressing.  */
7945 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
7947 if (REGNO (XEXP (x, 0)) == 0)
7948 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
7949 reg_names[ REGNO (XEXP (x, 0)) ]);
7951 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
7952 reg_names[ REGNO (XEXP (x, 1)) ]);
/* reg+const sum: "<offset>(rN)".  */
7954 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
7956 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
7957 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* LO_SUM: low part of a split high/low address.  The two identical
   conditions below are presumably guarded by different target #ifdefs
   (ELF "@l" vs. Darwin "lo16(...)") in the elided lines -- confirm
   against the full source.  */
7960 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7961 && CONSTANT_P (XEXP (x, 1)))
7963 output_addr_const (file, XEXP (x, 1));
7964 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7968 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7969 && CONSTANT_P (XEXP (x, 1)))
7971 fprintf (file, "lo16(");
7972 output_addr_const (file, XEXP (x, 1));
7973 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7976 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
7978 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
7980 rtx contains_minus = XEXP (x, 1);
7984 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7985 turn it into (sym) for output_addr_const. */
7986 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
7987 contains_minus = XEXP (contains_minus, 0);
7989 minus = XEXP (contains_minus, 0);
7990 symref = XEXP (minus, 0);
7991 XEXP (contains_minus, 0) = symref;
/* Temporarily rewrite the symbol name to "<name>@toc" for output;
   both the name and the MINUS are restored below.  */
7996 name = XSTR (symref, 0);
7997 newname = alloca (strlen (name) + sizeof ("@toc"));
7998 strcpy (newname, name);
7999 strcat (newname, "@toc");
8000 XSTR (symref, 0) = newname;
8002 output_addr_const (file, XEXP (x, 1));
8004 XSTR (symref, 0) = name;
8005 XEXP (contains_minus, 0) = minus;
8008 output_addr_const (file, XEXP (x, 1));
8010 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8016 /* Target hook for assembling integer objects. The powerpc version has
8017 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8018 is defined. It also needs to handle DI-mode objects on 64-bit

   NOTE(review): elided listing -- interior lines are missing; the
   visible code shows the -mrelocatable .fixup emission and the
   -mcall-aixdesc descriptor-name handling before deferring to
   default_assemble_integer.  */
8022 rs6000_assemble_integer (x, size, aligned_p)
8027 #ifdef RELOCATABLE_NEEDS_FIXUP
8028 /* Special handling for SI values. */
8029 if (size == 4 && aligned_p)
8031 extern int in_toc_section PARAMS ((void));
8032 static int recurse = 0;
8034 /* For -mrelocatable, we mark all addresses that need to be fixed up
8035 in the .fixup section. */
8036 if (TARGET_RELOCATABLE
8037 && !in_toc_section ()
8038 && !in_text_section ()
8040 && GET_CODE (x) != CONST_INT
8041 && GET_CODE (x) != CONST_DOUBLE
/* Emit the word itself as "(expr)@fixup" under a local label, then
   record that label's address in the .fixup section.  */
8047 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8049 ASM_OUTPUT_LABEL (asm_out_file, buf);
8050 fprintf (asm_out_file, "\t.long\t(");
8051 output_addr_const (asm_out_file, x);
8052 fprintf (asm_out_file, ")@fixup\n");
8053 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8054 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8055 fprintf (asm_out_file, "\t.long\t");
8056 assemble_name (asm_out_file, buf);
8057 fprintf (asm_out_file, "\n\t.previous\n");
8061 /* Remove initial .'s to turn a -mcall-aixdesc function
8062 address into the address of the descriptor, not the function
8064 else if (GET_CODE (x) == SYMBOL_REF
8065 && XSTR (x, 0)[0] == '.'
8066 && DEFAULT_ABI == ABI_AIX)
8068 const char *name = XSTR (x, 0);
8069 while (*name == '.')
8072 fprintf (asm_out_file, "\t.long\t%s\n", name);
8076 #endif /* RELOCATABLE_NEEDS_FIXUP */
8077 return default_assemble_integer (x, size, aligned_p);
/* Return the comparison CODE reversed for mode MODE.  FP compares
   (CCFPmode) must use the maybe-unordered reversal so that ordered
   compares become unordered ones and vice versa.  */
8081 rs6000_reverse_condition (mode, code)
8082 enum machine_mode mode;
8085 /* Reversal of FP compares takes care -- an ordered compare
8086 becomes an unordered compare and vice versa. */
8087 if (mode == CCFPmode)
8088 return reverse_condition_maybe_unordered (code);
8090 return reverse_condition (code);
8093 /* Generate a compare for CODE. Return a brand-new rtx that
8094 represents the result of the compare. */

/* Uses the global rs6000_compare_op0/op1/rs6000_compare_fp_p operands.
   Picks the CC mode (CCFP for FP, CCUNS for unsigned), emits the
   compare insn (with special-case GPR compare sequences for SPE
   hard-float), and returns (CODE cc-reg 0).
   NOTE(review): elided listing -- several case labels, braces and
   emit calls are missing from view.  */
8097 rs6000_generate_compare (code)
8100 enum machine_mode comp_mode;
8103 if (rs6000_compare_fp_p)
8104 comp_mode = CCFPmode;
8105 else if (code == GTU || code == LTU
8106 || code == GEU || code == LEU)
8107 comp_mode = CCUNSmode;
8111 /* First, the compare. */
8112 compare_result = gen_reg_rtx (comp_mode);
8114 /* SPE FP compare instructions on the GPRs. Yuck! */
8115 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8117 rtx cmp, or1, or2, or_result, compare_result2;
/* Each comparison uses the tst* form when -funsafe-math-optimizations
   is on, and the trapping cmp* form otherwise.  */
8125 cmp = flag_unsafe_math_optimizations
8126 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8128 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8129 rs6000_compare_op1);
8137 cmp = flag_unsafe_math_optimizations
8138 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8140 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8141 rs6000_compare_op1);
8149 cmp = flag_unsafe_math_optimizations
8150 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8152 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8153 rs6000_compare_op1);
8159 /* Synthesize LE and GE from LT/GT || EQ. */
8160 if (code == LE || code == GE || code == LEU || code == GEU)
8162 /* Synthesize GE/LE from GT/LT || EQ. */
8168 case LE: code = LT; break;
8169 case GE: code = GT; break;
8170 case LEU: code = LT; break;
8171 case GEU: code = GT; break;
8175 or1 = gen_reg_rtx (SImode);
8176 or2 = gen_reg_rtx (SImode);
8177 or_result = gen_reg_rtx (CCEQmode);
8178 compare_result2 = gen_reg_rtx (CCFPmode);
/* Emit the extra EQ compare whose result is OR'd in below.  */
8181 cmp = flag_unsafe_math_optimizations
8182 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8184 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8185 rs6000_compare_op1);
8188 /* The MC8540 FP compare instructions set the CR bits
8189 differently than other PPC compare instructions. For
8190 that matter, there is no generic test instruction, but a
8191 testgt, testlt, and testeq. For a true condition, bit 2
8192 is set (x1xx) in the CR. Following the traditional CR
8198 ... bit 2 would be a GT CR alias, so later on we
8199 look in the GT bits for the branch instructions.
8200 However, we must be careful to emit correct RTL in
8201 the meantime, so optimizations don't get confused. */
8203 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8204 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8206 /* OR them together. */
8207 cmp = gen_rtx_SET (VOIDmode, or_result,
8208 gen_rtx_COMPARE (CCEQmode,
8209 gen_rtx_IOR (SImode, or1, or2),
8211 compare_result = or_result;
8216 /* We only care about 1 bit (x1xx), so map everything to NE to
8217 maintain rtl sanity. We'll get to the right bit (x1xx) at
8218 code output time. */
8219 if (code == NE || code == LTGT)
8220 /* Do the inverse here because we have no cmpne
8221 instruction. We use the cmpeq instruction and expect
8222 to get a 0 instead. */
/* Non-SPE (ordinary) path: a single compare into COMPARE_RESULT.  */
8231 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8232 gen_rtx_COMPARE (comp_mode,
8234 rs6000_compare_op1)));
8236 /* Some kinds of FP comparisons need an OR operation;
8237 except for flag_unsafe_math_optimizations we don't bother. */
8238 if (rs6000_compare_fp_p
8239 && ! flag_unsafe_math_optimizations
8240 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8241 && (code == LE || code == GE
8242 || code == UNEQ || code == LTGT
8243 || code == UNGT || code == UNLT))
8245 enum rtx_code or1, or2;
8246 rtx or1_rtx, or2_rtx, compare2_rtx;
8247 rtx or_result = gen_reg_rtx (CCEQmode);
/* Decompose the compound FP predicate into two CR-testable ones.  */
8251 case LE: or1 = LT; or2 = EQ; break;
8252 case GE: or1 = GT; or2 = EQ; break;
8253 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8254 case LTGT: or1 = LT; or2 = GT; break;
8255 case UNGT: or1 = UNORDERED; or2 = GT; break;
8256 case UNLT: or1 = UNORDERED; or2 = LT; break;
8259 validate_condition_mode (or1, comp_mode);
8260 validate_condition_mode (or2, comp_mode);
8261 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8262 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8263 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8264 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8266 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8268 compare_result = or_result;
8272 validate_condition_mode (code, GET_MODE (compare_result));
8274 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8278 /* Emit the RTL for an sCOND pattern. */

/* Materialize the boolean result of comparison CODE into RESULT.
   The condition rtx from rs6000_generate_compare is given DImode on
   64-bit (for DImode or FP compares) and converted, else SImode.  */
8281 rs6000_emit_sCOND (code, result)
8286 enum machine_mode op_mode;
8288 condition_rtx = rs6000_generate_compare (code);
8290 op_mode = GET_MODE (rs6000_compare_op0);
8291 if (op_mode == VOIDmode)
8292 op_mode = GET_MODE (rs6000_compare_op1);
8294 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8296 PUT_MODE (condition_rtx, DImode);
8297 convert_move (result, condition_rtx, 0);
8301 PUT_MODE (condition_rtx, SImode);
8302 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8306 /* Emit a branch of kind CODE to location LOC. */

/* Generates the compare for CODE, then emits a conditional jump
   (if_then_else on the condition) to label LOC.  */
8309 rs6000_emit_cbranch (code, loc)
8313 rtx condition_rtx, loc_ref;
8315 condition_rtx = rs6000_generate_compare (code);
8316 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8317 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8318 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8322 /* Return the string to output a conditional branch to LABEL, which is
8323 the operand number of the label, or -1 if the branch is really a
8326 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8327 condition code register and its mode specifies what kind of
8330 REVERSED is non-zero if we should reverse the sense of the comparison.
8332 INSN is the insn. */

/* Builds the branch mnemonic into a static buffer: condition code
   string, optional +/- prediction hint from REG_BR_PROB, the CR
   register, and (for out-of-range targets) an inverted branch over
   an unconditional "b".  Returns the static buffer, so the result
   must be consumed before the next call.  */
8335 output_cbranch (op, label, reversed, insn)
8341 static char string[64];
8342 enum rtx_code code = GET_CODE (op);
8343 rtx cc_reg = XEXP (op, 0);
8344 enum machine_mode mode = GET_MODE (cc_reg);
8345 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8346 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8347 int really_reversed = reversed ^ need_longbranch;
8353 validate_condition_mode (code, mode);
8355 /* Work out which way this really branches. We could use
8356 reverse_condition_maybe_unordered here always but this
8357 makes the resulting assembler clearer. */
8358 if (really_reversed)
8359 code = rs6000_reverse_condition (mode, code);
8361 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8363 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8366 /* Opposite of GT. */
8368 else if (code == NE)
8376 /* Not all of these are actually distinct opcodes, but
8377 we distinguish them for clarity of the resulting assembler. */
8379 ccode = "ne"; break;
8381 ccode = "eq"; break;
8383 ccode = "ge"; break;
8384 case GT: case GTU: case UNGT:
8385 ccode = "gt"; break;
8387 ccode = "le"; break;
8388 case LT: case LTU: case UNLT:
8389 ccode = "lt"; break;
8390 case UNORDERED: ccode = "un"; break;
8391 case ORDERED: ccode = "nu"; break;
8392 case UNGE: ccode = "nl"; break;
8393 case UNLE: ccode = "ng"; break;
8398 /* Maybe we have a guess as to how likely the branch is.
8399 The old mnemonics don't have a way to specify this information. */
8400 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8401 if (note != NULL_RTX)
8403 /* PROB is the difference from 50%. */
8404 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8406 /* For branches that are very close to 50%, assume not-taken. */
8407 if (abs (prob) > REG_BR_PROB_BASE / 20
8408 && ((prob > 0) ^ need_longbranch))
/* "{old|new}" mnemonic pairs: bcr/bclr for branch-to-register,
   plain bcc otherwise, with the prediction suffix appended.  */
8417 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8419 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8421 /* We need to escape any '%' characters in the reg_names string.
8422 Assume they'd only be the first character... */
8423 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8425 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8429 /* If the branch distance was too far, we may have to use an
8430 unconditional branch to go the distance. */
8431 if (need_longbranch)
8432 s += sprintf (s, ",$+8\n\tb %s", label);
8434 s += sprintf (s, ",%s", label);
8440 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8441 operands of the last comparison is nonzero/true, FALSE_COND if it
8442 is zero/false. Return 0 if the hardware has no such operation. */

/* For integer compares this delegates to rs6000_emit_int_cmove (isel).
   For FP it lowers the comparison to a >= 0 test and emits one or more
   fsel-style if_then_else insns, rejecting cases that would be unsafe
   (e.g. Inf - Inf producing NaN) or slower than a branch.  */
8445 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8451 enum rtx_code code = GET_CODE (op);
8452 rtx op0 = rs6000_compare_op0;
8453 rtx op1 = rs6000_compare_op1;
8455 enum machine_mode compare_mode = GET_MODE (op0);
8456 enum machine_mode result_mode = GET_MODE (dest);
8459 /* These modes should always match. */
8460 if (GET_MODE (op1) != compare_mode
8461 /* In the isel case however, we can use a compare immediate, so
8462 op1 may be a small constant. */
8463 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8465 if (GET_MODE (true_cond) != result_mode)
8467 if (GET_MODE (false_cond) != result_mode)
8470 /* First, work out if the hardware can do this at all, or
8471 if it's too slow... */
8472 if (! rs6000_compare_fp_p)
8475 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8479 /* Eliminate half of the comparisons by switching operands, this
8480 makes the remaining code simpler. */
8481 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8482 || code == LTGT || code == LT)
8484 code = reverse_condition_maybe_unordered (code);
8486 true_cond = false_cond;
8490 /* UNEQ and LTGT take four instructions for a comparison with zero,
8491 it'll probably be faster to use a branch here too. */
8495 if (GET_CODE (op1) == CONST_DOUBLE)
8496 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8498 /* We're going to try to implement comparisons by performing
8499 a subtract, then comparing against zero. Unfortunately,
8500 Inf - Inf is NaN which is not zero, and so if we don't
8501 know that the operand is finite and the comparison
8502 would treat EQ different to UNORDERED, we can't do it. */
8503 if (! flag_unsafe_math_optimizations
8504 && code != GT && code != UNGE
8505 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
8506 /* Constructs of the form (a OP b ? a : b) are safe. */
8507 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8508 || (! rtx_equal_p (op0, true_cond)
8509 && ! rtx_equal_p (op1, true_cond))))
8511 /* At this point we know we can use fsel. */
8513 /* Reduce the comparison to a comparison against zero. */
8514 temp = gen_reg_rtx (compare_mode);
8515 emit_insn (gen_rtx_SET (VOIDmode, temp,
8516 gen_rtx_MINUS (compare_mode, op0, op1)));
8518 op1 = CONST0_RTX (compare_mode);
8520 /* If we don't care about NaNs we can reduce some of the comparisons
8521 down to faster ones. */
8522 if (flag_unsafe_math_optimizations)
8528 true_cond = false_cond;
8541 /* Now, reduce everything down to a GE. */
/* The cases below negate and/or take |op0| so the remaining test is
   always op0 >= 0, which is what fsel implements directly.  */
8548 temp = gen_reg_rtx (compare_mode);
8549 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8554 temp = gen_reg_rtx (compare_mode);
8555 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8560 temp = gen_reg_rtx (compare_mode);
8561 emit_insn (gen_rtx_SET (VOIDmode, temp,
8562 gen_rtx_NEG (compare_mode,
8563 gen_rtx_ABS (compare_mode, op0))));
8568 temp = gen_reg_rtx (result_mode);
8569 emit_insn (gen_rtx_SET (VOIDmode, temp,
8570 gen_rtx_IF_THEN_ELSE (result_mode,
8571 gen_rtx_GE (VOIDmode,
8573 true_cond, false_cond)));
8575 true_cond = false_cond;
8577 temp = gen_reg_rtx (compare_mode);
8578 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8583 temp = gen_reg_rtx (result_mode);
8584 emit_insn (gen_rtx_SET (VOIDmode, temp,
8585 gen_rtx_IF_THEN_ELSE (result_mode,
8586 gen_rtx_GE (VOIDmode,
8588 true_cond, false_cond)));
8590 false_cond = true_cond;
8592 temp = gen_reg_rtx (compare_mode);
8593 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
/* Final fsel: DEST = (op0 >= 0) ? TRUE_COND : FALSE_COND.  */
8601 emit_insn (gen_rtx_SET (VOIDmode, dest,
8602 gen_rtx_IF_THEN_ELSE (result_mode,
8603 gen_rtx_GE (VOIDmode,
8605 true_cond, false_cond)));
8609 /* Same as above, but for ints (isel). */

/* Emit an integer conditional move using the isel instruction.
   Only SImode compares are supported (all isel implementations so far
   are 32-bit); the signed/unsigned variant is chosen from the compare
   result's CC mode.  */
8612 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8618 rtx condition_rtx, cr;
8620 /* All isel implementations thus far are 32-bits. */
8621 if (GET_MODE (rs6000_compare_op0) != SImode)
8624 /* We still have to do the compare, because isel doesn't do a
8625 compare, it just looks at the CRx bits set by a previous compare
8627 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8628 cr = XEXP (condition_rtx, 0);
8630 if (GET_MODE (cr) == CCmode)
8631 emit_insn (gen_isel_signed (dest, condition_rtx,
8632 true_cond, false_cond, cr));
8634 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8635 true_cond, false_cond, cr));
/* Return the assembler template for an isel insn in OPERANDS.
   isel only tests for a bit being set, so GE/GEU/LE/LEU/NE are
   handled by reversing the condition (in place, in operands[1])
   and swapping the two source operands.  */
8641 output_isel (operands)
8646 code = GET_CODE (operands[1]);
8647 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8649 PUT_CODE (operands[1], reverse_condition (code));
8650 return "isel %0,%3,%2,%j1";
8653 return "isel %0,%2,%3,%j1";
/* Emit RTL computing min/max (per CODE: SMIN/SMAX/UMIN/UMAX) of
   OP0 and OP1 into DEST, implemented as a conditional move on a
   GE/GEU comparison.  */
8657 rs6000_emit_minmax (dest, code, op0, op1)
8663 enum machine_mode mode = GET_MODE (op0);
8667 if (code == SMAX || code == SMIN)
8672 if (code == SMAX || code == UMAX)
8673 target = emit_conditional_move (dest, c, op0, op1, mode,
8676 target = emit_conditional_move (dest, c, op0, op1, mode,
8678 if (target == NULL_RTX)
8681 emit_move_insn (dest, target);
8684 /* This page contains routines that are used to determine what the
8685 function prologue and epilogue code will do and write them out. */
8687 /* Return the first fixed-point register that is required to be
8688 saved. 32 if none. */
8691 first_reg_to_save ()
8695 /* Find lowest numbered live register. */
/* Call-used registers are skipped, except the PIC offset table
   register when PIC actually forces it to be saved (V.4 -fpic,
   or any PIC level on Darwin).  */
8696 for (first_reg = 13; first_reg <= 31; first_reg++)
8697 if (regs_ever_live[first_reg]
8698 && (! call_used_regs[first_reg]
8699 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8700 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8701 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
/* If the PIC register is used at all, make sure it is in the save
   range even if the loop stopped at a higher register.  */
8705 if (flag_pic && current_function_uses_pic_offset_table &&
8706 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
8707 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8713 /* Similar, for FP regs. */

/* Return the first FP register (46..63 in global numbering, i.e.
   f14..f31) that must be saved; 64 if none is live.  */
8716 first_fp_reg_to_save ()
8720 /* Find lowest numbered live register. */
8721 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8722 if (regs_ever_live[first_reg])
8728 /* Similar, for AltiVec regs. */

/* Return the first AltiVec register that must be saved
   (only v20..v31 are callee-saved); LAST_ALTIVEC_REGNO + 1 if the
   AltiVec ABI is off or none is live.  */
8731 first_altivec_reg_to_save ()
8735 /* Stack frame remains as is unless we are in AltiVec ABI. */
8736 if (! TARGET_ALTIVEC_ABI)
8737 return LAST_ALTIVEC_REGNO + 1;
8739 /* Find lowest numbered live register. */
8740 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8741 if (regs_ever_live[i])
8747 /* Return a 32-bit mask of the AltiVec registers we need to set in
8748 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8749 the 32-bit word is 0. */
8752 compute_vrsave_mask ()
8754 unsigned int i, mask = 0;
8756 /* First, find out if we use _any_ altivec registers. */
8757 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8758 if (regs_ever_live[i])
8759 mask |= ALTIVEC_REG_BIT (i);
8764 /* Next, add all registers that are call-clobbered. We do this
8765 because post-reload register optimizers such as regrename_optimize
8766 may choose to use them. They never change the register class
8767 chosen by reload, so cannot create new uses of altivec registers
8768 if there were none before, so the early exit above is safe. */
8769 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8770 altivec registers not saved in the mask, which might well make the
8771 adjustments below more effective in eliding the save/restore of
8772 VRSAVE in small functions. */
8773 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8774 if (call_used_regs[i])
8775 mask |= ALTIVEC_REG_BIT (i);
8777 /* Next, remove the argument registers from the set. These must
8778 be in the VRSAVE mask set by the caller, so we don't need to add
8779 them in again. More importantly, the mask we compute here is
8780 used to generate CLOBBERs in the set_vrsave insn, and we do not
8781 wish the argument registers to die. */
8782 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8783 mask &= ~ALTIVEC_REG_BIT (i);
8785 /* Similarly, remove the return value from the set. */
8788 diddle_return_value (is_altivec_return_reg, &yes);
8790 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
/* diddle_return_value callback: set *XYES (a bool) when REG is the
   AltiVec return-value register.  */
8797 is_altivec_return_reg (reg, xyes)
8801 bool *yes = (bool *) xyes;
8802 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8807 /* Calculate the stack information for the current function. This is
8808 complicated by having two separate calling sequences, the AIX calling
8809 sequence and the V.4 calling sequence.
8811 AIX (and Darwin/Mac OS X) stack frames look like:
8813 SP----> +---------------------------------------+
8814 | back chain to caller | 0 0
8815 +---------------------------------------+
8816 | saved CR | 4 8 (8-11)
8817 +---------------------------------------+
8819 +---------------------------------------+
8820 | reserved for compilers | 12 24
8821 +---------------------------------------+
8822 | reserved for binders | 16 32
8823 +---------------------------------------+
8824 | saved TOC pointer | 20 40
8825 +---------------------------------------+
8826 | Parameter save area (P) | 24 48
8827 +---------------------------------------+
8828 | Alloca space (A) | 24+P etc.
8829 +---------------------------------------+
8830 | Local variable space (L) | 24+P+A
8831 +---------------------------------------+
8832 | Float/int conversion temporary (X) | 24+P+A+L
8833 +---------------------------------------+
8834 | Save area for AltiVec registers (W) | 24+P+A+L+X
8835 +---------------------------------------+
8836 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8837 +---------------------------------------+
8838 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8839 +---------------------------------------+
8840 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
8841 +---------------------------------------+
8842 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
8843 +---------------------------------------+
8844 old SP->| back chain to caller's caller |
8845 +---------------------------------------+
8847 The required alignment for AIX configurations is two words (i.e., 8
8851 V.4 stack frames look like:
8853 SP----> +---------------------------------------+
8854 | back chain to caller | 0
8855 +---------------------------------------+
8856 | caller's saved LR | 4
8857 +---------------------------------------+
8858 | Parameter save area (P) | 8
8859 +---------------------------------------+
8860 | Alloca space (A) | 8+P
8861 +---------------------------------------+
8862 | Varargs save area (V) | 8+P+A
8863 +---------------------------------------+
8864 | Local variable space (L) | 8+P+A+V
8865 +---------------------------------------+
8866 | Float/int conversion temporary (X) | 8+P+A+V+L
8867 +---------------------------------------+
8868 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8869 +---------------------------------------+
8870 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8871 +---------------------------------------+
8872 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8873 +---------------------------------------+
8874 | SPE: area for 64-bit GP registers |
8875 +---------------------------------------+
8876 | SPE alignment padding |
8877 +---------------------------------------+
8878 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8879 +---------------------------------------+
8880 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8881 +---------------------------------------+
8882 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8883 +---------------------------------------+
8884 old SP->| back chain to caller's caller |
8885 +---------------------------------------+
8887 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8888 given. (But note below and in sysv4.h that we require only 8 and
8889 may round up the size of our stack frame anyways. The historical
8890 reason is early versions of powerpc-linux which didn't properly
8891 align the stack at program startup. A happy side-effect is that
8892 -mno-eabi libraries can be used with -meabi programs.)
8894 The EABI configuration defaults to the V.4 layout, unless
8895 -mcall-aix is used, in which case the AIX layout is used. However,
8896 the stack alignment requirements may differ. If -mno-eabi is not
8897 given, the required stack alignment is 8 bytes; if -mno-eabi is
8898 given, the required alignment is 16 bytes. (But see V.4 comment
8901 #ifndef ABI_STACK_BOUNDARY
8902 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
/* Compute the stack-frame layout of the current function: which
   registers must be saved, the size of each save area, the offset of
   each area from the stack pointer, and the total frame size (see the
   frame diagrams in the comment above).  The result is cached in a
   function-local static, so the returned pointer remains valid only
   until the next call.
   NOTE(review): this chunk is an elided listing -- the return type,
   braces, some declarations and several alternate branches are not
   visible here; the comments below describe only the visible lines.  */
8906 rs6000_stack_info ()
8908 static rs6000_stack_t info, zero_info;
8909 rs6000_stack_t *info_ptr = &info;
/* GPR width in bytes: 8 for 64-bit PowerPC, 4 otherwise.  */
8910 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8911 enum rs6000_abi abi;
8915 /* Zero all fields portably. */
8918 /* Select which calling sequence. */
8919 info_ptr->abi = abi = DEFAULT_ABI;
8921 /* Calculate which registers need to be saved & save area size. */
8922 info_ptr->first_gp_reg_save = first_reg_to_save ();
8923 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8924 even if it currently looks like we won't. */
8925 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
8926 || (flag_pic == 1 && abi == ABI_V4)
8927 || (flag_pic && abi == ABI_DARWIN))
8928 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8929 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
/* Otherwise the GPR save area runs from the first saved GPR to r31.
   NOTE(review): the `else` keyword between these two assignments is
   elided from this listing.  */
8931 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
8933 /* For the SPE, we have an additional upper 32-bits on each GPR.
8934 Ideally we should save the entire 64-bits only when the upper
8935 half is used in SIMD instructions. Since we only record
8936 registers live (not the size they are used in), this proves
8937 difficult because we'd have to traverse the instruction chain at
8938 the right time, taking reload into account. This is a real pain,
8939 so we opt to save the GPRs in 64-bits always. Anyone overly
8940 concerned with frame size can fix this. ;-).
8942 So... since we save all GPRs (except the SP) in 64-bits, the
8943 traditional GP save area will be empty. */
8945 info_ptr->gp_size = 0;
/* FP save area: 8 bytes per saved FPR (f<first>..f63).  */
8947 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
8948 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
/* AltiVec save area: 16 bytes per saved vector register.  */
8950 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
8951 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
8952 - info_ptr->first_altivec_reg_save);
8954 /* Does this function call anything? */
8955 info_ptr->calls_p = (! current_function_is_leaf
8956 || cfun->machine->ra_needs_full_frame);
8958 /* Determine if we need to save the link register. */
8959 if (rs6000_ra_ever_killed ()
8960 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
8961 #ifdef TARGET_RELOCATABLE
8962 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
8964 || (info_ptr->first_fp_reg_save != 64
8965 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
8966 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
8967 || (abi == ABI_V4 && current_function_calls_alloca)
8968 || (DEFAULT_ABI == ABI_DARWIN
8970 && current_function_uses_pic_offset_table)
8971 || info_ptr->calls_p)
8973 info_ptr->lr_save_p = 1;
/* Out-of-line FPR save/restore and other cases above clobber LR, so
   make sure dataflow knows it is live.  */
8974 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
8977 /* Determine if we need to save the condition code registers. */
8978 if (regs_ever_live[CR2_REGNO]
8979 || regs_ever_live[CR3_REGNO]
8980 || regs_ever_live[CR4_REGNO])
8982 info_ptr->cr_save_p = 1;
8984 info_ptr->cr_size = reg_size;
8987 /* If the current function calls __builtin_eh_return, then we need
8988 to allocate stack space for registers that will hold data for
8989 the exception handler. */
8990 if (current_function_calls_eh_return)
8993 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
8996 /* SPE saves EH registers in 64-bits. */
8997 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9002 /* Determine various sizes. */
9003 info_ptr->reg_size = reg_size;
9004 info_ptr->fixed_size = RS6000_SAVE_AREA;
9005 info_ptr->varargs_size = RS6000_VARARGS_AREA;
/* Local variable space, rounded to an 8-byte boundary.  */
9006 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9007 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
/* SPE: all GPRs from the first saved one are kept as 64-bit values.  */
9011 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9013 info_ptr->spe_gp_size = 0;
/* VRSAVE needs one 32-bit slot, but only if any vector reg is live.  */
9015 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9017 info_ptr->vrsave_mask = compute_vrsave_mask ();
9018 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9022 info_ptr->vrsave_mask = 0;
9023 info_ptr->vrsave_size = 0;
9026 /* Calculate the offsets. */
/* NOTE(review): this is a switch over the ABI; the switch head and
   some case labels are elided from this listing.  Offsets are
   negative displacements from the incoming stack pointer, laid out
   per the frame diagrams above.  */
9034 case ABI_AIX_NODESC:
9036 info_ptr->fp_save_offset = - info_ptr->fp_size;
9037 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9039 if (TARGET_ALTIVEC_ABI)
9041 info_ptr->vrsave_save_offset
9042 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9044 /* Align stack so vector save area is on a quadword boundary. */
9045 if (info_ptr->altivec_size != 0)
9046 info_ptr->altivec_padding_size
9047 = 16 - (-info_ptr->vrsave_save_offset % 16);
9049 info_ptr->altivec_padding_size = 0;
9051 info_ptr->altivec_save_offset
9052 = info_ptr->vrsave_save_offset
9053 - info_ptr->altivec_padding_size
9054 - info_ptr->altivec_size;
9056 /* Adjust for AltiVec case. */
9057 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9060 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
/* AIX-style frames keep CR and LR at fixed positive slots in the
   caller's frame header.  */
9061 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9062 info_ptr->lr_save_offset = 2*reg_size;
/* NOTE(review): the V.4 case label is elided; the assignments below
   belong to the ABI_V4 branch of the switch.  */
9066 info_ptr->fp_save_offset = - info_ptr->fp_size;
9067 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9068 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9072 /* Align stack so SPE GPR save area is aligned on a
9073 double-word boundary. */
9074 if (info_ptr->spe_gp_size != 0)
9075 info_ptr->spe_padding_size
9076 = 8 - (-info_ptr->cr_save_offset % 8);
9078 info_ptr->spe_padding_size = 0;
9080 info_ptr->spe_gp_save_offset
9081 = info_ptr->cr_save_offset
9082 - info_ptr->spe_padding_size
9083 - info_ptr->spe_gp_size;
9085 /* Adjust for SPE case. */
9086 info_ptr->toc_save_offset
9087 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9089 else if (TARGET_ALTIVEC_ABI)
9091 info_ptr->vrsave_save_offset
9092 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9094 /* Align stack so vector save area is on a quadword boundary. */
9095 if (info_ptr->altivec_size != 0)
9096 info_ptr->altivec_padding_size
9097 = 16 - (-info_ptr->vrsave_save_offset % 16);
9099 info_ptr->altivec_padding_size = 0;
9101 info_ptr->altivec_save_offset
9102 = info_ptr->vrsave_save_offset
9103 - info_ptr->altivec_padding_size
9104 - info_ptr->altivec_size;
9106 /* Adjust for AltiVec case. */
9107 info_ptr->toc_save_offset
9108 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9111 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9112 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9113 info_ptr->lr_save_offset = reg_size;
/* Total register-save area, rounded up to the ABI alignment.  */
9117 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9119 + info_ptr->altivec_size
9120 + info_ptr->altivec_padding_size
9121 + info_ptr->vrsave_size
9122 + info_ptr->spe_gp_size
9123 + info_ptr->spe_padding_size
9127 + info_ptr->vrsave_size
9128 + info_ptr->toc_size,
9129 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9132 total_raw_size = (info_ptr->vars_size
9133 + info_ptr->parm_size
9134 + info_ptr->save_size
9135 + info_ptr->varargs_size
9136 + info_ptr->fixed_size);
9138 info_ptr->total_size =
9139 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9141 /* Determine if we need to allocate any stack frame:
9143 For AIX we need to push the stack if a frame pointer is needed
9144 (because the stack might be dynamically adjusted), if we are
9145 debugging, if we make calls, or if the sum of fp_save, gp_save,
9146 and local variables are more than the space needed to save all
9147 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9148 + 18*8 = 288 (GPR13 reserved).
9150 For V.4 we don't have the stack cushion that AIX uses, but assume
9151 that the debugger can handle stackless frames. */
9153 if (info_ptr->calls_p)
9154 info_ptr->push_p = 1;
9156 else if (abi == ABI_V4)
9157 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9160 info_ptr->push_p = (frame_pointer_needed
9161 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9162 || ((total_raw_size - info_ptr->fixed_size)
9163 > (TARGET_32BIT ? 220 : 288)));
9165 /* Zero offsets if we're not saving those registers. */
9166 if (info_ptr->fp_size == 0)
9167 info_ptr->fp_save_offset = 0;
9169 if (info_ptr->gp_size == 0)
9170 info_ptr->gp_save_offset = 0;
9172 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9173 info_ptr->altivec_save_offset = 0;
9175 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9176 info_ptr->vrsave_save_offset = 0;
9178 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9179 info_ptr->spe_gp_save_offset = 0;
9181 if (! info_ptr->lr_save_p)
9182 info_ptr->lr_save_offset = 0;
9184 if (! info_ptr->cr_save_p)
9185 info_ptr->cr_save_offset = 0;
9187 if (! info_ptr->toc_save_p)
9188 info_ptr->toc_save_offset = 0;
/* Dump the fields of INFO, a frame-layout record, to stderr for
   debugging.  INFO is recomputed via rs6000_stack_info () -- presumably
   only when the caller passed a null INFO; the guard is elided from
   this listing (TODO confirm).  Fields with default/zero values are
   skipped so the output stays compact.  */
9194 debug_stack_info (info)
9195 rs6000_stack_t *info;
9197 const char *abi_string;
9200 info = rs6000_stack_info ();
9202 fprintf (stderr, "\nStack information for function %s:\n",
9203 ((current_function_decl && DECL_NAME (current_function_decl))
9204 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
/* Translate the ABI enum to a human-readable tag.  NOTE(review): the
   switch head and some case labels are elided from this listing.  */
9209 default: abi_string = "Unknown"; break;
9210 case ABI_NONE: abi_string = "NONE"; break;
9212 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9213 case ABI_DARWIN: abi_string = "Darwin"; break;
9214 case ABI_V4: abi_string = "V.4"; break;
9217 fprintf (stderr, "\tABI = %5s\n", abi_string);
9219 if (TARGET_ALTIVEC_ABI)
9220 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9223 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
/* 32 / 64 / LAST_ALTIVEC_REGNO+1 are the "nothing saved" sentinels.  */
9225 if (info->first_gp_reg_save != 32)
9226 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9228 if (info->first_fp_reg_save != 64)
9229 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9231 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9232 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9233 info->first_altivec_reg_save);
9235 if (info->lr_save_p)
9236 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9238 if (info->cr_save_p)
9239 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9241 if (info->toc_save_p)
9242 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9244 if (info->vrsave_mask)
9245 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9248 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9251 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
/* Save-area offsets (negative displacements from the SP unless the
   ABI places them in the frame header).  */
9253 if (info->gp_save_offset)
9254 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9256 if (info->fp_save_offset)
9257 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9259 if (info->altivec_save_offset)
9260 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9261 info->altivec_save_offset);
9263 if (info->spe_gp_save_offset)
9264 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9265 info->spe_gp_save_offset);
9267 if (info->vrsave_save_offset)
9268 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9269 info->vrsave_save_offset);
9271 if (info->lr_save_offset)
9272 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9274 if (info->cr_save_offset)
9275 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9277 if (info->toc_save_offset)
9278 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9280 if (info->varargs_save_offset)
9281 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
/* Area sizes.  */
9283 if (info->total_size)
9284 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9286 if (info->varargs_size)
9287 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9289 if (info->vars_size)
9290 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9292 if (info->parm_size)
9293 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9295 if (info->fixed_size)
9296 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9299 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9301 if (info->spe_gp_size)
9302 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9305 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9307 if (info->altivec_size)
9308 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9310 if (info->vrsave_size)
9311 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9313 if (info->altivec_padding_size)
9314 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9315 info->altivec_padding_size);
9317 if (info->spe_padding_size)
9318 fprintf (stderr, "\tspe_padding_size = %5d\n",
9319 info->spe_padding_size);
9322 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9325 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9328 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9330 if (info->save_size)
9331 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9333 if (info->reg_size != 4)
9334 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9336 fprintf (stderr, "\n");
/* Return an rtx for the return address of the current frame (COUNT ==
   0) or of an outer frame.  For COUNT != 0, or when compiling PIC,
   the address is loaded from FRAME's back chain in memory; otherwise
   the pristine value of the link register at function entry is used.
   NOTE(review): the return type, parameter declarations and part of
   the memory-load branch are elided from this listing.  */
9340 rs6000_return_addr (count, frame)
9344 /* Currently we don't optimize very well between prolog and body
9345 code and for PIC code the code can be actually quite bad, so
9346 don't try to be too clever here. */
9347 if (count != 0 || flag_pic != 0)
/* Force the prologue to lay out a full frame so the saved return
   address actually exists in memory.  */
9349 cfun->machine->ra_needs_full_frame = 1;
9356 plus_constant (copy_to_reg
9357 (gen_rtx_MEM (Pmode,
9358 memory_address (Pmode, frame))),
9359 RETURN_ADDRESS_OFFSET)));
/* Simple case: the link register's value on entry to the function.  */
9362 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
/* Return nonzero if the link register may be clobbered anywhere in the
   current function, i.e. whether LR needs to be saved in the prologue.
   NOTE(review): the return type, locals, and the bodies of the early
   `if`s are elided from this listing.  */
9366 rs6000_ra_ever_killed ()
9370 #ifdef ASM_OUTPUT_MI_THUNK
9371 if (current_function_is_thunk
9374 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
9375 || cfun->machine->ra_needs_full_frame)
/* Without a tracked initial LR value, fall back to the conservative
   dataflow answer.  */
9376 return regs_ever_live[LINK_REGISTER_REGNUM];
/* Scan the whole insn stream (including queued sequences) for any set
   of LR.  */
9378 push_topmost_sequence ();
9380 pop_topmost_sequence ();
9382 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9386 /* Add a REG_MAYBE_DEAD note to the insn.  Used on TOC-setup insns
     emitted by the prologue so flow may delete them if the TOC turns
     out to be unused.  NOTE(review): the note's datum and the rest of
     the gen_rtx_EXPR_LIST call are elided from this listing.  */
9388 rs6000_maybe_dead (insn)
9391 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9396 /* Emit instructions needed to load the TOC register.
9397 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9398 a constant pool; or for SVR4 -fpic.
     FROMPROLOG is nonzero when called while emitting the prologue, in
     which case hard registers (LR, r0) are used as temporaries instead
     of fresh pseudos.  All emitted insns get REG_MAYBE_DEAD notes so
     they can be deleted if the TOC ends up unused.
     NOTE(review): this listing is elided -- braces, some declarations
     (buf, symF, symL, tocsym) and a few statements are not visible.  */
9401 rs6000_emit_load_toc_table (fromprolog)
9405 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9407 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
/* SVR4 small-model PIC: a single load_toc_v4_pic insn.  */
9409 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9411 rtx temp = (fromprolog
9412 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9413 : gen_reg_rtx (Pmode))
9414 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
9415 rs6000_maybe_dead (emit_move_insn (dest, temp));
/* -fPIC (large-model): compute the GOT address from a local label.  */
9417 else if (flag_pic == 2)
9420 rtx tempLR = (fromprolog
9421 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9422 : gen_reg_rtx (Pmode));
9423 rtx temp0 = (fromprolog
9424 ? gen_rtx_REG (Pmode, 0)
9425 : gen_reg_rtx (Pmode));
9428 /* possibly create the toc section */
9429 if (! toc_initialized)
9432 function_section (current_function_decl);
/* Prologue path: reference the LCF/LCL labels for this function.  */
9439 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9440 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9442 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9443 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9445 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9447 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9448 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
/* Non-prologue path: make up a fresh LCG label each time.  */
9455 static int reload_toc_labelno = 0;
9457 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9459 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9460 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9462 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
9465 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9466 rs6000_maybe_dead (emit_move_insn (temp0,
9467 gen_rtx_MEM (Pmode, dest)));
9469 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
9471 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
9473 /* This is for AIX code running in non-PIC ELF. */
/* Materialize the LCTOC1 address with a lis/addi (elf_high/elf_low)
   pair.  */
9476 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
9477 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9479 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
9480 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
/* AIX proper: reload the TOC pointer from the function descriptor's
   save slot; word vs. doubleword variant by target width.  */
9488 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
9490 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
/* Return the alias set used for TOC references, creating it lazily on
   first use.  NOTE(review): the return type, the guard around the
   initialization and the return statement are elided from this
   listing.  */
9495 get_TOC_alias_set ()
9497 static int set = -1;
9499 set = new_alias_set ();
9503 /* This returns nonzero if the current function uses the TOC. This is
9504 determined by the presence of (unspec ... 7), which is generated by
9505 the various load_toc_* patterns.
     NOTE(review): the function header is elided from this listing --
     presumably this is the body of uses_TOC; verify against the full
     file.  */
9512 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9515 rtx pat = PATTERN (insn);
/* The load_toc_* insns are PARALLELs; look for an UNSPEC 7 element.  */
9518 if (GET_CODE (pat) == PARALLEL)
9519 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9520 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9521 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
/* Build the address rtx for a TOC entry: the TOC register plus the
   constant difference between SYMBOL and the TOC base label.
   NOTE(review): the return type and parameter declaration are elided
   from this listing.  */
9528 create_TOC_reference (symbol)
9531 return gen_rtx_PLUS (Pmode,
9532 gen_rtx_REG (Pmode, TOC_REGISTER),
9533 gen_rtx_CONST (Pmode,
9534 gen_rtx_MINUS (Pmode, symbol,
9535 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9539 /* __throw will restore its own return address to be the same as the
9540 return address of the function that the throw is being made to.
9541 This is unfortunate, because we want to check the original
9542 return address to see if we need to restore the TOC.
9543 So we have to squirrel it away here.
9544 This is used only in compiling __throw and __rethrow.
9546 Most of this code should be removed by CSE. */
/* Pseudo holding the instruction word found at the caller's return
   address; consumed later by rs6000_emit_eh_toc_restore.  */
9547 static rtx insn_after_throw;
9549 /* This does the saving...
     Walk one frame up via the back chain, locate the saved return
     address slot (2 words above the caller's frame base), and capture
     the 32-bit opcode stored at that address into insn_after_throw.
     NOTE(review): the return type, braces and the `mem` declaration
     are elided from this listing.  */
9551 rs6000_aix_emit_builtin_unwind_init ()
9554 rtx stack_top = gen_reg_rtx (Pmode);
9555 rtx opcode_addr = gen_reg_rtx (Pmode);
9557 insn_after_throw = gen_reg_rtx (SImode);
/* Load the caller's frame pointer (back chain).  */
9559 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9560 emit_move_insn (stack_top, mem);
9562 mem = gen_rtx_MEM (Pmode,
9563 gen_rtx_PLUS (Pmode, stack_top,
9564 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9565 emit_move_insn (opcode_addr, mem);
9566 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9569 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9570 in _eh.o). Only used on AIX.
9572 The idea is that on AIX, function calls look like this:
9573 bl somefunction-trampoline
9577 somefunction-trampoline:
9579 ... load function address in the count register ...
9581 or like this, if the linker determines that this is not a cross-module call
9582 and so the TOC need not be restored:
9585 or like this, if the compiler could determine that this is not a
9588 now, the tricky bit here is that register 2 is saved and restored
9589 by the _linker_, so we can't readily generate debugging information
9590 for it. So we need to go back up the call chain looking at the
9591 insns at return addresses to see which calls saved the TOC register
9592 and so see where it gets restored from.
9594 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9595 just before the actual epilogue.
9597 On the bright side, this incurs no space or time overhead unless an
9598 exception is thrown, except for the extra code in libgcc.a.
9600 The parameter STACKSIZE is a register containing (at runtime)
9601 the amount to be popped off the stack in addition to the stack frame
9602 of this routine (which will be __throw or __rethrow, and so is
9603 guaranteed to have a stack frame). */
9606 rs6000_emit_eh_toc_restore (stacksize)
9610 rtx bottom_of_stack = gen_reg_rtx (Pmode);
9611 rtx tocompare = gen_reg_rtx (SImode);
9612 rtx opcode = gen_reg_rtx (SImode);
9613 rtx opcode_addr = gen_reg_rtx (Pmode);
9615 rtx loop_start = gen_label_rtx ();
9616 rtx no_toc_restore_needed = gen_label_rtx ();
9617 rtx loop_exit = gen_label_rtx ();
/* Start from the current frame's back chain.  */
9619 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9620 set_mem_alias_set (mem, rs6000_sr_alias_set);
9621 emit_move_insn (bottom_of_stack, mem);
9623 top_of_stack = expand_binop (Pmode, add_optab,
9624 bottom_of_stack, stacksize,
9625 NULL_RTX, 1, OPTAB_WIDEN);
/* Opcode of the "restore r2 after call" instruction: lwz r2,20(r1)
   on 32-bit, ld r2,40(r1) on 64-bit.  */
9627 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
9628 : 0xE8410028, SImode));
9630 if (insn_after_throw == NULL_RTX)
9632 emit_move_insn (opcode, insn_after_throw);
/* Walk frames from bottom_of_stack up to top_of_stack; whenever the
   insn after a return address is the TOC-restore opcode, reload r2
   from that frame's TOC save slot.  */
9634 emit_note (NULL, NOTE_INSN_LOOP_BEG);
9635 emit_label (loop_start);
9637 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
9638 SImode, NULL_RTX, NULL_RTX,
9639 no_toc_restore_needed);
/* TOC save slot: 5 words above the frame base.  */
9641 mem = gen_rtx_MEM (Pmode,
9642 gen_rtx_PLUS (Pmode, bottom_of_stack,
9643 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
9644 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
9646 emit_label (no_toc_restore_needed);
9647 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
9648 Pmode, NULL_RTX, NULL_RTX,
/* Advance to the next outer frame via the back chain, and fetch the
   opcode at that frame's return address for the next iteration.  */
9651 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
9652 set_mem_alias_set (mem, rs6000_sr_alias_set);
9653 emit_move_insn (bottom_of_stack, mem);
9655 mem = gen_rtx_MEM (Pmode,
9656 gen_rtx_PLUS (Pmode, bottom_of_stack,
9657 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9658 emit_move_insn (opcode_addr, mem);
9659 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
9661 emit_note (NULL, NOTE_INSN_LOOP_CONT);
9662 emit_jump (loop_start);
9663 emit_note (NULL, NOTE_INSN_LOOP_END);
9664 emit_label (loop_exit);
9666 #endif /* TARGET_AIX */
9668 /* This ties together stack memory (MEM with an alias set of
9669 rs6000_sr_alias_set) and the change to the stack pointer.
     Emits a stack_tie blockage insn so the scheduler cannot move
     stack-slot accesses across a stack-pointer update.  */
9672 rs6000_emit_stack_tie ()
9674 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9676 set_mem_alias_set (mem, rs6000_sr_alias_set);
9677 emit_insn (gen_stack_tie (mem));
9680 /* Emit the correct code for allocating stack space, as insns.
9681 If COPY_R12, make sure a copy of the old frame is left in r12.
9682 The generated code may use hard register 0 as a temporary.
     Also emits the -fstack-limit check and attaches the
     REG_FRAME_RELATED_EXPR note describing the SP decrement for
     dwarf2 unwind info.
     NOTE(review): this listing is elided -- braces, some operands of
     the emitted insns and several statements are not visible.  */
9685 rs6000_emit_allocate_stack (size, copy_r12)
9690 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
9691 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
9692 rtx todec = GEN_INT (-size);
/* -fstack-limit: trap if the new SP would drop below the limit.  */
9694 if (current_function_limit_stack)
9696 if (REG_P (stack_limit_rtx)
9697 && REGNO (stack_limit_rtx) > 1
9698 && REGNO (stack_limit_rtx) <= 31)
9700 emit_insn (Pmode == SImode
9701 ? gen_addsi3 (tmp_reg,
9704 : gen_adddi3 (tmp_reg,
9708 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
/* Symbolic stack limit: materialize limit+size with lis/addi.  */
9711 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9713 && DEFAULT_ABI == ABI_V4)
9715 rtx toload = gen_rtx_CONST (VOIDmode,
9716 gen_rtx_PLUS (Pmode,
9720 emit_insn (gen_elf_high (tmp_reg, toload));
9721 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
9722 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9726 warning ("stack limit expression is not supported");
9729 if (copy_r12 || ! TARGET_UPDATE)
9730 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
/* Large frames: the decrement does not fit an immediate, so load it
   into a temporary first.  */
9736 /* Need a note here so that try_split doesn't get confused. */
9737 if (get_last_insn() == NULL_RTX)
9738 emit_note (0, NOTE_INSN_DELETED);
9739 insn = emit_move_insn (tmp_reg, todec);
9740 try_split (PATTERN (insn), insn, 0);
/* With TARGET_UPDATE, decrement SP and store the back chain in one
   stwu/stdu-style insn.  */
9744 if (Pmode == SImode)
9745 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
9748 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
/* Otherwise adjust SP and store the old SP (kept in r12) separately.  */
9753 if (Pmode == SImode)
9754 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
9756 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
9757 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
9758 gen_rtx_REG (Pmode, 12));
/* Describe the net effect (sp = sp - size) for the unwinder.  */
9761 RTX_FRAME_RELATED_P (insn) = 1;
9763 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9764 gen_rtx_SET (VOIDmode, stack_reg,
9765 gen_rtx_PLUS (Pmode, stack_reg,
9770 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9773 (mem (plus (blah) (regXX)))
9777 (mem (plus (blah) (const VALUE_OF_REGXX))).
     I.e. rewrite the register-indexed AltiVec save address in INSN's
     pattern as a constant offset VAL so the unwinder can describe it.
     NOTE(review): parameter declarations and the tail of the
     gen_rtx_EXPR_LIST call are elided from this listing.  */
9780 altivec_frame_fixup (insn, reg, val)
/* Copy the pattern so the note doesn't share structure with the insn.  */
9786 real = copy_rtx (PATTERN (insn));
9788 real = replace_rtx (real, reg, GEN_INT (val));
9790 RTX_FRAME_RELATED_P (insn) = 1;
9791 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9796 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9797 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9798 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9799 deduce these equivalences by itself so it wasn't necessary to hold
9800 its hand so much.
     NOTE(review): parameter declarations, braces and a few statements
     are elided from this listing.  */
9803 rs6000_frame_related (insn, reg, val, reg2, rreg)
9812 /* copy_rtx will not make unique copies of registers, so we need to
9813 ensure we don't have unwanted sharing here. */
9815 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9818 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9820 real = copy_rtx (PATTERN (insn));
9822 if (reg2 != NULL_RTX)
9823 real = replace_rtx (real, reg2, rreg);
/* Express addresses relative to the stack pointer (r1) plus VAL.  */
9825 real = replace_rtx (real, reg,
9826 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
9827 STACK_POINTER_REGNUM),
9830 /* We expect that 'real' is either a SET or a PARALLEL containing
9831 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9832 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9834 if (GET_CODE (real) == SET)
/* Fold the substituted constants so the note is in canonical form.  */
9838 temp = simplify_rtx (SET_SRC (set));
9840 SET_SRC (set) = temp;
9841 temp = simplify_rtx (SET_DEST (set));
9843 SET_DEST (set) = temp;
9844 if (GET_CODE (SET_DEST (set)) == MEM)
9846 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9848 XEXP (SET_DEST (set), 0) = temp;
9851 else if (GET_CODE (real) == PARALLEL)
9854 for (i = 0; i < XVECLEN (real, 0); i++)
9855 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
9857 rtx set = XVECEXP (real, 0, i);
9859 temp = simplify_rtx (SET_SRC (set));
9861 SET_SRC (set) = temp;
9862 temp = simplify_rtx (SET_DEST (set));
9864 SET_DEST (set) = temp;
9865 if (GET_CODE (SET_DEST (set)) == MEM)
9867 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9869 XEXP (SET_DEST (set), 0) = temp;
9871 RTX_FRAME_RELATED_P (set) = 1;
/* Attach the rewritten pattern as the frame-related expression.  */
9877 RTX_FRAME_RELATED_P (insn) = 1;
9878 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9883 /* Returns an insn that has a vrsave set operation with the
9884 appropriate CLOBBERs.
     REG holds the new VRSAVE value; INFO describes which AltiVec
     registers are live; EPILOGUEP is nonzero when called from the
     epilogue (call-saved registers then get a USE/SET instead of a
     CLOBBER -- see the comment below).
     NOTE(review): braces, the vreg declaration and the nclobs
     bookkeeping are elided from this listing.  */
9887 generate_set_vrsave (reg, info, epiloguep)
9889 rs6000_stack_t *info;
9893 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
9894 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
/* Element 0: the volatile set of VRSAVE itself.  */
9897 = gen_rtx_SET (VOIDmode,
9899 gen_rtx_UNSPEC_VOLATILE (SImode,
9900 gen_rtvec (2, reg, vrsave),
9905 /* We need to clobber the registers in the mask so the scheduler
9906 does not move sets to VRSAVE before sets of AltiVec registers.
9908 However, if the function receives nonlocal gotos, reload will set
9909 all call saved registers live. We will end up with:
9911 (set (reg 999) (mem))
9912 (parallel [ (set (reg vrsave) (unspec blah))
9913 (clobber (reg 999))])
9915 The clobber will cause the store into reg 999 to be dead, and
9916 flow will attempt to delete an epilogue insn. In this case, we
9917 need an unspec use/set of the register. */
9919 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9920 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
9922 if (!epiloguep || call_used_regs [i])
9923 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
9924 gen_rtx_REG (V4SImode, i));
/* Epilogue + call-saved register: self-referential unspec SET
   instead of a CLOBBER, per the comment above.  */
9927 rtx reg = gen_rtx_REG (V4SImode, i);
9930 = gen_rtx_SET (VOIDmode,
9932 gen_rtx_UNSPEC (V4SImode,
9933 gen_rtvec (1, reg), 27));
/* Bundle the VRSAVE set and all clobbers into one PARALLEL.  */
9937 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
9939 for (i = 0; i < nclobs; ++i)
9940 XVECEXP (insn, 0, i) = clobs[i];
9945 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
9946 Save REGNO into [FRAME_REG + OFFSET] in mode MODE.
     FRAME_PTR and TOTAL_SIZE are forwarded to rs6000_frame_related so
     the store is described relative to the stack pointer for unwind
     info.  NOTE(review): braces and the `replaceb` assignment in the
     register-indexed branch are elided from this listing.  */
9949 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
9952 enum machine_mode mode;
9957 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
9958 rtx replacea, replaceb;
9960 int_rtx = GEN_INT (offset);
9962 /* Some cases that need register indexed addressing. */
9963 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
9965 && SPE_VECTOR_MODE (mode)
9966 && !SPE_CONST_OFFSET_OK (offset)))
9968 /* Whomever calls us must make sure r11 is available in the
9969 flow path of instructions in the prologue. */
9970 offset_rtx = gen_rtx_REG (Pmode, 11);
9971 emit_move_insn (offset_rtx, int_rtx);
9973 replacea = offset_rtx;
/* Plain case: the offset fits directly in the address.  */
9978 offset_rtx = int_rtx;
9979 replacea = NULL_RTX;
9980 replaceb = NULL_RTX;
9983 reg = gen_rtx_REG (mode, regno);
9984 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
9985 mem = gen_rtx_MEM (mode, addr);
9986 set_mem_alias_set (mem, rs6000_sr_alias_set);
9988 insn = emit_move_insn (mem, reg);
9990 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
9993 /* Emit an offset memory reference suitable for a frame store, while
9994 converting to a valid addressing mode.
     Returns (mem:MODE (plus REG offset)); for SPE vector modes the
     offset is first loaded into FIXED_SCRATCH because SPE loads and
     stores cannot take arbitrary constant offsets.
     NOTE(review): the return type and parameter declarations are
     partly elided from this listing.  */
9997 gen_frame_mem_offset (mode, reg, offset)
9998 enum machine_mode mode;
10002 rtx int_rtx, offset_rtx;
10004 int_rtx = GEN_INT (offset);
10006 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10008 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10009 emit_move_insn (offset_rtx, int_rtx);
10012 offset_rtx = int_rtx;
10014 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10017 /* Emit function prologue as insns. */
10020 rs6000_emit_prologue ()
10022 rs6000_stack_t *info = rs6000_stack_info ();
10023 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10024 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10025 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10026 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10027 rtx frame_reg_rtx = sp_reg_rtx;
10028 rtx cr_save_rtx = NULL;
10030 int saving_FPRs_inline;
10031 int using_store_multiple;
10032 HOST_WIDE_INT sp_offset = 0;
10034 if (TARGET_SPE_ABI)
10036 reg_mode = V2SImode;
10040 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10042 && info->first_gp_reg_save < 31);
10043 saving_FPRs_inline = (info->first_fp_reg_save == 64
10044 || FP_SAVE_INLINE (info->first_fp_reg_save));
10046 /* For V.4, update stack before we do any saving and set back pointer. */
10047 if (info->push_p && DEFAULT_ABI == ABI_V4)
10049 if (info->total_size < 32767)
10050 sp_offset = info->total_size;
10052 frame_reg_rtx = frame_ptr_rtx;
10053 rs6000_emit_allocate_stack (info->total_size,
10054 (frame_reg_rtx != sp_reg_rtx
10055 && (info->cr_save_p
10057 || info->first_fp_reg_save < 64
10058 || info->first_gp_reg_save < 32
10060 if (frame_reg_rtx != sp_reg_rtx)
10061 rs6000_emit_stack_tie ();
10064 /* Save AltiVec registers if needed. */
10065 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10069 /* There should be a non inline version of this, for when we
10070 are saving lots of vector registers. */
10071 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10072 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10074 rtx areg, savereg, mem;
10077 offset = info->altivec_save_offset + sp_offset
10078 + 16 * (i - info->first_altivec_reg_save);
10080 savereg = gen_rtx_REG (V4SImode, i);
10082 areg = gen_rtx_REG (Pmode, 0);
10083 emit_move_insn (areg, GEN_INT (offset));
10085 /* AltiVec addressing mode is [reg+reg]. */
10086 mem = gen_rtx_MEM (V4SImode,
10087 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10089 set_mem_alias_set (mem, rs6000_sr_alias_set);
10091 insn = emit_move_insn (mem, savereg);
10093 altivec_frame_fixup (insn, areg, offset);
10097 /* VRSAVE is a bit vector representing which AltiVec registers
10098 are used. The OS uses this to determine which vector
10099 registers to save on a context switch. We need to save
10100 VRSAVE on the stack frame, add whatever AltiVec registers we
10101 used in this function, and do the corresponding magic in the
10104 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10106 rtx reg, mem, vrsave;
10109 /* Get VRSAVE onto a GPR. */
10110 reg = gen_rtx_REG (SImode, 12);
10111 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10113 emit_insn (gen_get_vrsave_internal (reg));
10115 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10118 offset = info->vrsave_save_offset + sp_offset;
10120 = gen_rtx_MEM (SImode,
10121 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10122 set_mem_alias_set (mem, rs6000_sr_alias_set);
10123 insn = emit_move_insn (mem, reg);
10125 /* Include the registers in the mask. */
10126 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10128 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10131 /* If we use the link register, get it into r0. */
10132 if (info->lr_save_p)
10133 emit_move_insn (gen_rtx_REG (Pmode, 0),
10134 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10136 /* If we need to save CR, put it into r12. */
10137 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10139 cr_save_rtx = gen_rtx_REG (SImode, 12);
10140 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10143 /* Do any required saving of fpr's. If only one or two to save, do
10144 it ourselves. Otherwise, call function. */
10145 if (saving_FPRs_inline)
10148 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10149 if ((regs_ever_live[info->first_fp_reg_save+i]
10150 && ! call_used_regs[info->first_fp_reg_save+i]))
10151 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10152 info->first_fp_reg_save + i,
10153 info->fp_save_offset + sp_offset + 8 * i,
10156 else if (info->first_fp_reg_save != 64)
10160 const char *alloc_rname;
10162 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10164 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10165 gen_rtx_REG (Pmode,
10166 LINK_REGISTER_REGNUM));
10167 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10168 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10169 alloc_rname = ggc_strdup (rname);
10170 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10171 gen_rtx_SYMBOL_REF (Pmode,
10173 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10175 rtx addr, reg, mem;
10176 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10177 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10178 GEN_INT (info->fp_save_offset
10179 + sp_offset + 8*i));
10180 mem = gen_rtx_MEM (DFmode, addr);
10181 set_mem_alias_set (mem, rs6000_sr_alias_set);
10183 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10185 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10186 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10187 NULL_RTX, NULL_RTX);
10190 /* Save GPRs. This is done as a PARALLEL if we are using
10191 the store-multiple instructions. */
10192 if (using_store_multiple)
10196 p = rtvec_alloc (32 - info->first_gp_reg_save);
10197 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
10198 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10200 rtx addr, reg, mem;
10201 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10202 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10203 GEN_INT (info->gp_save_offset
10206 mem = gen_rtx_MEM (reg_mode, addr);
10207 set_mem_alias_set (mem, rs6000_sr_alias_set);
10209 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10211 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10212 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10213 NULL_RTX, NULL_RTX);
10218 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10219 if ((regs_ever_live[info->first_gp_reg_save+i]
10220 && ! call_used_regs[info->first_gp_reg_save+i])
10221 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10222 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10223 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10225 rtx addr, reg, mem;
10226 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10228 if (TARGET_SPE_ABI)
10230 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10233 if (!SPE_CONST_OFFSET_OK (offset))
10235 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10236 emit_move_insn (b, GEN_INT (offset));
10239 b = GEN_INT (offset);
10241 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10242 mem = gen_rtx_MEM (V2SImode, addr);
10243 set_mem_alias_set (mem, rs6000_sr_alias_set);
10244 insn = emit_move_insn (mem, reg);
10246 if (GET_CODE (b) == CONST_INT)
10247 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10248 NULL_RTX, NULL_RTX);
10250 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10251 b, GEN_INT (offset));
10255 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10256 GEN_INT (info->gp_save_offset
10259 mem = gen_rtx_MEM (reg_mode, addr);
10260 set_mem_alias_set (mem, rs6000_sr_alias_set);
10262 insn = emit_move_insn (mem, reg);
10263 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10264 NULL_RTX, NULL_RTX);
10269 /* ??? There's no need to emit actual instructions here, but it's the
10270 easiest way to get the frame unwind information emitted. */
10271 if (current_function_calls_eh_return)
10273 unsigned int i, regno;
10277 regno = EH_RETURN_DATA_REGNO (i);
10278 if (regno == INVALID_REGNUM)
10281 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10282 info->ehrd_offset + sp_offset
10283 + reg_size * (int) i,
10288 /* Save lr if we used it. */
10289 if (info->lr_save_p)
10291 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10292 GEN_INT (info->lr_save_offset + sp_offset));
10293 rtx reg = gen_rtx_REG (Pmode, 0);
10294 rtx mem = gen_rtx_MEM (Pmode, addr);
10295 /* This should not be of rs6000_sr_alias_set, because of
10296 __builtin_return_address. */
10298 insn = emit_move_insn (mem, reg);
10299 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10300 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10303 /* Save CR if we use any that must be preserved. */
10304 if (info->cr_save_p)
10306 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10307 GEN_INT (info->cr_save_offset + sp_offset));
10308 rtx mem = gen_rtx_MEM (SImode, addr);
10310 set_mem_alias_set (mem, rs6000_sr_alias_set);
10312 /* If r12 was used to hold the original sp, copy cr into r0 now
10314 if (REGNO (frame_reg_rtx) == 12)
10316 cr_save_rtx = gen_rtx_REG (SImode, 0);
10317 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10319 insn = emit_move_insn (mem, cr_save_rtx);
10321 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10322 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10323 OK. All we have to do is specify that _one_ condition code
10324 register is saved in this stack slot. The thrower's epilogue
10325 will then restore all the call-saved registers.
10326 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10327 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10328 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10331 /* Update stack and set back pointer unless this is V.4,
10332 for which it was done previously. */
10333 if (info->push_p && DEFAULT_ABI != ABI_V4)
10334 rs6000_emit_allocate_stack (info->total_size, FALSE);
10336 /* Set frame pointer, if needed. */
10337 if (frame_pointer_needed)
10339 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10341 RTX_FRAME_RELATED_P (insn) = 1;
10344 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10345 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10346 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10347 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10349 /* If emit_load_toc_table will use the link register, we need to save
10350 it. We use R11 for this purpose because emit_load_toc_table
10351 can use register 0. This allows us to use a plain 'blr' to return
10352 from the procedure more often. */
10353 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10354 && ! info->lr_save_p
10355 && EXIT_BLOCK_PTR->pred != NULL);
10356 if (save_LR_around_toc_setup)
10357 emit_move_insn (gen_rtx_REG (Pmode, 11),
10358 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10360 rs6000_emit_load_toc_table (TRUE);
10362 if (save_LR_around_toc_setup)
10363 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10364 gen_rtx_REG (Pmode, 11));
10367 if (DEFAULT_ABI == ABI_DARWIN
10368 && flag_pic && current_function_uses_pic_offset_table)
10370 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10372 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
10374 rs6000_maybe_dead (
10375 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10376 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10380 /* Write function prologue. */
/* Emit the textual prologue for the current function to FILE:
   optional debug dump of the computed frame layout, .extern
   directives for out-of-line FP save/restore helpers and AIX
   common-mode arithmetic routines, and -- when the target has no
   "prologue" insn pattern -- the RTL prologue expanded here and fed
   through final().  SIZE is unused.
   NOTE(review): this listing is elided; several declarations and
   braces between the numbered lines are missing from view.  */
10383 rs6000_output_function_prologue (file, size)
10385 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10387 rs6000_stack_t *info = rs6000_stack_info ();
10389 if (TARGET_DEBUG_STACK)
10390 debug_stack_info (info);
/* If FP registers are saved out of line, declare the helper symbols
   (SAVE_FP_PREFIX/SUFFIX name e.g. _savefN/_restfN).  */
10392 /* Write .extern for any function we will call to save and restore
10394 if (info->first_fp_reg_save < 64
10395 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10396 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10397 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10398 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10399 RESTORE_FP_SUFFIX);
10401 /* Write .extern for AIX common mode routines, if needed. */
10402 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10404 fputs ("\t.extern __mulh\n", file);
10405 fputs ("\t.extern __mull\n", file);
10406 fputs ("\t.extern __divss\n", file);
10407 fputs ("\t.extern __divus\n", file);
10408 fputs ("\t.extern __quoss\n", file);
10409 fputs ("\t.extern __quous\n", file);
/* Emit these .externs only once per output file.  */
10410 common_mode_defined = 1;
/* No prologue insn pattern: build the prologue as RTL and print it
   ourselves via final().  */
10413 if (! HAVE_prologue)
10417 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10418 the "toplevel" insn chain. */
10419 emit_note (0, NOTE_INSN_DELETED)\
;
10420 rs6000_emit_prologue ();
10421 emit_note (0, NOTE_INSN_DELETED);
10423 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10427 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10429 INSN_ADDRESSES_NEW (insn, addr);
10434 if (TARGET_DEBUG_STACK)
10435 debug_rtx_list (get_insns (), 100);
10436 final (get_insns (), file, FALSE, FALSE);
/* Advance the counter used to generate per-function PIC base labels.  */
10440 rs6000_pic_labelno++;
10443 /* Emit function epilogue as insns.
10445 At present, dwarf2out_frame_debug_expr doesn't understand
10446 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10447 anywhere in the epilogue. Most of the insns below would in any case
10448 need special notes to explain where r11 is in relation to the stack. */
/* SIBCALL is nonzero when emitting the epilogue before a sibling call,
   in which case no return insn is emitted and some restores (FPRs via
   the out-of-line helper, EH data regs) are forced inline/skipped.
   NOTE(review): listing is elided -- declarations, braces, and some
   statements between the numbered lines are missing from view.  */
10451 rs6000_emit_epilogue (sibcall)
10454 rs6000_stack_t *info;
10455 int restoring_FPRs_inline;
10456 int using_load_multiple;
10457 int using_mfcr_multiple;
10458 int use_backchain_to_restore_sp;
/* r1 is the stack pointer; frame_reg_rtx is the base used to address
   saved registers and may be switched to r11 below.  */
10460 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10461 rtx frame_reg_rtx = sp_reg_rtx;
10462 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10463 int reg_size = TARGET_POWERPC64 ? 8 : 4;
/* Under the SPE ABI, GPRs are restored as 64-bit V2SI quantities.  */
10466 if (TARGET_SPE_ABI)
10468 reg_mode = V2SImode;
10472 info = rs6000_stack_info ();
10473 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10475 && info->first_gp_reg_save < 31);
10476 restoring_FPRs_inline = (sibcall
10477 || current_function_calls_eh_return
10478 || info->first_fp_reg_save == 64
10479 || FP_SAVE_INLINE (info->first_fp_reg_save));
10480 use_backchain_to_restore_sp = (frame_pointer_needed
10481 || current_function_calls_alloca
10482 || info->total_size > 32767);
/* On these cores restoring several CR fields with multiple mtcrf
   insns is profitable.  */
10483 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10484 || rs6000_cpu == PROCESSOR_PPC603
10485 || rs6000_cpu == PROCESSOR_PPC750
10488 /* If we have a frame pointer, a call to alloca, or a large stack
10489 frame, restore the old stack pointer using the backchain. Otherwise,
10490 we know what size to update it with. */
10491 if (use_backchain_to_restore_sp)
10493 /* Under V.4, don't reset the stack pointer until after we're done
10494 loading the saved registers. */
10495 if (DEFAULT_ABI == ABI_V4)
10496 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10498 emit_move_insn (frame_reg_rtx,
10499 gen_rtx_MEM (Pmode, sp_reg_rtx));
10502 else if (info->push_p)
/* For V.4 defer the sp bump; elsewhere pop the frame immediately.  */
10504 if (DEFAULT_ABI == ABI_V4)
10505 sp_offset = info->total_size;
10508 emit_insn (TARGET_32BIT
10509 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10510 GEN_INT (info->total_size))
10511 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10512 GEN_INT (info->total_size)));
10516 /* Restore AltiVec registers if needed. */
10517 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10521 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10522 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10524 rtx addr, areg, mem;
/* r0 holds the save-area offset for the [reg+reg] address below.  */
10526 areg = gen_rtx_REG (Pmode, 0);
10528 (areg, GEN_INT (info->altivec_save_offset
10530 + 16 * (i - info->first_altivec_reg_save)));
10532 /* AltiVec addressing mode is [reg+reg]. */
10533 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10534 mem = gen_rtx_MEM (V4SImode, addr);
10535 set_mem_alias_set (mem, rs6000_sr_alias_set);
10537 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10541 /* Restore VRSAVE if needed. */
10542 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10544 rtx addr, mem, reg;
10546 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10547 GEN_INT (info->vrsave_save_offset + sp_offset));
10548 mem = gen_rtx_MEM (SImode, addr);
10549 set_mem_alias_set (mem, rs6000_sr_alias_set);
10550 reg = gen_rtx_REG (SImode, 12);
10551 emit_move_insn (reg, mem);
10553 emit_insn (generate_set_vrsave (reg, info, 1));
10556 /* Get the old lr if we saved it. */
10557 if (info->lr_save_p)
10559 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10560 info->lr_save_offset + sp_offset);
10562 set_mem_alias_set (mem, rs6000_sr_alias_set);
10564 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10567 /* Get the old cr if we saved it. */
10568 if (info->cr_save_p)
10570 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10571 GEN_INT (info->cr_save_offset + sp_offset));
10572 rtx mem = gen_rtx_MEM (SImode, addr);
10574 set_mem_alias_set (mem, rs6000_sr_alias_set);
10576 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10579 /* Set LR here to try to overlap restores below. */
10580 if (info->lr_save_p)
10581 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10582 gen_rtx_REG (Pmode, 0));
10584 /* Load exception handler data registers, if needed. */
10585 if (current_function_calls_eh_return)
10587 unsigned int i, regno;
10593 regno = EH_RETURN_DATA_REGNO (i);
10594 if (regno == INVALID_REGNUM)
10597 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
10598 info->ehrd_offset + sp_offset
10599 + reg_size * (int) i);
10600 set_mem_alias_set (mem, rs6000_sr_alias_set);
10602 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
10606 /* Restore GPRs. This is done as a PARALLEL if we are using
10607 the load-multiple instructions. */
10608 if (using_load_multiple)
10611 p = rtvec_alloc (32 - info->first_gp_reg_save);
10612 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10614 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10615 GEN_INT (info->gp_save_offset
10618 rtx mem = gen_rtx_MEM (reg_mode, addr);
10620 set_mem_alias_set (mem, rs6000_sr_alias_set);
10623 gen_rtx_SET (VOIDmode,
10624 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
10627 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise restore each live call-saved GPR (and the PIC register
   when the ABI requires it) with an individual load.  */
10630 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10631 if ((regs_ever_live[info->first_gp_reg_save+i]
10632 && ! call_used_regs[info->first_gp_reg_save+i])
10633 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10634 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10635 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10637 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10638 GEN_INT (info->gp_save_offset
10641 rtx mem = gen_rtx_MEM (reg_mode, addr);
10643 /* Restore 64-bit quantities for SPE. */
10644 if (TARGET_SPE_ABI)
10646 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
/* SPE evldd has a limited offset range; go through a scratch
   register when the constant does not fit.  */
10649 if (!SPE_CONST_OFFSET_OK (offset))
10651 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10652 emit_move_insn (b, GEN_INT (offset));
10655 b = GEN_INT (offset);
10657 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10658 mem = gen_rtx_MEM (V2SImode, addr);
10661 set_mem_alias_set (mem, rs6000_sr_alias_set);
10663 emit_move_insn (gen_rtx_REG (reg_mode,
10664 info->first_gp_reg_save + i), mem);
10667 /* Restore fpr's if we need to do it without calling a function. */
10668 if (restoring_FPRs_inline)
10669 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10670 if ((regs_ever_live[info->first_fp_reg_save+i]
10671 && ! call_used_regs[info->first_fp_reg_save+i]))
10674 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10675 GEN_INT (info->fp_save_offset
10678 mem = gen_rtx_MEM (DFmode, addr);
10679 set_mem_alias_set (mem, rs6000_sr_alias_set);
10681 emit_move_insn (gen_rtx_REG (DFmode,
10682 info->first_fp_reg_save + i),
10686 /* If we saved cr, restore it here. Just those that were used. */
10687 if (info->cr_save_p)
/* The saved CR image was loaded into r12 above.  */
10689 rtx r12_rtx = gen_rtx_REG (SImode, 12);
10692 if (using_mfcr_multiple)
/* Count the live call-saved CR fields to decide between one
   combined PARALLEL of mtcrf's and individual moves.  */
10694 for (i = 0; i < 8; i++)
10695 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10701 if (using_mfcr_multiple && count > 1)
10706 p = rtvec_alloc (count);
10709 for (i = 0; i < 8; i++)
10710 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10712 rtvec r = rtvec_alloc (2);
10713 RTVEC_ELT (r, 0) = r12_rtx;
10714 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
10715 RTVEC_ELT (p, ndx) =
10716 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
10717 gen_rtx_UNSPEC (CCmode, r, 20));
10720 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10725 for (i = 0; i < 8; i++)
10726 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10728 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
10734 /* If this is V.4, unwind the stack pointer after all of the loads
10735 have been done. We need to emit a block here so that sched
10736 doesn't decide to move the sp change before the register restores
10737 (which may not have any obvious dependency on the stack). This
10738 doesn't hurt performance, because there is no scheduling that can
10739 be done after this point. */
10740 if (DEFAULT_ABI == ABI_V4)
10742 if (frame_reg_rtx != sp_reg_rtx)
10743 rs6000_emit_stack_tie ();
10745 if (use_backchain_to_restore_sp)
10747 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
10749 else if (sp_offset != 0)
10751 emit_insn (Pmode == SImode
10752 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10753 GEN_INT (sp_offset))
10754 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10755 GEN_INT (sp_offset)));
/* For __builtin_eh_return, apply the stack adjustment computed by
   the EH machinery on top of the normal unwind.  */
10759 if (current_function_calls_eh_return)
10761 rtx sa = EH_RETURN_STACKADJ_RTX;
10762 emit_insn (Pmode == SImode
10763 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
10764 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
/* Build the return insn: a PARALLEL of (return), (use LR), and --
   when branching to the out-of-line FP restore helper -- a use of
   its symbol plus a SET for every FPR it reloads.  */
10770 if (! restoring_FPRs_inline)
10771 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
10773 p = rtvec_alloc (2);
10775 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
10776 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10777 gen_rtx_REG (Pmode,
10778 LINK_REGISTER_REGNUM));
10780 /* If we have to restore more than two FP registers, branch to the
10781 restore function. It will return to our caller. */
10782 if (! restoring_FPRs_inline)
10786 const char *alloc_rname;
10788 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
10789 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
10790 alloc_rname = ggc_strdup (rname);
10791 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
10792 gen_rtx_SYMBOL_REF (Pmode,
10795 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10798 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
10799 GEN_INT (info->fp_save_offset + 8*i));
10800 mem = gen_rtx_MEM (DFmode, addr);
10801 set_mem_alias_set (mem, rs6000_sr_alias_set);
10803 RTVEC_ELT (p, i+3) =
10804 gen_rtx_SET (VOIDmode,
10805 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
10810 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
10814 /* Write function epilogue. */
/* Emit the textual epilogue for the current function to FILE: when
   there is no "epilogue" insn pattern, expand the RTL epilogue here
   and print it via final(); then, for AIX, emit the traceback table
   that follows the function body.  SIZE is unused.
   NOTE(review): listing is elided; some declarations and braces
   between the numbered lines are missing from view.  */
10817 rs6000_output_function_epilogue (file, size)
10819 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10821 rs6000_stack_t *info = rs6000_stack_info ();
/* Optional traceback fields are suppressed when optimizing for size
   or targeting ELF.  */
10822 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
10824 if (! HAVE_epilogue)
10826 rtx insn = get_last_insn ();
10827 /* If the last insn was a BARRIER, we don't have to write anything except
10828 the trace table. */
10829 if (GET_CODE (insn) == NOTE)
10830 insn = prev_nonnote_insn (insn);
10831 if (insn == 0 || GET_CODE (insn) != BARRIER)
10833 /* This is slightly ugly, but at least we don't have two
10834 copies of the epilogue-emitting code. */
10837 /* A NOTE_INSN_DELETED is supposed to be at the start
10838 and end of the "toplevel" insn chain. */
10839 emit_note (0, NOTE_INSN_DELETED);
10840 rs6000_emit_epilogue (FALSE);
10841 emit_note (0, NOTE_INSN_DELETED);
10843 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10847 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10849 INSN_ADDRESSES_NEW (insn, addr);
10854 if (TARGET_DEBUG_STACK)
10855 debug_rtx_list (get_insns (), 100);
10856 final (get_insns (), file, FALSE, FALSE);
10861 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10864 We don't output a traceback table if -finhibit-size-directive was
10865 used. The documentation for -finhibit-size-directive reads
10866 ``don't output a @code{.size} assembler directive, or anything
10867 else that would cause trouble if the function is split in the
10868 middle, and the two halves are placed at locations far apart in
10869 memory.'' The traceback table has this property, since it
10870 includes the offset from the start of the function to the
10871 traceback table itself.
10873 System V.4 Powerpc's (and the embedded ABI derived from it) use a
10874 different traceback table. */
10875 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
10877 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10878 const char *language_string = lang_hooks.name;
10879 int fixed_parms = 0, float_parms = 0, parm_info = 0;
10882 while (*fname == '.') /* V.4 encodes . in the name */
10885 /* Need label immediately before tbtab, so we can compute its offset
10886 from the function start. */
10889 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
10890 ASM_OUTPUT_LABEL (file, fname);
10892 /* The .tbtab pseudo-op can only be used for the first eight
10893 expressions, since it can't handle the possibly variable
10894 length fields that follow. However, if you omit the optional
10895 fields, the assembler outputs zeros for all optional fields
10896 anyways, giving each variable length field its minimum length
10897 (as defined in sys/debug.h). Thus we can not use the .tbtab
10898 pseudo-op at all. */
10900 /* An all-zero word flags the start of the tbtab, for debuggers
10901 that have to find it by searching forward from the entry
10902 point or from the current pc. */
10903 fputs ("\t.long 0\n", file);
10905 /* Tbtab format type. Use format type 0. */
10906 fputs ("\t.byte 0,", file);
10908 /* Language type. Unfortunately, there doesn't seem to be any
10909 official way to get this info, so we use language_string. C
10910 is 0. C++ is 9. No number defined for Obj-C, so use the
10911 value for C for now. There is no official value for Java,
10912 although IBM appears to be using 13. There is no official value
10913 for Chill, so we've chosen 44 pseudo-randomly. */
10914 if (! strcmp (language_string, "GNU C")
10915 || ! strcmp (language_string, "GNU Objective-C"))
10917 else if (! strcmp (language_string, "GNU F77"))
10919 else if (! strcmp (language_string, "GNU Ada"))
10921 else if (! strcmp (language_string, "GNU Pascal"))
10923 else if (! strcmp (language_string, "GNU C++"))
10925 else if (! strcmp (language_string, "GNU Java"))
10927 else if (! strcmp (language_string, "GNU CHILL"))
10931 fprintf (file, "%d,", i);
10933 /* 8 single bit fields: global linkage (not set for C extern linkage,
10934 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
10935 from start of procedure stored in tbtab, internal function, function
10936 has controlled storage, function has no toc, function uses fp,
10937 function logs/aborts fp operations. */
10938 /* Assume that fp operations are used if any fp reg must be saved. */
10939 fprintf (file, "%d,",
10940 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
10942 /* 6 bitfields: function is interrupt handler, name present in
10943 proc table, function calls alloca, on condition directives
10944 (controls stack walks, 3 bits), saves condition reg, saves
10946 /* The `function calls alloca' bit seems to be set whenever reg 31 is
10947 set up as a frame pointer, even when there is no alloca call. */
10948 fprintf (file, "%d,",
10949 ((optional_tbtab << 6)
10950 | ((optional_tbtab & frame_pointer_needed) << 5)
10951 | (info->cr_save_p << 1)
10952 | (info->lr_save_p)));
10954 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
10956 fprintf (file, "%d,",
10957 (info->push_p << 7) | (64 - info->first_fp_reg_save));
10959 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
10960 fprintf (file, "%d,", (32 - first_reg_to_save ()));
10962 if (optional_tbtab)
10964 /* Compute the parameter info from the function decl argument
/* parm_info is a left-adjusted bit vector: 2 bits per FP parameter
   (10 = single, 11 = double), 1 zero bit per fixed parameter.  */
10967 int next_parm_info_bit = 31;
10969 for (decl = DECL_ARGUMENTS (current_function_decl);
10970 decl; decl = TREE_CHAIN (decl))
10972 rtx parameter = DECL_INCOMING_RTL (decl);
10973 enum machine_mode mode = GET_MODE (parameter);
/* Only parameters arriving in registers are described.  */
10975 if (GET_CODE (parameter) == REG)
10977 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
10983 if (mode == SFmode)
10985 else if (mode == DFmode)
10990 /* If only one bit will fit, don't or in this entry. */
10991 if (next_parm_info_bit > 0)
10992 parm_info |= (bits << (next_parm_info_bit - 1));
10993 next_parm_info_bit -= 2;
/* Fixed-point parameter: count in words, one vector bit each.  */
10997 fixed_parms += ((GET_MODE_SIZE (mode)
10998 + (UNITS_PER_WORD - 1))
11000 next_parm_info_bit -= 1;
11006 /* Number of fixed point parameters. */
11007 /* This is actually the number of words of fixed point parameters; thus
11008 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11009 fprintf (file, "%d,", fixed_parms);
11011 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11013 /* This is actually the number of fp registers that hold parameters;
11014 and thus the maximum value is 13. */
11015 /* Set parameters on stack bit if parameters are not in their original
11016 registers, regardless of whether they are on the stack? Xlc
11017 seems to set the bit when not optimizing. */
11018 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11020 if (! optional_tbtab)
11023 /* Optional fields follow. Some are variable length. */
11025 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11026 11 double float. */
11027 /* There is an entry for each parameter in a register, in the order that
11028 they occur in the parameter list. Any intervening arguments on the
11029 stack are ignored. If the list overflows a long (max possible length
11030 34 bits) then completely leave off all elements that don't fit. */
11031 /* Only emit this long if there was at least one parameter. */
11032 if (fixed_parms || float_parms)
11033 fprintf (file, "\t.long %d\n", parm_info);
11035 /* Offset from start of code to tb table. */
11036 fputs ("\t.long ", file);
11037 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
/* NOTE(review): the two branches below (RS6000_OUTPUT_BASENAME vs
   assemble_name) appear to be conditionally compiled; the #if/#else
   lines are elided from this listing -- confirm against full source.  */
11039 RS6000_OUTPUT_BASENAME (file, fname);
11041 assemble_name (file, fname);
11043 fputs ("-.", file);
11045 RS6000_OUTPUT_BASENAME (file, fname);
11047 assemble_name (file, fname);
11051 /* Interrupt handler mask. */
11052 /* Omit this long, since we never set the interrupt handler bit
11055 /* Number of CTL (controlled storage) anchors. */
11056 /* Omit this long, since the has_ctl bit is never set above. */
11058 /* Displacement into stack of each CTL anchor. */
11059 /* Omit this list of longs, because there are no CTL anchors. */
11061 /* Length of function name. */
11062 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11064 /* Function name. */
11065 assemble_string (fname, strlen (fname));
11067 /* Register for alloca automatic storage; this is always reg 31.
11068 Only emit this if the alloca bit was set above. */
11069 if (frame_pointer_needed)
11070 fputs ("\t.byte 31\n", file);
11072 fputs ("\t.align 2\n", file);
11077 /* A C compound statement that outputs the assembler code for a thunk
11078 function, used to implement C++ virtual function calls with
11079 multiple inheritance. The thunk acts as a wrapper around a virtual
11080 function, adjusting the implicit object parameter before handing
11081 control off to the real function.
11083 First, emit code to add the integer DELTA to the location that
11084 contains the incoming first argument. Assume that this argument
11085 contains a pointer, and is the one used to pass the `this' pointer
11086 in C++. This is the incoming argument *before* the function
11087 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11088 values of all other incoming arguments.
11090 After the addition, emit code to jump to FUNCTION, which is a
11091 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11092 not touch the return address. Hence returning from FUNCTION will
11093 return to whoever called the current `thunk'.
11095 The effect must be as if FUNCTION had been called directly with the
11096 adjusted first argument. This macro is responsible for emitting
11097 all of the code for a thunk function; output_function_prologue()
11098 and output_function_epilogue() are not invoked.
11100 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11101 been extracted from it.) It might possibly be useful on some
11102 targets, but probably not.
11104 If you do not define this macro, the target-independent code in the
11105 C++ frontend will generate a less efficient heavyweight thunk that
11106 calls FUNCTION instead of jumping to it. The generic approach does
11107 not support varargs. */
/* NOTE(review): listing is elided -- declarations, braces, several
   case labels, and some statements between the numbered lines are
   missing from view.  */
11110 output_mi_thunk (file, thunk_fndecl, delta, function)
11112 tree thunk_fndecl ATTRIBUTE_UNUSED;
/* `this' arrives in r3, or r4 when the return value is an aggregate
   returned via an invisible first argument.  */
11116 const char *this_reg =
11117 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
11118 const char *prefix;
11120 const char *r0 = reg_names[0];
11121 const char *toc = reg_names[2];
11122 const char *schain = reg_names[11];
11123 const char *r12 = reg_names[12];
/* Per-translation-unit counter for the Lthunk TOC-entry labels.  */
11125 static int labelno = 0;
11127 /* Small constants that can be done by one add instruction. */
11128 if (delta >= -32768 && delta <= 32767)
11130 if (! TARGET_NEW_MNEMONICS)
11131 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
11133 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
11136 /* Large constants that can be done by one addis instruction. */
11137 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
11138 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
11141 /* 32-bit constants that can be done by an add and addis instruction. */
11142 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
11144 /* Break into two pieces, propagating the sign bit from the low
11145 word to the upper word. */
11146 int delta_high = delta >> 16;
11147 int delta_low = delta & 0xffff;
11148 if ((delta_low & 0x8000) != 0)
11151 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
11154 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
11157 if (! TARGET_NEW_MNEMONICS)
11158 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
11160 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
11163 /* 64-bit constants, fixme */
11167 /* Get the prefix in front of the names. */
11168 switch (DEFAULT_ABI)
11178 case ABI_AIX_NODESC:
11183 /* If the function is compiled in this module, jump to it directly.
11184 Otherwise, load up its address and jump to it. */
11186 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
/* Direct branch: target in this file and not forced to a long call
   (a "shortcall" attribute overrides "longcall").  */
11188 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
11189 && (! lookup_attribute ("longcall",
11190 TYPE_ATTRIBUTES (TREE_TYPE (function)))
11191 || lookup_attribute ("shortcall",
11192 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
11195 fprintf (file, "\tb %s", prefix);
11196 assemble_name (file, fname);
11197 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
/* Indirect case: per-ABI sequence to load the target address and
   branch through CTR (AIX) or via the PLT/Mach-O stub.  */
11203 switch (DEFAULT_ABI)
11209 /* Set up a TOC entry for the function. */
11210 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
11212 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
11215 if (TARGET_MINIMAL_TOC)
11216 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
11219 fputs ("\t.tc ", file);
11220 assemble_name (file, fname);
11221 fputs ("[TC],", file);
11223 assemble_name (file, fname);
/* Load the function descriptor address from the TOC, then its
   entry point, TOC pointer and static chain, and branch via CTR.  */
11226 if (TARGET_MINIMAL_TOC)
11227 asm_fprintf (file, (TARGET_32BIT)
11228 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
11229 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
11230 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
11231 assemble_name (file, buf);
11232 if (TARGET_ELF && TARGET_MINIMAL_TOC)
11233 fputs ("-(.LCTOC1)", file);
11234 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
11236 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
11240 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
11243 asm_fprintf (file, "\tmtctr %s\n", r0);
11245 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
11248 asm_fprintf (file, "\tbctr\n");
11251 case ABI_AIX_NODESC:
11253 fprintf (file, "\tb %s", prefix);
11254 assemble_name (file, fname);
11255 if (flag_pic) fputs ("@plt", file);
/* Darwin: branch to the Mach-O PIC stub when the symbol is not
   defined in this file.  */
11261 fprintf (file, "\tb %s", prefix);
11262 if (flag_pic && !machopic_name_defined_p (fname))
11263 assemble_name (file, machopic_stub_name (fname));
11265 assemble_name (file, fname);
11274 /* A quick summary of the various types of 'constant-pool tables'
11277 Target Flags Name One table per
11278 AIX (none) AIX TOC object file
11279 AIX -mfull-toc AIX TOC object file
11280 AIX -mminimal-toc AIX minimal TOC translation unit
11281 SVR4/EABI (none) SVR4 SDATA object file
11282 SVR4/EABI -fpic SVR4 pic object file
11283 SVR4/EABI -fPIC SVR4 PIC translation unit
11284 SVR4/EABI -mrelocatable EABI TOC function
11285 SVR4/EABI -maix AIX TOC object file
11286 SVR4/EABI -maix -mminimal-toc
11287 AIX minimal TOC translation unit
11289 Name Reg. Set by entries contains:
11290 made by addrs? fp? sum?
11292 AIX TOC 2 crt0 as Y option option
11293 AIX minimal TOC 30 prolog gcc Y Y option
11294 SVR4 SDATA 13 crt0 gcc N Y N
11295 SVR4 pic 30 prolog ld Y not yet N
11296 SVR4 PIC 30 prolog gcc Y option option
11297 EABI TOC 30 prolog gcc Y option option
11301 /* Hash table stuff for keeping track of TOC entries. */
11303 struct toc_hash_struct
11305 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
11306 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
11308 enum machine_mode key_mode;
11312 static htab_t toc_hash_table;
11314 /* Hash functions for the hash table. */
11317 rs6000_hash_constant (k)
11320 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
11321 const char *format = GET_RTX_FORMAT (GET_CODE (k));
11322 int flen = strlen (format);
11325 if (GET_CODE (k) == LABEL_REF)
11326 return result * 1231 + X0INT (XEXP (k, 0), 3);
11328 if (GET_CODE (k) == CODE_LABEL)
11333 for (; fidx < flen; fidx++)
11334 switch (format[fidx])
11339 const char *str = XSTR (k, fidx);
11340 len = strlen (str);
11341 result = result * 613 + len;
11342 for (i = 0; i < len; i++)
11343 result = result * 613 + (unsigned) str[i];
11348 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11352 result = result * 613 + (unsigned) XINT (k, fidx);
11355 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11356 result = result * 613 + (unsigned) XWINT (k, fidx);
11360 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11361 result = result * 613 + (unsigned) (XWINT (k, fidx)
11372 toc_hash_function (hash_entry)
11373 const void * hash_entry;
11375 const struct toc_hash_struct *thc =
11376 (const struct toc_hash_struct *) hash_entry;
11377 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11380 /* Compare H1 and H2 for equivalence. */
11383 toc_hash_eq (h1, h2)
11387 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11388 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11390 if (((const struct toc_hash_struct *) h1)->key_mode
11391 != ((const struct toc_hash_struct *) h2)->key_mode)
11394 return rtx_equal_p (r1, r2);
11397 /* Mark the hash table-entry HASH_ENTRY. */
11400 toc_hash_mark_entry (hash_slot, unused)
11402 void * unused ATTRIBUTE_UNUSED;
11404 const struct toc_hash_struct * hash_entry =
11405 *(const struct toc_hash_struct **) hash_slot;
11406 rtx r = hash_entry->key;
11407 ggc_set_mark (hash_entry);
11408 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11409 if (GET_CODE (r) == LABEL_REF)
11412 ggc_set_mark (XEXP (r, 0));
11419 /* Mark all the elements of the TOC hash-table *HT. */
11422 toc_hash_mark_table (vht)
11427 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11430 /* These are the names given by the C++ front-end to vtables, and
11431 vtable-like objects. Ideally, this logic should not be here;
11432 instead, there should be some programmatic way of inquiring as
11433 to whether or not an object is a vtable. */
/* Return nonzero if NAME is the assembler name of a C++ vtable or
   vtable-like object (old g++ "_vt." names, or Itanium-ABI mangled
   vtable "_ZTV", VTT "_ZTT" and construction-vtable "_ZTC" prefixes).

   Bug fix: the macro previously ignored its NAME parameter and
   expanded to references to a variable literally spelled `name',
   silently capturing whatever `name' happened to be in scope at the
   expansion site.  Both uses in this file pass a local `name', so
   using the parameter (properly parenthesized) preserves behavior
   while making the macro safe to call with any argument.  */
#define VTABLE_NAME_P(NAME)					\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0		\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0		\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0		\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11442 rs6000_output_symbol_ref (file, x)
11446 /* Currently C++ toc references to vtables can be emitted before it
11447 is decided whether the vtable is public or private. If this is
11448 the case, then the linker will eventually complain that there is
11449 a reference to an unknown section. Thus, for vtables only,
11450 we emit the TOC reference to reference the symbol and not the
11452 const char *name = XSTR (x, 0);
11454 if (VTABLE_NAME_P (name))
11456 RS6000_OUTPUT_BASENAME (file, name);
11459 assemble_name (file, name);
11462 /* Output a TOC entry. We derive the entry name from what is being
11466 output_toc (file, x, labelno, mode)
11470 enum machine_mode mode;
11473 const char *name = buf;
11474 const char *real_name;
11481 /* When the linker won't eliminate them, don't output duplicate
11482 TOC entries (this happens on AIX if there is any kind of TOC,
11483 and on SVR4 under -fPIC or -mrelocatable). */
11486 struct toc_hash_struct *h;
11489 h = ggc_alloc (sizeof (*h));
11491 h->key_mode = mode;
11492 h->labelno = labelno;
11494 found = htab_find_slot (toc_hash_table, h, 1);
11495 if (*found == NULL)
11497 else /* This is indeed a duplicate.
11498 Set this label equal to that label. */
11500 fputs ("\t.set ", file);
11501 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11502 fprintf (file, "%d,", labelno);
11503 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11504 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
11510 /* If we're going to put a double constant in the TOC, make sure it's
11511 aligned properly when strict alignment is on. */
11512 if (GET_CODE (x) == CONST_DOUBLE
11513 && STRICT_ALIGNMENT
11514 && GET_MODE_BITSIZE (mode) >= 64
11515 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
11516 ASM_OUTPUT_ALIGN (file, 3);
11519 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
11521 /* Handle FP constants specially. Note that if we have a minimal
11522 TOC, things we put here aren't actually in the TOC, so we can allow
11524 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
11526 REAL_VALUE_TYPE rv;
11529 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11530 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
11534 if (TARGET_MINIMAL_TOC)
11535 fputs (DOUBLE_INT_ASM_OP, file);
11537 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11538 k[0] & 0xffffffff, k[1] & 0xffffffff);
11539 fprintf (file, "0x%lx%08lx\n",
11540 k[0] & 0xffffffff, k[1] & 0xffffffff);
11545 if (TARGET_MINIMAL_TOC)
11546 fputs ("\t.long ", file);
11548 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11549 k[0] & 0xffffffff, k[1] & 0xffffffff);
11550 fprintf (file, "0x%lx,0x%lx\n",
11551 k[0] & 0xffffffff, k[1] & 0xffffffff);
11555 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
11557 REAL_VALUE_TYPE rv;
11560 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11561 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
11565 if (TARGET_MINIMAL_TOC)
11566 fputs (DOUBLE_INT_ASM_OP, file);
11568 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11569 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
11574 if (TARGET_MINIMAL_TOC)
11575 fputs ("\t.long ", file);
11577 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11578 fprintf (file, "0x%lx\n", l & 0xffffffff);
11582 else if (GET_MODE (x) == VOIDmode
11583 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
11585 unsigned HOST_WIDE_INT low;
11586 HOST_WIDE_INT high;
11588 if (GET_CODE (x) == CONST_DOUBLE)
11590 low = CONST_DOUBLE_LOW (x);
11591 high = CONST_DOUBLE_HIGH (x);
11594 #if HOST_BITS_PER_WIDE_INT == 32
11597 high = (low & 0x80000000) ? ~0 : 0;
11601 low = INTVAL (x) & 0xffffffff;
11602 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
11606 /* TOC entries are always Pmode-sized, but since this
11607 is a bigendian machine then if we're putting smaller
11608 integer constants in the TOC we have to pad them.
11609 (This is still a win over putting the constants in
11610 a separate constant pool, because then we'd have
11611 to have both a TOC entry _and_ the actual constant.)
11613 For a 32-bit target, CONST_INT values are loaded and shifted
11614 entirely within `low' and can be stored in one TOC entry. */
11616 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
11617 abort ();/* It would be easy to make this work, but it doesn't now. */
11619 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
11620 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
11621 POINTER_SIZE, &low, &high, 0);
11625 if (TARGET_MINIMAL_TOC)
11626 fputs (DOUBLE_INT_ASM_OP, file);
11628 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11629 (long) high & 0xffffffff, (long) low & 0xffffffff);
11630 fprintf (file, "0x%lx%08lx\n",
11631 (long) high & 0xffffffff, (long) low & 0xffffffff);
11636 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
11638 if (TARGET_MINIMAL_TOC)
11639 fputs ("\t.long ", file);
11641 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11642 (long) high & 0xffffffff, (long) low & 0xffffffff);
11643 fprintf (file, "0x%lx,0x%lx\n",
11644 (long) high & 0xffffffff, (long) low & 0xffffffff);
11648 if (TARGET_MINIMAL_TOC)
11649 fputs ("\t.long ", file);
11651 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
11652 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
11658 if (GET_CODE (x) == CONST)
11660 if (GET_CODE (XEXP (x, 0)) != PLUS)
11663 base = XEXP (XEXP (x, 0), 0);
11664 offset = INTVAL (XEXP (XEXP (x, 0), 1));
11667 if (GET_CODE (base) == SYMBOL_REF)
11668 name = XSTR (base, 0);
11669 else if (GET_CODE (base) == LABEL_REF)
11670 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
11671 else if (GET_CODE (base) == CODE_LABEL)
11672 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
11676 real_name = (*targetm.strip_name_encoding) (name);
11677 if (TARGET_MINIMAL_TOC)
11678 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
11681 fprintf (file, "\t.tc %s", real_name);
11684 fprintf (file, ".N%d", - offset);
11686 fprintf (file, ".P%d", offset);
11688 fputs ("[TC],", file);
11691 /* Currently C++ toc references to vtables can be emitted before it
11692 is decided whether the vtable is public or private. If this is
11693 the case, then the linker will eventually complain that there is
11694 a TOC reference to an unknown section. Thus, for vtables only,
11695 we emit the TOC reference to reference the symbol and not the
11697 if (VTABLE_NAME_P (name))
11699 RS6000_OUTPUT_BASENAME (file, name);
11701 fprintf (file, "%d", offset);
11702 else if (offset > 0)
11703 fprintf (file, "+%d", offset);
11706 output_addr_const (file, x);
11710 /* Output an assembler pseudo-op to write an ASCII string of N characters
11711 starting at P to FILE.
11713 On the RS/6000, we have to do this using the .byte operation and
11714 write out special characters outside the quoted string.
11715 Also, the assembler is broken; very long strings are truncated,
11716 so we must artificially break them up early. */
11719 output_ascii (file, p, n)
11725 int i, count_string;
11726 const char *for_string = "\t.byte \"";
11727 const char *for_decimal = "\t.byte ";
11728 const char *to_close = NULL;
11731 for (i = 0; i < n; i++)
11734 if (c >= ' ' && c < 0177)
11737 fputs (for_string, file);
11740 /* Write two quotes to get one. */
11748 for_decimal = "\"\n\t.byte ";
11752 if (count_string >= 512)
11754 fputs (to_close, file);
11756 for_string = "\t.byte \"";
11757 for_decimal = "\t.byte ";
11765 fputs (for_decimal, file);
11766 fprintf (file, "%d", c);
11768 for_string = "\n\t.byte \"";
11769 for_decimal = ", ";
11775 /* Now close the string if we have written one. Then end the line. */
11777 fputs (to_close, file);
11780 /* Generate a unique section name for FILENAME for a section type
11781 represented by SECTION_DESC. Output goes into BUF.
11783 SECTION_DESC can be any string, as long as it is different for each
11784 possible section type.
11786 We name the section in the same manner as xlc. The name begins with an
11787 underscore followed by the filename (after stripping any leading directory
11788 names) with the last period replaced by the string SECTION_DESC. If
11789 FILENAME does not contain a period, SECTION_DESC is appended to the end of
11793 rs6000_gen_section_name (buf, filename, section_desc)
11795 const char *filename;
11796 const char *section_desc;
11798 const char *q, *after_last_slash, *last_period = 0;
11802 after_last_slash = filename;
11803 for (q = filename; *q; q++)
11806 after_last_slash = q + 1;
11807 else if (*q == '.')
11811 len = strlen (after_last_slash) + strlen (section_desc) + 2;
11812 *buf = (char *) permalloc (len);
11817 for (q = after_last_slash; *q; q++)
11819 if (q == last_period)
11821 strcpy (p, section_desc);
11822 p += strlen (section_desc);
11825 else if (ISALNUM (*q))
11829 if (last_period == 0)
11830 strcpy (p, section_desc);
11835 /* Emit profile function. */
11838 output_profile_hook (labelno)
11841 if (DEFAULT_ABI == ABI_AIX)
11844 const char *label_name;
11847 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
11848 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
11849 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
11851 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
11854 else if (DEFAULT_ABI == ABI_DARWIN)
11856 const char *mcount_name = RS6000_MCOUNT;
11857 int caller_addr_regno = LINK_REGISTER_REGNUM;
11859 /* Be conservative and always set this, at least for now. */
11860 current_function_uses_pic_offset_table = 1;
11863 /* For PIC code, set up a stub and collect the caller's address
11864 from r0, which is where the prologue puts it. */
11867 mcount_name = machopic_stub_name (mcount_name);
11868 if (current_function_uses_pic_offset_table)
11869 caller_addr_regno = 0;
11872 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
11874 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
11878 /* Write function profiler code. */
11881 output_function_profiler (file, labelno)
11888 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
11889 switch (DEFAULT_ABI)
11896 /* Fall through. */
11898 case ABI_AIX_NODESC:
11901 warning ("no profiling of 64-bit code for this ABI");
11904 fprintf (file, "\tmflr %s\n", reg_names[0]);
11907 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
11908 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
11909 reg_names[0], save_lr, reg_names[1]);
11910 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
11911 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
11912 assemble_name (file, buf);
11913 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
11915 else if (flag_pic > 1)
11917 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
11918 reg_names[0], save_lr, reg_names[1]);
11919 /* Now, we need to get the address of the label. */
11920 fputs ("\tbl 1f\n\t.long ", file);
11921 assemble_name (file, buf);
11922 fputs ("-.\n1:", file);
11923 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
11924 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
11925 reg_names[0], reg_names[11]);
11926 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
11927 reg_names[0], reg_names[0], reg_names[11]);
11931 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
11932 assemble_name (file, buf);
11933 fputs ("@ha\n", file);
11934 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
11935 reg_names[0], save_lr, reg_names[1]);
11936 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
11937 assemble_name (file, buf);
11938 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
11941 if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
11943 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
11944 reg_names[STATIC_CHAIN_REGNUM],
11946 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
11947 asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
11948 reg_names[STATIC_CHAIN_REGNUM],
11952 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
11953 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
11958 /* Don't do anything, done in output_profile_hook (). */
11963 /* Adjust the cost of a scheduling dependency. Return the new cost of
11964 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
11967 rs6000_adjust_cost (insn, link, dep_insn, cost)
11970 rtx dep_insn ATTRIBUTE_UNUSED;
11973 if (! recog_memoized (insn))
11976 if (REG_NOTE_KIND (link) != 0)
11979 if (REG_NOTE_KIND (link) == 0)
11981 /* Data dependency; DEP_INSN writes a register that INSN reads
11982 some cycles later. */
11983 switch (get_attr_type (insn))
11986 /* Tell the first scheduling pass about the latency between
11987 a mtctr and bctr (and mtlr and br/blr). The first
11988 scheduling pass will not know about this latency since
11989 the mtctr instruction, which has the latency associated
11990 to it, will be generated by reload. */
11991 return TARGET_POWER ? 5 : 4;
11993 /* Leave some extra cycles between a compare and its
11994 dependent branch, to inhibit expensive mispredicts. */
11995 if ((rs6000_cpu_attr == CPU_PPC603
11996 || rs6000_cpu_attr == CPU_PPC604
11997 || rs6000_cpu_attr == CPU_PPC604E
11998 || rs6000_cpu_attr == CPU_PPC620
11999 || rs6000_cpu_attr == CPU_PPC630
12000 || rs6000_cpu_attr == CPU_PPC750
12001 || rs6000_cpu_attr == CPU_PPC7400
12002 || rs6000_cpu_attr == CPU_PPC7450
12003 || rs6000_cpu_attr == CPU_POWER4)
12004 && recog_memoized (dep_insn)
12005 && (INSN_CODE (dep_insn) >= 0)
12006 && (get_attr_type (dep_insn) == TYPE_COMPARE
12007 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12008 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12009 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12014 /* Fall out to return default cost. */
12020 /* A C statement (sans semicolon) to update the integer scheduling
12021 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12022 INSN earlier, increase the priority to execute INSN later. Do not
12023 define this macro if you do not need to adjust the scheduling
12024 priorities of insns. */
12027 rs6000_adjust_priority (insn, priority)
12028 rtx insn ATTRIBUTE_UNUSED;
12031 /* On machines (like the 750) which have asymmetric integer units,
12032 where one integer unit can do multiply and divides and the other
12033 can't, reduce the priority of multiply/divide so it is scheduled
12034 before other integer operations. */
12037 if (! INSN_P (insn))
12040 if (GET_CODE (PATTERN (insn)) == USE)
12043 switch (rs6000_cpu_attr) {
12045 switch (get_attr_type (insn))
12052 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12053 priority, priority);
12054 if (priority >= 0 && priority < 0x01000000)
12064 /* Return how many instructions the machine can issue per cycle. */
12067 rs6000_issue_rate ()
12069 switch (rs6000_cpu_attr) {
12070 case CPU_RIOS1: /* ? */
12072 case CPU_PPC601: /* ? */
12092 /* Length in units of the trampoline for entering a nested function. */
12095 rs6000_trampoline_size ()
12099 switch (DEFAULT_ABI)
12105 ret = (TARGET_32BIT) ? 12 : 24;
12110 case ABI_AIX_NODESC:
12111 ret = (TARGET_32BIT) ? 40 : 48;
12118 /* Emit RTL insns to initialize the variable parts of a trampoline.
12119 FNADDR is an RTX for the address of the function's pure code.
12120 CXT is an RTX for the static chain value for the function. */
12123 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12128 enum machine_mode pmode = Pmode;
12129 int regsize = (TARGET_32BIT) ? 4 : 8;
12130 rtx ctx_reg = force_reg (pmode, cxt);
12132 switch (DEFAULT_ABI)
12137 /* Macros to shorten the code expansions below. */
12138 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12139 #define MEM_PLUS(addr,offset) \
12140 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12142 /* Under AIX, just build the 3 word function descriptor */
12145 rtx fn_reg = gen_reg_rtx (pmode);
12146 rtx toc_reg = gen_reg_rtx (pmode);
12147 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12148 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12149 emit_move_insn (MEM_DEREF (addr), fn_reg);
12150 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12151 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12155 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12158 case ABI_AIX_NODESC:
12159 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12160 FALSE, VOIDmode, 4,
12162 GEN_INT (rs6000_trampoline_size ()), SImode,
12172 /* Table of valid machine attributes. */
12174 const struct attribute_spec rs6000_attribute_table[] =
12176 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12177 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12178 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12179 { NULL, 0, 0, false, false, false, NULL }
12182 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12183 struct attribute_spec.handler. */
12186 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12189 tree args ATTRIBUTE_UNUSED;
12190 int flags ATTRIBUTE_UNUSED;
12191 bool *no_add_attrs;
12193 if (TREE_CODE (*node) != FUNCTION_TYPE
12194 && TREE_CODE (*node) != FIELD_DECL
12195 && TREE_CODE (*node) != TYPE_DECL)
12197 warning ("`%s' attribute only applies to functions",
12198 IDENTIFIER_POINTER (name));
12199 *no_add_attrs = true;
12205 /* Set longcall attributes on all functions declared when
12206 rs6000_default_long_calls is true. */
12208 rs6000_set_default_type_attributes (type)
12211 if (rs6000_default_long_calls
12212 && (TREE_CODE (type) == FUNCTION_TYPE
12213 || TREE_CODE (type) == METHOD_TYPE))
12214 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12216 TYPE_ATTRIBUTES (type));
12219 /* Return a reference suitable for calling a function with the
12220 longcall attribute. */
12223 rs6000_longcall_ref (call_ref)
12226 const char *call_name;
12229 if (GET_CODE (call_ref) != SYMBOL_REF)
12232 /* System V adds '.' to the internal name, so skip them. */
12233 call_name = XSTR (call_ref, 0);
12234 if (*call_name == '.')
12236 while (*call_name == '.')
12239 node = get_identifier (call_name);
12240 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12243 return force_reg (Pmode, call_ref);
12247 #ifdef USING_ELFOS_H
12249 /* A C statement or statements to switch to the appropriate section
12250 for output of RTX in mode MODE. You can assume that RTX is some
12251 kind of constant in RTL. The argument MODE is redundant except in
12252 the case of a `const_int' rtx. Select the section by calling
12253 `text_section' or one of the alternatives for other sections.
12255 Do not define this macro if you put all constants in the read-only
12259 rs6000_elf_select_rtx_section (mode, x, align)
12260 enum machine_mode mode;
12262 unsigned HOST_WIDE_INT align;
12264 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12267 default_elf_select_rtx_section (mode, x, align);
12270 /* A C statement or statements to switch to the appropriate
12271 section for output of DECL. DECL is either a `VAR_DECL' node
12272 or a constant of some sort. RELOC indicates whether forming
12273 the initial value of DECL requires link-time relocations. */
12276 rs6000_elf_select_section (decl, reloc, align)
12279 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
12281 int size = int_size_in_bytes (TREE_TYPE (decl));
12284 static void (* const sec_funcs[4]) PARAMS ((void)) = {
12285 &readonly_data_section,
12291 needs_sdata = (size > 0
12292 && size <= g_switch_value
12293 && rs6000_sdata != SDATA_NONE
12294 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
12296 if (TREE_CODE (decl) == STRING_CST)
12297 readonly = ! flag_writable_strings;
12298 else if (TREE_CODE (decl) == VAR_DECL)
12299 readonly = (! (flag_pic && reloc)
12300 && TREE_READONLY (decl)
12301 && ! TREE_SIDE_EFFECTS (decl)
12302 && DECL_INITIAL (decl)
12303 && DECL_INITIAL (decl) != error_mark_node
12304 && TREE_CONSTANT (DECL_INITIAL (decl)));
12305 else if (TREE_CODE (decl) == CONSTRUCTOR)
12306 readonly = (! (flag_pic && reloc)
12307 && ! TREE_SIDE_EFFECTS (decl)
12308 && TREE_CONSTANT (decl));
12311 if (needs_sdata && rs6000_sdata != SDATA_EABI)
12314 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
12317 /* A C statement to build up a unique section name, expressed as a
12318 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12319 RELOC indicates whether the initial value of EXP requires
12320 link-time relocations. If you do not define this macro, GCC will use
12321 the symbol name prefixed by `.' as the section name. Note - this
12322 macro can now be called for uninitialized data items as well as
12323 initialised data and functions. */
12326 rs6000_elf_unique_section (decl, reloc)
12334 const char *prefix;
12336 static const char *const prefixes[7][2] =
12338 { ".rodata.", ".gnu.linkonce.r." },
12339 { ".sdata2.", ".gnu.linkonce.s2." },
12340 { ".data.", ".gnu.linkonce.d." },
12341 { ".sdata.", ".gnu.linkonce.s." },
12342 { ".bss.", ".gnu.linkonce.b." },
12343 { ".sbss.", ".gnu.linkonce.sb." },
12344 { ".text.", ".gnu.linkonce.t." }
12347 if (TREE_CODE (decl) == FUNCTION_DECL)
12356 if (TREE_CODE (decl) == STRING_CST)
12357 readonly = ! flag_writable_strings;
12358 else if (TREE_CODE (decl) == VAR_DECL)
12359 readonly = (! (flag_pic && reloc)
12360 && TREE_READONLY (decl)
12361 && ! TREE_SIDE_EFFECTS (decl)
12362 && TREE_CONSTANT (DECL_INITIAL (decl)));
12364 size = int_size_in_bytes (TREE_TYPE (decl));
12365 needs_sdata = (size > 0
12366 && size <= g_switch_value
12367 && rs6000_sdata != SDATA_NONE
12368 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
12370 if (DECL_INITIAL (decl) == 0
12371 || DECL_INITIAL (decl) == error_mark_node)
12373 else if (! readonly)
12380 /* .sdata2 is only for EABI. */
12381 if (sec == 0 && rs6000_sdata != SDATA_EABI)
12387 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
12388 name = (*targetm.strip_name_encoding) (name);
12389 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
12390 len = strlen (name) + strlen (prefix);
12391 string = alloca (len + 1);
12393 sprintf (string, "%s%s", prefix, name);
12395 DECL_SECTION_NAME (decl) = build_string (len, string);
12399 /* If we are referencing a function that is static or is known to be
12400 in this file, make the SYMBOL_REF special. We can use this to indicate
12401 that we can branch to this function without emitting a no-op after the
12402 call. For real AIX calling sequences, we also replace the
12403 function name with the real name (1 or 2 leading .'s), rather than
12404 the function descriptor name. This saves a lot of overriding code
12405 to read the prefixes. */
12408 rs6000_elf_encode_section_info (decl, first)
12415 if (TREE_CODE (decl) == FUNCTION_DECL)
12417 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12418 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
12419 && ! DECL_WEAK (decl))
12420 SYMBOL_REF_FLAG (sym_ref) = 1;
12422 if (DEFAULT_ABI == ABI_AIX)
12424 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12425 size_t len2 = strlen (XSTR (sym_ref, 0));
12426 char *str = alloca (len1 + len2 + 1);
12429 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12431 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12434 else if (rs6000_sdata != SDATA_NONE
12435 && DEFAULT_ABI == ABI_V4
12436 && TREE_CODE (decl) == VAR_DECL)
12438 int size = int_size_in_bytes (TREE_TYPE (decl));
12439 tree section_name = DECL_SECTION_NAME (decl);
12440 const char *name = (char *)0;
12445 if (TREE_CODE (section_name) == STRING_CST)
12447 name = TREE_STRING_POINTER (section_name);
12448 len = TREE_STRING_LENGTH (section_name);
12454 if ((size > 0 && size <= g_switch_value)
12456 && ((len == sizeof (".sdata") - 1
12457 && strcmp (name, ".sdata") == 0)
12458 || (len == sizeof (".sdata2") - 1
12459 && strcmp (name, ".sdata2") == 0)
12460 || (len == sizeof (".sbss") - 1
12461 && strcmp (name, ".sbss") == 0)
12462 || (len == sizeof (".sbss2") - 1
12463 && strcmp (name, ".sbss2") == 0)
12464 || (len == sizeof (".PPC.EMB.sdata0") - 1
12465 && strcmp (name, ".PPC.EMB.sdata0") == 0)
12466 || (len == sizeof (".PPC.EMB.sbss0") - 1
12467 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
12469 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12470 size_t len = strlen (XSTR (sym_ref, 0));
12471 char *str = alloca (len + 2);
12474 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12475 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12480 static const char *
12481 rs6000_elf_strip_name_encoding (str)
12484 while (*str == '*' || *str == '@')
12489 #endif /* USING_ELFOS_H */
12492 /* Return a REG that occurs in ADDR with coefficient 1.
12493 ADDR can be effectively incremented by incrementing REG.
12495 r0 is special and we must not select it as an address
12496 register by this routine since our caller will try to
12497 increment the returned register via an "la" instruction. */
12500 find_addr_reg (addr)
12503 while (GET_CODE (addr) == PLUS)
12505 if (GET_CODE (XEXP (addr, 0)) == REG
12506 && REGNO (XEXP (addr, 0)) != 0)
12507 addr = XEXP (addr, 0);
12508 else if (GET_CODE (XEXP (addr, 1)) == REG
12509 && REGNO (XEXP (addr, 1)) != 0)
12510 addr = XEXP (addr, 1);
12511 else if (CONSTANT_P (XEXP (addr, 0)))
12512 addr = XEXP (addr, 1);
12513 else if (CONSTANT_P (XEXP (addr, 1)))
12514 addr = XEXP (addr, 0);
12518 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12524 rs6000_fatal_bad_address (op)
12527 fatal_insn ("bad address", op);
12530 /* Called to register all of our global variables with the garbage
12534 rs6000_add_gc_roots ()
12536 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
12537 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
12538 toc_hash_mark_table);
12544 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12545 reference and a constant. */
12548 symbolic_operand (op)
12551 switch (GET_CODE (op))
12558 return (GET_CODE (op) == SYMBOL_REF ||
12559 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12560 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12561 && GET_CODE (XEXP (op, 1)) == CONST_INT);
12568 #ifdef RS6000_LONG_BRANCH
12570 static tree stub_list = 0;
12572 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12573 procedure calls to the linked list. */
12576 add_compiler_stub (label_name, function_name, line_number)
12578 tree function_name;
12581 tree stub = build_tree_list (function_name, label_name);
12582 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12583 TREE_CHAIN (stub) = stub_list;
12587 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12588 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12589 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12591 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12592 handling procedure calls from the linked list and initializes the
/* NOTE(review): elided listing -- the declaration of tmp_buf, several
   braces, and one branch of the '*'-prefix handling are not visible.  */
12596 output_compiler_stub ()
12599 char label_buf[256];
12601 tree tmp_stub, stub;
/* Emit one stub per recorded entry in stub_list.  */
12604 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
/* Stub entry label.  */
12606 fprintf (asm_out_file,
12607 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
12609 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
/* Attribute the stub to the call's source line in stabs output.  */
12610 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12611 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
12612 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
/* Names beginning with '*' are already assembler names; otherwise a
   leading '_' is prepended (Darwin-style user-level symbol).  */
12614 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
12616 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
12619 label_buf[0] = '_';
12620 strcpy (label_buf+1,
12621 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
/* Build the long-branch sequence: load the target address into r12
   with hi16/lo16 halves, then branch through the count register.  */
12624 strcpy (tmp_buf, "lis r12,hi16(");
12625 strcat (tmp_buf, label_buf);
12626 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
12627 strcat (tmp_buf, label_buf);
12628 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
12629 output_asm_insn (tmp_buf, 0);
12631 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12632 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12633 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
12634 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12640 /* NO_PREVIOUS_DEF checks in the linked list whether the function name is
12641 already there or not. */
/* NOTE(review): elided listing -- the return type, the `tree stub;'
   declaration, and both return statements are not visible here.
   Presumably returns nonzero iff FUNCTION_NAME has no stub yet.  */
12644 no_previous_def (function_name)
12645 tree function_name;
/* Identifiers are interned, so pointer comparison suffices.  */
12648 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12649 if (function_name == STUB_FUNCTION_NAME (stub))
12654 /* GET_PREV_LABEL gets the label name from the previous definition of
/* NOTE(review): elided listing -- return type, local declaration and
   the fall-through return (for a name with no stub) are not visible.  */
12658 get_prev_label (function_name)
12659 tree function_name;
/* Linear search of the stub list by interned identifier pointer.  */
12662 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12663 if (function_name == STUB_FUNCTION_NAME (stub))
12664 return STUB_LABEL_NAME (stub);
12668 /* INSN is either a function call or a millicode call. It may have an
12669 unconditional jump in its delay slot.
12671 CALL_DEST is the routine we are calling. */
/* NOTE(review): elided listing -- return type, some declarations
   (labelname, line_number), braces, and the final return of BUF are
   not visible here.  Emits either a `jbsr' through a long-branch stub
   (non-PIC -mlongcall direct calls) or a plain `bl'.  */
12674 output_call (insn, call_dest, operand_number)
12677 int operand_number;
/* Static because the returned template must outlive this call.  */
12679 static char buf[256];
/* Long-branch stubs only apply to direct, non-PIC calls.  */
12680 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
12683 tree funname = get_identifier (XSTR (call_dest, 0));
/* First call to this function: create a fresh stub label and record
   it; later calls reuse the previously generated label.  */
12685 if (no_previous_def (funname))
12688 rtx label_rtx = gen_label_rtx ();
12689 char *label_buf, temp_buf[256];
12690 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
12691 CODE_LABEL_NUMBER (label_rtx));
/* Strip the leading '*' marker from already-assembler names.  */
12692 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
12693 labelname = get_identifier (label_buf);
/* Walk back to the nearest NOTE to attribute a source line number
   to the stub (for the stabs entries emitted with it).  */
12694 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
12696 line_number = NOTE_LINE_NUMBER (insn);
12697 add_compiler_stub (labelname, funname, line_number);
12700 labelname = get_prev_label (funname);
/* %.246s bounds the label so buf[256] cannot overflow.  */
12702 sprintf (buf, "jbsr %%z%d,%.246s",
12703 operand_number, IDENTIFIER_POINTER (labelname));
12708 sprintf (buf, "bl %%z%d", operand_number);
12713 #endif /* RS6000_LONG_BRANCH */
/* Build a local label "L<N>$<symbol>" for SYMBOL into BUF, preserving
   assembler quoting: a symbol that already begins with '"' supplies its
   own closing quote; a symbol that needs quoting gets both quotes added;
   otherwise no quotes.  (Comments are kept outside the macro body so the
   backslash continuations stay intact.  NOTE(review): elided listing --
   the do/while wrapper and some braces are not visible here; LENGTH
   appears unused in the visible expansion.)  */
12715 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12717 const char *const symbol_ = (SYMBOL); \
12718 char *buffer_ = (BUF); \
12719 if (symbol_[0] == '"') \
12721 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12723 else if (name_needs_quotes(symbol_)) \
12725 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12729 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12734 /* Generate PIC and indirect symbol stubs. */
/* NOTE(review): elided listing -- return type, parameter declaration
   for FILE, braces, and the MACHOPIC_PURE test guarding the PIC vs.
   non-pure paths are not visible here.  */
12737 machopic_output_stub (file, symb, stub)
12739 const char *symb, *stub;
12741 unsigned int length;
12742 char *symbol_name, *lazy_ptr_name;
12743 char *local_label_0;
12744 static int label = 0;
12746 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12747 symb = (*targetm.strip_name_encoding) (symb);
/* Derive the three names used below; +32 leaves room for the
   decorations the GEN_* macros add around SYMB.  */
12751 length = strlen (symb);
12752 symbol_name = alloca (length + 32);
12753 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
12755 lazy_ptr_name = alloca (length + 32);
12756 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
12758 local_label_0 = alloca (length + 32);
12759 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
/* Choose the stub section (PIC vs. plain symbol stub).  */
12762 machopic_picsymbol_stub_section ();
12764 machopic_symbol_stub_section ();
12766 fprintf (file, "%s:\n", stub);
12767 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
/* PIC stub: establish the current address in r11 via bcl/mflr, then
   address the lazy pointer PC-relatively with ha16/lo16 and jump
   through CTR.  dyld patches the lazy pointer on first use.  */
12771 fprintf (file, "\tmflr r0\n");
12772 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
12773 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
12774 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
12775 lazy_ptr_name, local_label_0);
/* Restore the caller's link register before branching.  */
12776 fprintf (file, "\tmtlr r0\n");
12777 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
12778 lazy_ptr_name, local_label_0);
12779 fprintf (file, "\tmtctr r12\n");
/* Leave the lazy-pointer address in r11 for dyld_stub_binding_helper.  */
12780 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
12781 lazy_ptr_name, local_label_0);
12782 fprintf (file, "\tbctr\n");
12785 fprintf (file, "non-pure not supported\n");
/* Emit the matching lazy pointer, initialized to the binding helper.  */
12787 machopic_lazy_symbol_ptr_section ();
12788 fprintf (file, "%s:\n", lazy_ptr_name);
12789 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
12790 fprintf (file, "\t.long dyld_stub_binding_helper\n");
12793 /* Legitimize PIC addresses. If the address is already
12794 position-independent, we return ORIG. Newly generated
12795 position-independent addresses go into a reg. This is REG if non
12796 zero, otherwise we allocate register(s) as necessary. */
/* True iff X is a CONST_INT that fits in a signed 16-bit immediate.  */
12798 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
/* NOTE(review): elided listing -- return type, declarations of ORIG /
   REG / base / offset, braces, and the early-return when ORIG is
   already based on the PIC register are not visible here.  */
12801 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
12803 enum machine_mode mode;
/* Allocate a scratch register unless we are inside reload, where new
   pseudos may not be created.  */
12808 if (reg == NULL && ! reload_in_progress && ! reload_completed)
12809 reg = gen_reg_rtx (Pmode);
12811 if (GET_CODE (orig) == CONST)
/* (const (plus pic_offset_table_rtx ...)) is already legitimate.  */
12813 if (GET_CODE (XEXP (orig, 0)) == PLUS
12814 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
/* Legitimize the two halves of a (const (plus X Y)) recursively.  */
12817 if (GET_CODE (XEXP (orig, 0)) == PLUS)
12820 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
12823 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
12829 if (GET_CODE (offset) == CONST_INT)
/* A small constant folds directly into the address; anything larger
   must be forced into a register (or, during reload, spilled to the
   constant pool below).  */
12831 if (SMALL_INT (offset))
12832 return plus_constant (base, INTVAL (offset));
12833 else if (! reload_in_progress && ! reload_completed)
12834 offset = force_reg (Pmode, offset);
12837 rtx mem = force_const_mem (Pmode, orig);
12838 return machopic_legitimize_pic_address (mem, Pmode, reg);
12841 return gen_rtx (PLUS, Pmode, base, offset);
12844 /* Fall back on generic machopic code. */
12845 return machopic_legitimize_pic_address (orig, mode, reg);
12848 /* This is just a placeholder to make linking work without having to
12849 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12850 ever needed for Darwin (not too likely!) this would have to get a
12851 real definition. */
12858 #endif /* TARGET_MACHO */
/* Compute section flags, forcing every section writable under
   -mrelocatable since its load-time fixups must patch section contents.
   NOTE(review): elided listing -- the parameter declarations and the
   final `return flags;' are not visible here.  */
12861 static unsigned int
12862 rs6000_elf_section_type_flags (decl, name, reloc)
12867 unsigned int flags = default_section_type_flags (decl, name, reloc);
12869 if (TARGET_RELOCATABLE)
12870 flags |= SECTION_WRITE;
12875 /* Record an element in the table of global constructors. SYMBOL is
12876 a SYMBOL_REF of the function to be called; PRIORITY is a number
12877 between 0 and MAX_INIT_PRIORITY.
12879 This differs from default_named_section_asm_out_constructor in
12880 that we have special handling for -mrelocatable. */
/* NOTE(review): elided listing -- return type, parameter declarations,
   the `char buf[16];' backing the prioritized name, and the
   `section = buf;' assignment are not visible here.  */
12883 rs6000_elf_asm_out_constructor (symbol, priority)
12887 const char *section = ".ctors";
/* Non-default priorities get their own ".ctors.NNNNN" section.  */
12890 if (priority != DEFAULT_INIT_PRIORITY)
12892 sprintf (buf, ".ctors.%.5u",
12893 /* Invert the numbering so the linker puts us in the proper
12894 order; constructors are run from right to left, and the
12895 linker sorts in increasing order. */
12896 MAX_INIT_PRIORITY - priority);
12900 named_section_flags (section, SECTION_WRITE);
12901 assemble_align (POINTER_SIZE);
/* Under -mrelocatable, emit the entry with an @fixup relocation so
   the startup code can relocate it; otherwise emit a plain pointer.  */
12903 if (TARGET_RELOCATABLE)
12905 fputs ("\t.long (", asm_out_file);
12906 output_addr_const (asm_out_file, symbol);
12907 fputs (")@fixup\n", asm_out_file);
12910 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Record an element in the table of global destructors; mirror image of
   rs6000_elf_asm_out_constructor above, using ".dtors" sections.
   NOTE(review): elided listing -- return type, parameter declarations
   and the `buf' declaration/assignment are not visible here.  */
12914 rs6000_elf_asm_out_destructor (symbol, priority)
12918 const char *section = ".dtors";
/* Non-default priorities get their own ".dtors.NNNNN" section.  */
12921 if (priority != DEFAULT_INIT_PRIORITY)
12923 sprintf (buf, ".dtors.%.5u",
12924 /* Invert the numbering so the linker puts us in the proper
12925 order; like constructors, destructors are run from right to
12926 left, and the linker sorts in increasing order. */
12927 MAX_INIT_PRIORITY - priority);
12931 named_section_flags (section, SECTION_WRITE);
12932 assemble_align (POINTER_SIZE);
/* -mrelocatable entries need an @fixup relocation (see ctor case).  */
12934 if (TARGET_RELOCATABLE)
12936 fputs ("\t.long (", asm_out_file);
12937 output_addr_const (asm_out_file, symbol);
12938 fputs (")@fixup\n", asm_out_file);
12941 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Switch to the named XCOFF csect; FLAGS are ignored since .csect
   carries its own storage-mapping class.  NOTE(review): elided listing --
   return type, the NAME declaration and braces are not visible here.  */
12947 xcoff_asm_named_section (name, flags)
12949 unsigned int flags ATTRIBUTE_UNUSED;
12951 fprintf (asm_out_file, "\t.csect %s\n", name);
/* Select the output section for EXP: read-only data (public or private)
   for constant initializers and non-writable strings, otherwise
   read/write data.  NOTE(review): elided listing -- return type,
   parameter declarations, braces, and the data_section () call on the
   public read/write path are not visible here.  */
12955 rs6000_xcoff_select_section (exp, reloc, align)
12958 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* Read-only: a non-writable string constant, or a DECL whose
   initializer is constant (error_mark_node counts as "constant" so
   erroneous initializers do not force a writable section).  */
12960 if ((TREE_CODE (exp) == STRING_CST
12961 && ! flag_writable_strings)
12962 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
12963 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
12964 && DECL_INITIAL (exp)
12965 && (DECL_INITIAL (exp) == error_mark_node
12966 || TREE_CONSTANT (DECL_INITIAL (exp)))
12969 if (TREE_PUBLIC (exp))
12970 read_only_data_section ();
12972 read_only_private_data_section ();
/* Writable data: public vs. private sections.  */
12976 if (TREE_PUBLIC (exp))
12979 private_data_section ();
/* Give DECL a unique XCOFF section name.  For functions this is
   ".<name>[PR]" -- the [PR] storage-mapping class marks program code.
   NOTE(review): elided listing -- return type, declarations of `name' /
   `len' / `string', braces, and any non-function handling are not
   visible here.  */
12984 rs6000_xcoff_unique_section (decl, reloc)
12986 int reloc ATTRIBUTE_UNUSED;
12992 if (TREE_CODE (decl) == FUNCTION_DECL)
12994 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* +5 covers the '.' prefix and the "[PR]" suffix.  */
12995 len = strlen (name) + 5;
12996 string = alloca (len + 1);
12997 sprintf (string, ".%s[PR]", name);
12998 DECL_SECTION_NAME (decl) = build_string (len, string);
13002 /* Select section for constant in constant pool.
13004 On RS/6000, all constants are in the private read-only data area.
13005 However, if this is being placed in the TOC it must be output as a
/* NOTE(review): elided listing -- return type, the X declaration,
   braces, and the toc_section () call on the TOC path are not visible
   here.  */
13009 rs6000_xcoff_select_rtx_section (mode, x, align)
13010 enum machine_mode mode;
13012 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* TOC-eligible constants go to the TOC; everything else goes to the
   private read-only data section.  */
13014 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13017 read_only_private_data_section ();
13020 /* Remove any trailing [DS] or the like from the symbol name. */
/* NOTE(review): elided listing -- the NAME parameter declaration,
   `size_t len;', braces, and the fall-through `return name;' are not
   visible here.  Assumes a name ending in ']' carries exactly a
   four-character "[XX]" suffix -- confirm against the mapping-class
   encodings used elsewhere in this file.  */
13022 static const char *
13023 rs6000_xcoff_strip_name_encoding (name)
13029 len = strlen (name);
13030 if (name[len - 1] == ']')
13031 return ggc_alloc_string (name, len - 4);
13036 #endif /* TARGET_XCOFF */
13038 /* Note that this is also used for ELF64. */
/* Mark a function's SYMBOL_REF when we know it is defined locally
   (already written, or not public) and not weak, so calls to it can
   be treated as local.  NOTE(review): elided listing -- return type,
   parameter declaration for DECL, braces, and the function's closing
   lines fall outside this view.  */
13041 rs6000_xcoff_encode_section_info (decl, first)
13043 int first ATTRIBUTE_UNUSED;
13045 if (TREE_CODE (decl) == FUNCTION_DECL
13046 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
13047 && ! DECL_WEAK (decl))
13048 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;