1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Nonzero if we want SPE ABI extensions. */
86 /* Whether isel instructions should be generated. */
89 /* Nonzero if we have FPRs. */
92 /* String from -misel=. */
93 const char *rs6000_isel_string;
95 /* Set to non-zero once AIX common-mode calls have been defined. */
96 static int common_mode_defined;
98 /* Save information from a "cmpxx" operation until the branch or scc is
100 rtx rs6000_compare_op0, rs6000_compare_op1;
101 int rs6000_compare_fp_p;
103 /* Label number of label created for -mrelocatable, to call to so we can
104 get the address of the GOT section */
105 int rs6000_pic_labelno;
108 /* Which abi to adhere to */
109 const char *rs6000_abi_name = RS6000_ABI_NAME;
111 /* Semantics of the small data area */
112 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
114 /* Which small data model to use */
115 const char *rs6000_sdata_name = (char *)0;
117 /* Counter for labels which are to be placed in .fixup. */
118 int fixuplabelno = 0;
121 /* ABI enumeration available for subtarget to use. */
122 enum rs6000_abi rs6000_current_abi;
124 /* ABI string from -mabi= option. */
125 const char *rs6000_abi_string;
128 const char *rs6000_debug_name;
129 int rs6000_debug_stack; /* debug stack applications */
130 int rs6000_debug_arg; /* debug argument handling */
132 const char *rs6000_traceback_name;
134 traceback_default = 0,
140 /* Flag to say the TOC is initialized */
142 char toc_label_name[10];
144 /* Alias set for saves and restores from the rs6000 stack. */
145 static int rs6000_sr_alias_set;
147 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
148 The only place that looks at this is rs6000_set_default_type_attributes;
149 everywhere else should rely on the presence or absence of a longcall
150 attribute on the function declaration. */
151 int rs6000_default_long_calls;
152 const char *rs6000_longcall_switch;
154 struct builtin_description
156 /* mask is not const because we're going to alter it below. This
157 nonsense will go away when we rewrite the -march infrastructure
158 to give us more target flag bits. */
160 const enum insn_code icode;
161 const char *const name;
162 const enum rs6000_builtins code;
165 static void rs6000_add_gc_roots PARAMS ((void));
166 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
167 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
168 static void validate_condition_mode
169 PARAMS ((enum rtx_code, enum machine_mode));
170 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
171 static void rs6000_maybe_dead PARAMS ((rtx));
172 static void rs6000_emit_stack_tie PARAMS ((void));
173 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
174 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
175 unsigned int, int, int));
176 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
177 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
178 static unsigned rs6000_hash_constant PARAMS ((rtx));
179 static unsigned toc_hash_function PARAMS ((const void *));
180 static int toc_hash_eq PARAMS ((const void *, const void *));
181 static int toc_hash_mark_entry PARAMS ((void **, void *));
182 static void toc_hash_mark_table PARAMS ((void *));
183 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
184 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
185 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
186 static int rs6000_ra_ever_killed PARAMS ((void));
187 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
188 const struct attribute_spec rs6000_attribute_table[];
189 static void rs6000_set_default_type_attributes PARAMS ((tree));
190 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
191 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
192 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
193 HOST_WIDE_INT, HOST_WIDE_INT));
195 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
197 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
198 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
199 static void rs6000_elf_select_section PARAMS ((tree, int,
200 unsigned HOST_WIDE_INT));
201 static void rs6000_elf_unique_section PARAMS ((tree, int));
202 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
203 unsigned HOST_WIDE_INT));
204 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
205 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
208 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
209 static void rs6000_xcoff_select_section PARAMS ((tree, int,
210 unsigned HOST_WIDE_INT));
211 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
212 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
213 unsigned HOST_WIDE_INT));
214 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
216 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
218 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
219 static int rs6000_adjust_priority PARAMS ((rtx, int));
220 static int rs6000_issue_rate PARAMS ((void));
222 static void rs6000_init_builtins PARAMS ((void));
223 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
224 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
225 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
226 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
227 static void altivec_init_builtins PARAMS ((void));
228 static void rs6000_common_init_builtins PARAMS ((void));
230 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
231 int, enum rs6000_builtins,
232 enum rs6000_builtins));
233 static void spe_init_builtins PARAMS ((void));
234 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
235 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
237 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
239 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
240 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
241 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
242 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
243 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
244 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
245 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
246 static void rs6000_parse_abi_options PARAMS ((void));
247 static void rs6000_parse_vrsave_option PARAMS ((void));
248 static void rs6000_parse_isel_option PARAMS ((void));
249 static int first_altivec_reg_to_save PARAMS ((void));
250 static unsigned int compute_vrsave_mask PARAMS ((void));
251 static void is_altivec_return_reg PARAMS ((rtx, void *));
252 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
253 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
254 static int easy_vector_constant PARAMS ((rtx));
256 /* Default register names. */
257 char rs6000_reg_names[][8] =
259 "0", "1", "2", "3", "4", "5", "6", "7",
260 "8", "9", "10", "11", "12", "13", "14", "15",
261 "16", "17", "18", "19", "20", "21", "22", "23",
262 "24", "25", "26", "27", "28", "29", "30", "31",
263 "0", "1", "2", "3", "4", "5", "6", "7",
264 "8", "9", "10", "11", "12", "13", "14", "15",
265 "16", "17", "18", "19", "20", "21", "22", "23",
266 "24", "25", "26", "27", "28", "29", "30", "31",
267 "mq", "lr", "ctr","ap",
268 "0", "1", "2", "3", "4", "5", "6", "7",
270 /* AltiVec registers. */
271 "0", "1", "2", "3", "4", "5", "6", "7",
272 "8", "9", "10", "11", "12", "13", "14", "15",
273 "16", "17", "18", "19", "20", "21", "22", "23",
274 "24", "25", "26", "27", "28", "29", "30", "31",
278 #ifdef TARGET_REGNAMES
279 static const char alt_reg_names[][8] =
281 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
282 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
283 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
284 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
285 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
286 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
287 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
288 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
289 "mq", "lr", "ctr", "ap",
290 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
292 /* AltiVec registers. */
293 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
294 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
295 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
296 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
301 #ifndef MASK_STRICT_ALIGN
302 #define MASK_STRICT_ALIGN 0
305 /* Initialize the GCC target structure. */
306 #undef TARGET_ATTRIBUTE_TABLE
307 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
308 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
309 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
311 #undef TARGET_ASM_ALIGNED_DI_OP
312 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
314 /* Default unaligned ops are only provided for ELF. Find the ops needed
315 for non-ELF systems. */
316 #ifndef OBJECT_FORMAT_ELF
318 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
320 #undef TARGET_ASM_UNALIGNED_HI_OP
321 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
322 #undef TARGET_ASM_UNALIGNED_SI_OP
323 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
324 #undef TARGET_ASM_UNALIGNED_DI_OP
325 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
328 #undef TARGET_ASM_UNALIGNED_HI_OP
329 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
330 #undef TARGET_ASM_UNALIGNED_SI_OP
331 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
335 /* This hook deals with fixups for relocatable code and DI-mode objects
337 #undef TARGET_ASM_INTEGER
338 #define TARGET_ASM_INTEGER rs6000_assemble_integer
340 #undef TARGET_ASM_FUNCTION_PROLOGUE
341 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
342 #undef TARGET_ASM_FUNCTION_EPILOGUE
343 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
346 #undef TARGET_SECTION_TYPE_FLAGS
347 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
350 #undef TARGET_SCHED_ISSUE_RATE
351 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
352 #undef TARGET_SCHED_ADJUST_COST
353 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
354 #undef TARGET_SCHED_ADJUST_PRIORITY
355 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
357 #undef TARGET_INIT_BUILTINS
358 #define TARGET_INIT_BUILTINS rs6000_init_builtins
360 #undef TARGET_EXPAND_BUILTIN
361 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
363 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
364 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
366 struct gcc_target targetm = TARGET_INITIALIZER;
368 /* Override command line options. Mostly we process the processor
369 type and sometimes adjust other TARGET_ options. */
372 rs6000_override_options (default_cpu)
373 const char *default_cpu;
376 struct rs6000_cpu_select *ptr;
378 /* Simplify the entries below by making a mask for any POWER
379 variant and any PowerPC variant. */
381 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
382 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
383 | MASK_PPC_GFXOPT | MASK_POWERPC64)
384 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
388 const char *const name; /* Canonical processor name. */
389 const enum processor_type processor; /* Processor type enum value. */
390 const int target_enable; /* Target flags to enable. */
391 const int target_disable; /* Target flags to disable. */
392 } const processor_target_table[]
393 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
394 POWER_MASKS | POWERPC_MASKS},
395 {"power", PROCESSOR_POWER,
396 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
397 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
398 {"power2", PROCESSOR_POWER,
399 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
400 POWERPC_MASKS | MASK_NEW_MNEMONICS},
401 {"power3", PROCESSOR_PPC630,
402 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | MASK_PPC_GPOPT},
404 {"power4", PROCESSOR_POWER4,
405 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | MASK_PPC_GPOPT},
407 {"powerpc", PROCESSOR_POWERPC,
408 MASK_POWERPC | MASK_NEW_MNEMONICS,
409 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
410 {"powerpc64", PROCESSOR_POWERPC64,
411 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
412 POWER_MASKS | POWERPC_OPT_MASKS},
413 {"rios", PROCESSOR_RIOS1,
414 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
415 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
416 {"rios1", PROCESSOR_RIOS1,
417 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
418 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
419 {"rsc", PROCESSOR_PPC601,
420 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
421 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
422 {"rsc1", PROCESSOR_PPC601,
423 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
424 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
425 {"rios2", PROCESSOR_RIOS2,
426 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
427 POWERPC_MASKS | MASK_NEW_MNEMONICS},
428 {"rs64a", PROCESSOR_RS64A,
429 MASK_POWERPC | MASK_NEW_MNEMONICS,
430 POWER_MASKS | POWERPC_OPT_MASKS},
431 {"401", PROCESSOR_PPC403,
432 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
433 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
434 {"403", PROCESSOR_PPC403,
435 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
436 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
437 {"405", PROCESSOR_PPC405,
438 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
439 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
440 {"505", PROCESSOR_MPCCORE,
441 MASK_POWERPC | MASK_NEW_MNEMONICS,
442 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
443 {"601", PROCESSOR_PPC601,
444 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
445 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
446 {"602", PROCESSOR_PPC603,
447 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
448 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
449 {"603", PROCESSOR_PPC603,
450 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
451 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
452 {"603e", PROCESSOR_PPC603,
453 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
454 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
455 {"ec603e", PROCESSOR_PPC603,
456 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
457 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
458 {"604", PROCESSOR_PPC604,
459 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
460 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
461 {"604e", PROCESSOR_PPC604e,
462 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
463 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
464 {"620", PROCESSOR_PPC620,
465 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
466 POWER_MASKS | MASK_PPC_GPOPT},
467 {"630", PROCESSOR_PPC630,
468 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
469 POWER_MASKS | MASK_PPC_GPOPT},
470 {"740", PROCESSOR_PPC750,
471 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
472 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
473 {"750", PROCESSOR_PPC750,
474 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
475 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
476 {"7400", PROCESSOR_PPC7400,
477 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
478 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
479 {"7450", PROCESSOR_PPC7450,
480 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
481 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
482 {"8540", PROCESSOR_PPC8540,
483 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
484 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
485 {"801", PROCESSOR_MPCCORE,
486 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
487 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
488 {"821", PROCESSOR_MPCCORE,
489 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
490 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
491 {"823", PROCESSOR_MPCCORE,
492 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
493 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
494 {"860", PROCESSOR_MPCCORE,
495 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
496 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
498 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
500 /* Save current -mmultiple/-mno-multiple status. */
501 int multiple = TARGET_MULTIPLE;
502 /* Save current -mstring/-mno-string status. */
503 int string = TARGET_STRING;
505 /* Identify the processor type. */
506 rs6000_select[0].string = default_cpu;
507 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
509 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
511 ptr = &rs6000_select[i];
512 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
514 for (j = 0; j < ptt_size; j++)
515 if (! strcmp (ptr->string, processor_target_table[j].name))
518 rs6000_cpu = processor_target_table[j].processor;
522 target_flags |= processor_target_table[j].target_enable;
523 target_flags &= ~processor_target_table[j].target_disable;
529 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
533 if (rs6000_cpu == PROCESSOR_PPC8540)
536 /* If we are optimizing big endian systems for space, use the store
537 multiple instructions. */
538 if (BYTES_BIG_ENDIAN && optimize_size)
539 target_flags |= MASK_MULTIPLE;
541 /* If -mmultiple or -mno-multiple was explicitly used, don't
542 override with the processor default */
543 if (TARGET_MULTIPLE_SET)
544 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
546 /* If -mstring or -mno-string was explicitly used, don't override
547 with the processor default. */
548 if (TARGET_STRING_SET)
549 target_flags = (target_flags & ~MASK_STRING) | string;
551 /* Don't allow -mmultiple or -mstring on little endian systems
552 unless the cpu is a 750, because the hardware doesn't support the
553 instructions used in little endian mode, and causes an alignment
554 trap. The 750 does not cause an alignment trap (except when the
555 target is unaligned). */
557 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
561 target_flags &= ~MASK_MULTIPLE;
562 if (TARGET_MULTIPLE_SET)
563 warning ("-mmultiple is not supported on little endian systems");
568 target_flags &= ~MASK_STRING;
569 if (TARGET_STRING_SET)
570 warning ("-mstring is not supported on little endian systems");
574 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
579 warning ("-f%s ignored (all code is position independent)",
580 (flag_pic > 1) ? "PIC" : "pic");
583 #ifdef XCOFF_DEBUGGING_INFO
584 if (flag_function_sections && (write_symbols != NO_DEBUG)
585 && DEFAULT_ABI == ABI_AIX)
587 warning ("-ffunction-sections disabled on AIX when debugging");
588 flag_function_sections = 0;
591 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
593 warning ("-fdata-sections not supported on AIX");
594 flag_data_sections = 0;
598 /* Set debug flags */
599 if (rs6000_debug_name)
601 if (! strcmp (rs6000_debug_name, "all"))
602 rs6000_debug_stack = rs6000_debug_arg = 1;
603 else if (! strcmp (rs6000_debug_name, "stack"))
604 rs6000_debug_stack = 1;
605 else if (! strcmp (rs6000_debug_name, "arg"))
606 rs6000_debug_arg = 1;
608 error ("unknown -mdebug-%s switch", rs6000_debug_name);
611 if (rs6000_traceback_name)
613 if (! strncmp (rs6000_traceback_name, "full", 4))
614 rs6000_traceback = traceback_full;
615 else if (! strncmp (rs6000_traceback_name, "part", 4))
616 rs6000_traceback = traceback_part;
617 else if (! strncmp (rs6000_traceback_name, "no", 2))
618 rs6000_traceback = traceback_none;
620 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
621 rs6000_traceback_name);
624 /* Set size of long double */
625 rs6000_long_double_type_size = 64;
626 if (rs6000_long_double_size_string)
629 int size = strtol (rs6000_long_double_size_string, &tail, 10);
630 if (*tail != '\0' || (size != 64 && size != 128))
631 error ("Unknown switch -mlong-double-%s",
632 rs6000_long_double_size_string);
634 rs6000_long_double_type_size = size;
637 /* Handle -mabi= options. */
638 rs6000_parse_abi_options ();
640 /* Handle -mvrsave= option. */
641 rs6000_parse_vrsave_option ();
643 /* Handle -misel= option. */
644 rs6000_parse_isel_option ();
646 #ifdef SUBTARGET_OVERRIDE_OPTIONS
647 SUBTARGET_OVERRIDE_OPTIONS;
649 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
650 SUBSUBTARGET_OVERRIDE_OPTIONS;
653 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
654 using TARGET_OPTIONS to handle a toggle switch, but we're out of
655 bits in target_flags so TARGET_SWITCHES cannot be used.
656 Assumption here is that rs6000_longcall_switch points into the
657 text of the complete option, rather than being a copy, so we can
658 scan back for the presence or absence of the no- modifier. */
659 if (rs6000_longcall_switch)
661 const char *base = rs6000_longcall_switch;
662 while (base[-1] != 'm') base--;
664 if (*rs6000_longcall_switch != '\0')
665 error ("invalid option `%s'", base);
666 rs6000_default_long_calls = (base[0] != 'n');
669 #ifdef TARGET_REGNAMES
670 /* If the user desires alternate register names, copy in the
671 alternate names now. */
673 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
676 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
677 If -maix-struct-return or -msvr4-struct-return was explicitly
678 used, don't override with the ABI default. */
679 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
681 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
682 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
684 target_flags |= MASK_AIX_STRUCT_RET;
687 /* Register global variables with the garbage collector. */
688 rs6000_add_gc_roots ();
690 /* Allocate an alias set for register saves & restores from stack. */
691 rs6000_sr_alias_set = new_alias_set ();
694 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
696 /* We can only guarantee the availability of DI pseudo-ops when
697 assembling for 64-bit targets. */
700 targetm.asm_out.aligned_op.di = NULL;
701 targetm.asm_out.unaligned_op.di = NULL;
704 /* Arrange to save and restore machine status around nested functions. */
705 init_machine_status = rs6000_init_machine_status;
708 /* Handle -misel= option. */
710 rs6000_parse_isel_option ()
712 if (rs6000_isel_string == 0)
714 else if (! strcmp (rs6000_isel_string, "yes"))
716 else if (! strcmp (rs6000_isel_string, "no"))
719 error ("unknown -misel= option specified: '%s'",
723 /* Handle -mvrsave= options. */
725 rs6000_parse_vrsave_option ()
727 /* Generate VRSAVE instructions by default. */
728 if (rs6000_altivec_vrsave_string == 0
729 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
730 rs6000_altivec_vrsave = 1;
731 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
732 rs6000_altivec_vrsave = 0;
734 error ("unknown -mvrsave= option specified: '%s'",
735 rs6000_altivec_vrsave_string);
738 /* Handle -mabi= options. */
740 rs6000_parse_abi_options ()
742 if (rs6000_abi_string == 0)
744 else if (! strcmp (rs6000_abi_string, "altivec"))
745 rs6000_altivec_abi = 1;
746 else if (! strcmp (rs6000_abi_string, "no-altivec"))
747 rs6000_altivec_abi = 0;
748 else if (! strcmp (rs6000_abi_string, "spe"))
750 else if (! strcmp (rs6000_abi_string, "no-spe"))
753 error ("unknown ABI specified: '%s'", rs6000_abi_string);
757 optimization_options (level, size)
758 int level ATTRIBUTE_UNUSED;
759 int size ATTRIBUTE_UNUSED;
763 /* Do anything needed at the start of the asm file. */
766 rs6000_file_start (file, default_cpu)
768 const char *default_cpu;
772 const char *start = buffer;
773 struct rs6000_cpu_select *ptr;
775 if (flag_verbose_asm)
777 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
778 rs6000_select[0].string = default_cpu;
780 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
782 ptr = &rs6000_select[i];
783 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
785 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
791 switch (rs6000_sdata)
793 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
794 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
795 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
796 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
799 if (rs6000_sdata && g_switch_value)
801 fprintf (file, "%s -G %d", start, g_switch_value);
811 /* Return non-zero if this function is known to have a null epilogue. */
816 if (reload_completed)
818 rs6000_stack_t *info = rs6000_stack_info ();
820 if (info->first_gp_reg_save == 32
821 && info->first_fp_reg_save == 64
822 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
825 && info->vrsave_mask == 0
833 /* Returns 1 always. */
836 any_operand (op, mode)
837 rtx op ATTRIBUTE_UNUSED;
838 enum machine_mode mode ATTRIBUTE_UNUSED;
843 /* Returns 1 if op is the count register. */
845 count_register_operand (op, mode)
847 enum machine_mode mode ATTRIBUTE_UNUSED;
849 if (GET_CODE (op) != REG)
852 if (REGNO (op) == COUNT_REGISTER_REGNUM)
855 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
861 /* Returns 1 if op is an altivec register. */
863 altivec_register_operand (op, mode)
865 enum machine_mode mode ATTRIBUTE_UNUSED;
868 return (register_operand (op, mode)
869 && (GET_CODE (op) != REG
870 || REGNO (op) > FIRST_PSEUDO_REGISTER
871 || ALTIVEC_REGNO_P (REGNO (op))));
875 xer_operand (op, mode)
877 enum machine_mode mode ATTRIBUTE_UNUSED;
879 if (GET_CODE (op) != REG)
882 if (XER_REGNO_P (REGNO (op)))
888 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
889 by such constants completes more quickly. */
892 s8bit_cint_operand (op, mode)
894 enum machine_mode mode ATTRIBUTE_UNUSED;
896 return ( GET_CODE (op) == CONST_INT
897 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
900 /* Return 1 if OP is a constant that can fit in a D field. */
903 short_cint_operand (op, mode)
905 enum machine_mode mode ATTRIBUTE_UNUSED;
907 return (GET_CODE (op) == CONST_INT
908 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
911 /* Similar for an unsigned D field. */
914 u_short_cint_operand (op, mode)
916 enum machine_mode mode ATTRIBUTE_UNUSED;
918 return (GET_CODE (op) == CONST_INT
919 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
922 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
925 non_short_cint_operand (op, mode)
927 enum machine_mode mode ATTRIBUTE_UNUSED;
929 return (GET_CODE (op) == CONST_INT
930 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
933 /* Returns 1 if OP is a CONST_INT that is a positive value
934 and an exact power of 2. */
937 exact_log2_cint_operand (op, mode)
939 enum machine_mode mode ATTRIBUTE_UNUSED;
941 return (GET_CODE (op) == CONST_INT
943 && exact_log2 (INTVAL (op)) >= 0);
946 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
950 gpc_reg_operand (op, mode)
952 enum machine_mode mode;
954 return (register_operand (op, mode)
955 && (GET_CODE (op) != REG
956 || (REGNO (op) >= ARG_POINTER_REGNUM
957 && !XER_REGNO_P (REGNO (op)))
958 || REGNO (op) < MQ_REGNO));
961 /* Returns 1 if OP is either a pseudo-register or a register denoting a
965 cc_reg_operand (op, mode)
967 enum machine_mode mode;
969 return (register_operand (op, mode)
970 && (GET_CODE (op) != REG
971 || REGNO (op) >= FIRST_PSEUDO_REGISTER
972 || CR_REGNO_P (REGNO (op))));
975 /* Returns 1 if OP is either a pseudo-register or a register denoting a
976 CR field that isn't CR0. */
979 cc_reg_not_cr0_operand (op, mode)
981 enum machine_mode mode;
983 return (register_operand (op, mode)
984 && (GET_CODE (op) != REG
985 || REGNO (op) >= FIRST_PSEUDO_REGISTER
986 || CR_REGNO_NOT_CR0_P (REGNO (op))));
989 /* Returns 1 if OP is either a constant integer valid for a D-field or
990 a non-special register. If a register, it must be in the proper
991 mode unless MODE is VOIDmode. */
994 reg_or_short_operand (op, mode)
996 enum machine_mode mode;
998 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1001 /* Similar, except check if the negation of the constant would be
1002 valid for a D-field. */
1005 reg_or_neg_short_operand (op, mode)
1007 enum machine_mode mode;
1009 if (GET_CODE (op) == CONST_INT)
1010 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1012 return gpc_reg_operand (op, mode);
1015 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1016 a non-special register. If a register, it must be in the proper
1017 mode unless MODE is VOIDmode. */
1020 reg_or_aligned_short_operand (op, mode)
1022 enum machine_mode mode;
1024 if (gpc_reg_operand (op, mode))
1026 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1033 /* Return 1 if the operand is either a register or an integer whose
1034 high-order 16 bits are zero. */
1037 reg_or_u_short_operand (op, mode)
1039 enum machine_mode mode;
1041 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1044 /* Return 1 is the operand is either a non-special register or ANY
1045 constant integer. */
1048 reg_or_cint_operand (op, mode)
1050 enum machine_mode mode;
1052 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1055 /* Return 1 is the operand is either a non-special register or ANY
1056 32-bit signed constant integer. */
1059 reg_or_arith_cint_operand (op, mode)
1061 enum machine_mode mode;
1063 return (gpc_reg_operand (op, mode)
1064 || (GET_CODE (op) == CONST_INT
1065 #if HOST_BITS_PER_WIDE_INT != 32
1066 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1067 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1072 /* Return 1 is the operand is either a non-special register or a 32-bit
1073 signed constant integer valid for 64-bit addition. */
1076 reg_or_add_cint64_operand (op, mode)
1078 enum machine_mode mode;
1080 return (gpc_reg_operand (op, mode)
1081 || (GET_CODE (op) == CONST_INT
1082 #if HOST_BITS_PER_WIDE_INT == 32
1083 && INTVAL (op) < 0x7fff8000
1085 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1091 /* Return 1 is the operand is either a non-special register or a 32-bit
1092 signed constant integer valid for 64-bit subtraction. */
1095 reg_or_sub_cint64_operand (op, mode)
1097 enum machine_mode mode;
1099 return (gpc_reg_operand (op, mode)
1100 || (GET_CODE (op) == CONST_INT
1101 #if HOST_BITS_PER_WIDE_INT == 32
1102 && (- INTVAL (op)) < 0x7fff8000
1104 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1110 /* Return 1 is the operand is either a non-special register or ANY
1111 32-bit unsigned constant integer. */
1114 reg_or_logical_cint_operand (op, mode)
1116 enum machine_mode mode;
1118 if (GET_CODE (op) == CONST_INT)
1120 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1122 if (GET_MODE_BITSIZE (mode) <= 32)
1125 if (INTVAL (op) < 0)
1129 return ((INTVAL (op) & GET_MODE_MASK (mode)
1130 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1132 else if (GET_CODE (op) == CONST_DOUBLE)
1134 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1138 return CONST_DOUBLE_HIGH (op) == 0;
1141 return gpc_reg_operand (op, mode);
1144 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1147 got_operand (op, mode)
1149 enum machine_mode mode ATTRIBUTE_UNUSED;
1151 return (GET_CODE (op) == SYMBOL_REF
1152 || GET_CODE (op) == CONST
1153 || GET_CODE (op) == LABEL_REF);
1156 /* Return 1 if the operand is a simple references that can be loaded via
1157 the GOT (labels involving addition aren't allowed). */
1160 got_no_const_operand (op, mode)
1162 enum machine_mode mode ATTRIBUTE_UNUSED;
1164 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1167 /* Return the number of instructions it takes to form a constant in an
1168 integer register. */
1171 num_insns_constant_wide (value)
1172 HOST_WIDE_INT value;
1174 /* signed constant loadable with {cal|addi} */
1175 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1178 /* constant loadable with {cau|addis} */
1179 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1182 #if HOST_BITS_PER_WIDE_INT == 64
1183 else if (TARGET_POWERPC64)
1185 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1186 HOST_WIDE_INT high = value >> 31;
1188 if (high == 0 || high == -1)
1194 return num_insns_constant_wide (high) + 1;
1196 return (num_insns_constant_wide (high)
1197 + num_insns_constant_wide (low) + 1);
1206 num_insns_constant (op, mode)
1208 enum machine_mode mode;
1210 if (GET_CODE (op) == CONST_INT)
1212 #if HOST_BITS_PER_WIDE_INT == 64
1213 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1214 && mask64_operand (op, mode))
1218 return num_insns_constant_wide (INTVAL (op));
1221 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1226 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1227 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1228 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1231 else if (GET_CODE (op) == CONST_DOUBLE)
1237 int endian = (WORDS_BIG_ENDIAN == 0);
1239 if (mode == VOIDmode || mode == DImode)
1241 high = CONST_DOUBLE_HIGH (op);
1242 low = CONST_DOUBLE_LOW (op);
1246 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1247 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1249 low = l[1 - endian];
1253 return (num_insns_constant_wide (low)
1254 + num_insns_constant_wide (high));
1258 if (high == 0 && low >= 0)
1259 return num_insns_constant_wide (low);
1261 else if (high == -1 && low < 0)
1262 return num_insns_constant_wide (low);
1264 else if (mask64_operand (op, mode))
1268 return num_insns_constant_wide (high) + 1;
1271 return (num_insns_constant_wide (high)
1272 + num_insns_constant_wide (low) + 1);
1280 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1281 register with one instruction per word. We only do this if we can
1282 safely read CONST_DOUBLE_{LOW,HIGH}. */
1285 easy_fp_constant (op, mode)
1287 enum machine_mode mode;
1289 if (GET_CODE (op) != CONST_DOUBLE
1290 || GET_MODE (op) != mode
1291 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1294 /* Consider all constants with -msoft-float to be easy. */
1295 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1299 /* If we are using V.4 style PIC, consider all constants to be hard. */
1300 if (flag_pic && DEFAULT_ABI == ABI_V4)
1303 #ifdef TARGET_RELOCATABLE
1304 /* Similarly if we are using -mrelocatable, consider all constants
1306 if (TARGET_RELOCATABLE)
1315 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1316 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1318 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1319 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1322 else if (mode == SFmode)
1327 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1328 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1330 return num_insns_constant_wide (l) == 1;
1333 else if (mode == DImode)
1334 return ((TARGET_POWERPC64
1335 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1336 || (num_insns_constant (op, DImode) <= 2));
1338 else if (mode == SImode)
1344 /* Return 1 if the operand is a CONST_INT and can be put into a
1345 register with one instruction. */
1348 easy_vector_constant (op)
1354 if (GET_CODE (op) != CONST_VECTOR)
1357 units = CONST_VECTOR_NUNITS (op);
1359 /* We can generate 0 easily. Look for that. */
1360 for (i = 0; i < units; ++i)
1362 elt = CONST_VECTOR_ELT (op, i);
1364 /* We could probably simplify this by just checking for equality
1365 with CONST0_RTX for the current mode, but let's be safe
1368 switch (GET_CODE (elt))
1371 if (INTVAL (elt) != 0)
1375 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1383 /* We could probably generate a few other constants trivially, but
1384 gcc doesn't generate them yet. FIXME later. */
1388 /* Return 1 if the operand is the constant 0. This works for scalars
1389 as well as vectors. */
1391 zero_constant (op, mode)
1393 enum machine_mode mode;
1395 return op == CONST0_RTX (mode);
1398 /* Return 1 if the operand is 0.0. */
1400 zero_fp_constant (op, mode)
1402 enum machine_mode mode;
1404 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1407 /* Return 1 if the operand is in volatile memory. Note that during
1408 the RTL generation phase, memory_operand does not return TRUE for
1409 volatile memory references. So this function allows us to
1410 recognize volatile references where its safe. */
1413 volatile_mem_operand (op, mode)
1415 enum machine_mode mode;
1417 if (GET_CODE (op) != MEM)
1420 if (!MEM_VOLATILE_P (op))
1423 if (mode != GET_MODE (op))
1426 if (reload_completed)
1427 return memory_operand (op, mode);
1429 if (reload_in_progress)
1430 return strict_memory_address_p (mode, XEXP (op, 0));
1432 return memory_address_p (mode, XEXP (op, 0));
1435 /* Return 1 if the operand is an offsettable memory operand. */
1438 offsettable_mem_operand (op, mode)
1440 enum machine_mode mode;
1442 return ((GET_CODE (op) == MEM)
1443 && offsettable_address_p (reload_completed || reload_in_progress,
1444 mode, XEXP (op, 0)));
1447 /* Return 1 if the operand is either an easy FP constant (see above) or
1451 mem_or_easy_const_operand (op, mode)
1453 enum machine_mode mode;
1455 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1458 /* Return 1 if the operand is either a non-special register or an item
1459 that can be used as the operand of a `mode' add insn. */
1462 add_operand (op, mode)
1464 enum machine_mode mode;
1466 if (GET_CODE (op) == CONST_INT)
1467 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1468 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1470 return gpc_reg_operand (op, mode);
1473 /* Return 1 if OP is a constant but not a valid add_operand. */
1476 non_add_cint_operand (op, mode)
1478 enum machine_mode mode ATTRIBUTE_UNUSED;
1480 return (GET_CODE (op) == CONST_INT
1481 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1482 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1485 /* Return 1 if the operand is a non-special register or a constant that
1486 can be used as the operand of an OR or XOR insn on the RS/6000. */
1489 logical_operand (op, mode)
1491 enum machine_mode mode;
1493 HOST_WIDE_INT opl, oph;
1495 if (gpc_reg_operand (op, mode))
1498 if (GET_CODE (op) == CONST_INT)
1500 opl = INTVAL (op) & GET_MODE_MASK (mode);
1502 #if HOST_BITS_PER_WIDE_INT <= 32
1503 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1507 else if (GET_CODE (op) == CONST_DOUBLE)
1509 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1512 opl = CONST_DOUBLE_LOW (op);
1513 oph = CONST_DOUBLE_HIGH (op);
1520 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1521 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1524 /* Return 1 if C is a constant that is not a logical operand (as
1525 above), but could be split into one. */
1528 non_logical_cint_operand (op, mode)
1530 enum machine_mode mode;
1532 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1533 && ! logical_operand (op, mode)
1534 && reg_or_logical_cint_operand (op, mode));
1537 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1538 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1539 Reject all ones and all zeros, since these should have been optimized
1540 away and confuse the making of MB and ME. */
1543 mask_operand (op, mode)
1545 enum machine_mode mode ATTRIBUTE_UNUSED;
1547 HOST_WIDE_INT c, lsb;
1549 if (GET_CODE (op) != CONST_INT)
1554 /* Fail in 64-bit mode if the mask wraps around because the upper
1555 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1556 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1559 /* We don't change the number of transitions by inverting,
1560 so make sure we start with the LS bit zero. */
1564 /* Reject all zeros or all ones. */
1568 /* Find the first transition. */
1571 /* Invert to look for a second transition. */
1574 /* Erase first transition. */
1577 /* Find the second transition (if any). */
1580 /* Match if all the bits above are 1's (or c is zero). */
1584 /* Return 1 for the PowerPC64 rlwinm corner case. */
1587 mask_operand_wrap (op, mode)
1589 enum machine_mode mode ATTRIBUTE_UNUSED;
1591 HOST_WIDE_INT c, lsb;
1593 if (GET_CODE (op) != CONST_INT)
1598 if ((c & 0x80000001) != 0x80000001)
1612 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1613 It is if there are no more than one 1->0 or 0->1 transitions.
1614 Reject all zeros, since zero should have been optimized away and
1615 confuses the making of MB and ME. */
1618 mask64_operand (op, mode)
1620 enum machine_mode mode ATTRIBUTE_UNUSED;
1622 if (GET_CODE (op) == CONST_INT)
1624 HOST_WIDE_INT c, lsb;
1628 /* Reject all zeros. */
1632 /* We don't change the number of transitions by inverting,
1633 so make sure we start with the LS bit zero. */
1637 /* Find the transition, and check that all bits above are 1's. */
1644 /* Like mask64_operand, but allow up to three transitions. This
1645 predicate is used by insn patterns that generate two rldicl or
1646 rldicr machine insns. */
1649 mask64_2_operand (op, mode)
1651 enum machine_mode mode ATTRIBUTE_UNUSED;
1653 if (GET_CODE (op) == CONST_INT)
1655 HOST_WIDE_INT c, lsb;
1659 /* Disallow all zeros. */
1663 /* We don't change the number of transitions by inverting,
1664 so make sure we start with the LS bit zero. */
1668 /* Find the first transition. */
1671 /* Invert to look for a second transition. */
1674 /* Erase first transition. */
1677 /* Find the second transition. */
1680 /* Invert to look for a third transition. */
1683 /* Erase second transition. */
1686 /* Find the third transition (if any). */
1689 /* Match if all the bits above are 1's (or c is zero). */
1695 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1696 implement ANDing by the mask IN. */
1698 build_mask64_2_operands (in, out)
1702 #if HOST_BITS_PER_WIDE_INT >= 64
1703 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1706 if (GET_CODE (in) != CONST_INT)
1712 /* Assume c initially something like 0x00fff000000fffff. The idea
1713 is to rotate the word so that the middle ^^^^^^ group of zeros
1714 is at the MS end and can be cleared with an rldicl mask. We then
1715 rotate back and clear off the MS ^^ group of zeros with a
1717 c = ~c; /* c == 0xff000ffffff00000 */
1718 lsb = c & -c; /* lsb == 0x0000000000100000 */
1719 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1720 c = ~c; /* c == 0x00fff000000fffff */
1721 c &= -lsb; /* c == 0x00fff00000000000 */
1722 lsb = c & -c; /* lsb == 0x0000100000000000 */
1723 c = ~c; /* c == 0xff000fffffffffff */
1724 c &= -lsb; /* c == 0xff00000000000000 */
1726 while ((lsb >>= 1) != 0)
1727 shift++; /* shift == 44 on exit from loop */
1728 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1729 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1730 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1734 /* Assume c initially something like 0xff000f0000000000. The idea
1735 is to rotate the word so that the ^^^ middle group of zeros
1736 is at the LS end and can be cleared with an rldicr mask. We then
1737 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1739 lsb = c & -c; /* lsb == 0x0000010000000000 */
1740 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1741 c = ~c; /* c == 0x00fff0ffffffffff */
1742 c &= -lsb; /* c == 0x00fff00000000000 */
1743 lsb = c & -c; /* lsb == 0x0000100000000000 */
1744 c = ~c; /* c == 0xff000fffffffffff */
1745 c &= -lsb; /* c == 0xff00000000000000 */
1747 while ((lsb >>= 1) != 0)
1748 shift++; /* shift == 44 on exit from loop */
1749 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1750 m1 >>= shift; /* m1 == 0x0000000000000fff */
1751 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1754 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1755 masks will be all 1's. We are guaranteed more than one transition. */
1756 out[0] = GEN_INT (64 - shift);
1757 out[1] = GEN_INT (m1);
1758 out[2] = GEN_INT (shift);
1759 out[3] = GEN_INT (m2);
1765 /* Return 1 if the operand is either a non-special register or a constant
1766 that can be used as the operand of a PowerPC64 logical AND insn. */
1769 and64_operand (op, mode)
1771 enum machine_mode mode;
1773 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1774 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1776 return (logical_operand (op, mode) || mask64_operand (op, mode));
1779 /* Like the above, but also match constants that can be implemented
1780 with two rldicl or rldicr insns. */
1783 and64_2_operand (op, mode)
1785 enum machine_mode mode;
1787 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1788 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1790 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1793 /* Return 1 if the operand is either a non-special register or a
1794 constant that can be used as the operand of an RS/6000 logical AND insn. */
1797 and_operand (op, mode)
1799 enum machine_mode mode;
1801 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1802 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1804 return (logical_operand (op, mode) || mask_operand (op, mode));
1807 /* Return 1 if the operand is a general register or memory operand. */
1810 reg_or_mem_operand (op, mode)
1812 enum machine_mode mode;
1814 return (gpc_reg_operand (op, mode)
1815 || memory_operand (op, mode)
1816 || volatile_mem_operand (op, mode));
1819 /* Return 1 if the operand is a general register or memory operand without
1820 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1824 lwa_operand (op, mode)
1826 enum machine_mode mode;
1830 if (reload_completed && GET_CODE (inner) == SUBREG)
1831 inner = SUBREG_REG (inner);
1833 return gpc_reg_operand (inner, mode)
1834 || (memory_operand (inner, mode)
1835 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1836 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1837 && (GET_CODE (XEXP (inner, 0)) != PLUS
1838 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1839 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1842 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1845 symbol_ref_operand (op, mode)
1847 enum machine_mode mode;
1849 if (mode != VOIDmode && GET_MODE (op) != mode)
1852 return (GET_CODE (op) == SYMBOL_REF);
1855 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1856 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1859 call_operand (op, mode)
1861 enum machine_mode mode;
1863 if (mode != VOIDmode && GET_MODE (op) != mode)
1866 return (GET_CODE (op) == SYMBOL_REF
1867 || (GET_CODE (op) == REG
1868 && (REGNO (op) == LINK_REGISTER_REGNUM
1869 || REGNO (op) == COUNT_REGISTER_REGNUM
1870 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1873 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1874 this file and the function is not weakly defined. */
1877 current_file_function_operand (op, mode)
1879 enum machine_mode mode ATTRIBUTE_UNUSED;
1881 return (GET_CODE (op) == SYMBOL_REF
1882 && (SYMBOL_REF_FLAG (op)
1883 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1884 && ! DECL_WEAK (current_function_decl))));
1887 /* Return 1 if this operand is a valid input for a move insn. */
1890 input_operand (op, mode)
1892 enum machine_mode mode;
1894 /* Memory is always valid. */
1895 if (memory_operand (op, mode))
1898 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1899 if (GET_CODE (op) == CONSTANT_P_RTX)
1902 /* For floating-point, easy constants are valid. */
1903 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1905 && easy_fp_constant (op, mode))
1908 /* Allow any integer constant. */
1909 if (GET_MODE_CLASS (mode) == MODE_INT
1910 && (GET_CODE (op) == CONST_INT
1911 || GET_CODE (op) == CONST_DOUBLE))
1914 /* For floating-point or multi-word mode, the only remaining valid type
1916 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1917 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1918 return register_operand (op, mode);
1920 /* The only cases left are integral modes one word or smaller (we
1921 do not get called for MODE_CC values). These can be in any
1923 if (register_operand (op, mode))
1926 /* A SYMBOL_REF referring to the TOC is valid. */
1927 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1930 /* A constant pool expression (relative to the TOC) is valid */
1931 if (TOC_RELATIVE_EXPR_P (op))
1934 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1936 if (DEFAULT_ABI == ABI_V4
1937 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1938 && small_data_operand (op, Pmode))
1944 /* Return 1 for an operand in small memory on V.4/eabi. */
1947 small_data_operand (op, mode)
1948 rtx op ATTRIBUTE_UNUSED;
1949 enum machine_mode mode ATTRIBUTE_UNUSED;
1954 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1957 if (DEFAULT_ABI != ABI_V4)
1960 if (GET_CODE (op) == SYMBOL_REF)
1963 else if (GET_CODE (op) != CONST
1964 || GET_CODE (XEXP (op, 0)) != PLUS
1965 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1966 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1971 rtx sum = XEXP (op, 0);
1972 HOST_WIDE_INT summand;
1974 /* We have to be careful here, because it is the referenced address
1975 that must be 32k from _SDA_BASE_, not just the symbol. */
1976 summand = INTVAL (XEXP (sum, 1));
1977 if (summand < 0 || summand > g_switch_value)
1980 sym_ref = XEXP (sum, 0);
1983 if (*XSTR (sym_ref, 0) != '@')
1994 constant_pool_expr_1 (op, have_sym, have_toc)
1999 switch (GET_CODE(op))
2002 if (CONSTANT_POOL_ADDRESS_P (op))
2004 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2012 else if (! strcmp (XSTR (op, 0), toc_label_name))
2021 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2022 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2024 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2033 constant_pool_expr_p (op)
2038 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2042 toc_relative_expr_p (op)
2047 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2050 /* Try machine-dependent ways of modifying an illegitimate address
2051 to be legitimate. If we find one, return the new, valid address.
2052 This is used from only one place: `memory_address' in explow.c.
2054 OLDX is the address as it was before break_out_memory_refs was
2055 called. In some cases it is useful to look at this to decide what
2058 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2060 It is always safe for this function to do nothing. It exists to
2061 recognize opportunities to optimize the output.
2063 On RS/6000, first check for the sum of a register with a constant
2064 integer that is out of range. If so, generate code to add the
2065 constant with the low-order 16 bits masked to the register and force
2066 this result into another register (this can be done with `cau').
2067 Then generate an address of REG+(CONST&0xffff), allowing for the
2068 possibility of bit 16 being a one.
2070 Then check for the sum of a register and something not constant, try to
2071 load the other things into a register and return the sum. */
2073 rs6000_legitimize_address (x, oldx, mode)
2075 rtx oldx ATTRIBUTE_UNUSED;
2076 enum machine_mode mode;
2078 if (GET_CODE (x) == PLUS
2079 && GET_CODE (XEXP (x, 0)) == REG
2080 && GET_CODE (XEXP (x, 1)) == CONST_INT
2081 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2083 HOST_WIDE_INT high_int, low_int;
2085 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2086 high_int = INTVAL (XEXP (x, 1)) - low_int;
2087 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2088 GEN_INT (high_int)), 0);
2089 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2091 else if (GET_CODE (x) == PLUS
2092 && GET_CODE (XEXP (x, 0)) == REG
2093 && GET_CODE (XEXP (x, 1)) != CONST_INT
2094 && GET_MODE_NUNITS (mode) == 1
2095 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2098 && (TARGET_POWERPC64 || mode != DImode)
2101 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2102 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2104 else if (ALTIVEC_VECTOR_MODE (mode))
2108 /* Make sure both operands are registers. */
2109 if (GET_CODE (x) == PLUS)
2110 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2111 force_reg (Pmode, XEXP (x, 1)));
2113 reg = force_reg (Pmode, x);
2116 else if (SPE_VECTOR_MODE (mode))
2118 /* We accept [reg + reg] and [reg + OFFSET]. */
2120 if (GET_CODE (x) == PLUS)
2122 rtx op1 = XEXP (x, 0);
2123 rtx op2 = XEXP (x, 1);
2125 op1 = force_reg (Pmode, op1);
2127 if (GET_CODE (op2) != REG
2128 && (GET_CODE (op2) != CONST_INT
2129 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2130 op2 = force_reg (Pmode, op2);
2132 return gen_rtx_PLUS (Pmode, op1, op2);
2135 return force_reg (Pmode, x);
2137 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
2138 && GET_CODE (x) != CONST_INT
2139 && GET_CODE (x) != CONST_DOUBLE
2141 && GET_MODE_NUNITS (mode) == 1
2142 && (GET_MODE_BITSIZE (mode) <= 32
2143 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2145 rtx reg = gen_reg_rtx (Pmode);
2146 emit_insn (gen_elf_high (reg, (x)));
2147 return gen_rtx_LO_SUM (Pmode, reg, (x));
2149 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2151 && GET_CODE (x) != CONST_INT
2152 && GET_CODE (x) != CONST_DOUBLE
2154 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2158 rtx reg = gen_reg_rtx (Pmode);
2159 emit_insn (gen_macho_high (reg, (x)));
2160 return gen_rtx_LO_SUM (Pmode, reg, (x));
2163 && CONSTANT_POOL_EXPR_P (x)
2164 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2166 return create_TOC_reference (x);
2172 /* The convention appears to be to define this wherever it is used.
2173 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2174 is now used here. */
2175 #ifndef REG_MODE_OK_FOR_BASE_P
2176 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2179 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2180 replace the input X, or the original X if no replacement is called for.
2181 The output parameter *WIN is 1 if the calling macro should goto WIN,
2184 For RS/6000, we wish to handle large displacements off a base
2185 register by splitting the addend across an addiu/addis and the mem insn.
2186 This cuts number of extra insns needed from 3 to 1.
2188 On Darwin, we use this to generate code for floating point constants.
2189 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2190 The Darwin code is inside #if TARGET_MACHO because only then is
2191 machopic_function_base_name() defined. */
2193 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2195 enum machine_mode mode;
2198 int ind_levels ATTRIBUTE_UNUSED;
2201 /* We must recognize output that we have already generated ourselves. */
2202 if (GET_CODE (x) == PLUS
2203 && GET_CODE (XEXP (x, 0)) == PLUS
2204 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2205 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2206 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2208 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2209 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2210 opnum, (enum reload_type)type);
2216 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2217 && GET_CODE (x) == LO_SUM
2218 && GET_CODE (XEXP (x, 0)) == PLUS
2219 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2220 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2221 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2222 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2223 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2224 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2225 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2227 /* Result of previous invocation of this function on Darwin
2228 floating point constant. */
2229 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2230 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2231 opnum, (enum reload_type)type);
2236 if (GET_CODE (x) == PLUS
2237 && GET_CODE (XEXP (x, 0)) == REG
2238 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2239 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2240 && GET_CODE (XEXP (x, 1)) == CONST_INT
2241 && !SPE_VECTOR_MODE (mode)
2242 && !ALTIVEC_VECTOR_MODE (mode))
2244 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2245 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2247 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2249 /* Check for 32-bit overflow. */
2250 if (high + low != val)
2256 /* Reload the high part into a base reg; leave the low part
2257 in the mem directly. */
2259 x = gen_rtx_PLUS (GET_MODE (x),
2260 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2264 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2265 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2266 opnum, (enum reload_type)type);
2271 if (GET_CODE (x) == SYMBOL_REF
2272 && DEFAULT_ABI == ABI_DARWIN
2273 && !ALTIVEC_VECTOR_MODE (mode)
2276 /* Darwin load of floating point constant. */
2277 rtx offset = gen_rtx (CONST, Pmode,
2278 gen_rtx (MINUS, Pmode, x,
2279 gen_rtx (SYMBOL_REF, Pmode,
2280 machopic_function_base_name ())));
2281 x = gen_rtx (LO_SUM, GET_MODE (x),
2282 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2283 gen_rtx (HIGH, Pmode, offset)), offset);
2284 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2285 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2286 opnum, (enum reload_type)type);
2292 && CONSTANT_POOL_EXPR_P (x)
2293 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2295 (x) = create_TOC_reference (x);
2303 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2304 that is a valid memory address for an instruction.
2305 The MODE argument is the machine mode for the MEM expression
2306 that wants to use this address.
2308 On the RS/6000, there are four valid address: a SYMBOL_REF that
2309 refers to a constant pool entry of an address (or the sum of it
2310 plus a constant), a short (16-bit signed) constant plus a register,
2311 the sum of two registers, or a register indirect, possibly with an
2312 auto-increment. For DFmode and DImode with an constant plus register,
2313 we must ensure that both words are addressable or PowerPC64 with offset
2316 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2317 32-bit DImode, TImode), indexed addressing cannot be used because
2318 adjacent memory cells are accessed by adding word-sized offsets
2319 during assembly output. */
2321 rs6000_legitimate_address (mode, x, reg_ok_strict)
2322 enum machine_mode mode;
2326 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2328 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2329 && !ALTIVEC_VECTOR_MODE (mode)
2330 && !SPE_VECTOR_MODE (mode)
2332 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2334 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2336 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2338 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2340 && GET_CODE (x) == PLUS
2341 && GET_CODE (XEXP (x, 0)) == REG
2342 && XEXP (x, 0) == virtual_stack_vars_rtx
2343 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2345 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2348 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2351 && (TARGET_POWERPC64 || mode != DImode)
2352 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2354 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2359 /* Try to output insns to set TARGET equal to the constant C if it can
2360 be done in less than N insns. Do all computations in MODE.
2361 Returns the place where the output has been placed if it can be
2362 done and the insns have been emitted. If it would take more than N
2363 insns, zero is returned and no insns and emitted. */
2366 rs6000_emit_set_const (dest, mode, source, n)
2368 enum machine_mode mode;
2369 int n ATTRIBUTE_UNUSED;
2371 HOST_WIDE_INT c0, c1;
2373 if (mode == QImode || mode == HImode || mode == SImode)
2376 dest = gen_reg_rtx (mode);
2377 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2381 if (GET_CODE (source) == CONST_INT)
2383 c0 = INTVAL (source);
2386 else if (GET_CODE (source) == CONST_DOUBLE)
2388 #if HOST_BITS_PER_WIDE_INT >= 64
2389 c0 = CONST_DOUBLE_LOW (source);
2392 c0 = CONST_DOUBLE_LOW (source);
2393 c1 = CONST_DOUBLE_HIGH (source);
2399 return rs6000_emit_set_long_const (dest, c0, c1);
2402 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2403 fall back to a straight forward decomposition. We do this to avoid
2404 exponential run times encountered when looking for longer sequences
2405 with rs6000_emit_set_const. */
2407 rs6000_emit_set_long_const (dest, c1, c2)
2409 HOST_WIDE_INT c1, c2;
2411 if (!TARGET_POWERPC64)
2413 rtx operand1, operand2;
2415 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2417 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2419 emit_move_insn (operand1, GEN_INT (c1));
2420 emit_move_insn (operand2, GEN_INT (c2));
2424 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2427 ud2 = (c1 & 0xffff0000) >> 16;
2428 #if HOST_BITS_PER_WIDE_INT >= 64
2432 ud4 = (c2 & 0xffff0000) >> 16;
2434 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2435 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2438 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2440 emit_move_insn (dest, GEN_INT (ud1));
2443 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2444 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2447 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2450 emit_move_insn (dest, GEN_INT (ud2 << 16));
2452 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2454 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2455 || (ud4 == 0 && ! (ud3 & 0x8000)))
2458 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2461 emit_move_insn (dest, GEN_INT (ud3 << 16));
2464 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2465 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2467 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2472 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2475 emit_move_insn (dest, GEN_INT (ud4 << 16));
2478 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2480 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2482 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2483 GEN_INT (ud2 << 16)));
2485 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2491 /* Emit a move from SOURCE to DEST in mode MODE. */
2493 rs6000_emit_move (dest, source, mode)
2496 enum machine_mode mode;
2500 operands[1] = source;
2502 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2503 if (GET_CODE (operands[1]) == CONST_DOUBLE
2504 && ! FLOAT_MODE_P (mode)
2505 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2507 /* FIXME. This should never happen. */
2508 /* Since it seems that it does, do the safe thing and convert
2510 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2512 if (GET_CODE (operands[1]) == CONST_DOUBLE
2513 && ! FLOAT_MODE_P (mode)
2514 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2515 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2516 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2517 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2520 /* Check if GCC is setting up a block move that will end up using FP
2521 registers as temporaries. We must make sure this is acceptable. */
2522 if (GET_CODE (operands[0]) == MEM
2523 && GET_CODE (operands[1]) == MEM
2525 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2526 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2527 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2528 ? 32 : MEM_ALIGN (operands[0])))
2529 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2531 : MEM_ALIGN (operands[1]))))
2532 && ! MEM_VOLATILE_P (operands [0])
2533 && ! MEM_VOLATILE_P (operands [1]))
2535 emit_move_insn (adjust_address (operands[0], SImode, 0),
2536 adjust_address (operands[1], SImode, 0));
2537 emit_move_insn (adjust_address (operands[0], SImode, 4),
2538 adjust_address (operands[1], SImode, 4));
2542 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2543 operands[1] = force_reg (mode, operands[1]);
2545 if (mode == SFmode && ! TARGET_POWERPC
2546 && TARGET_HARD_FLOAT && TARGET_FPRS
2547 && GET_CODE (operands[0]) == MEM)
2551 if (reload_in_progress || reload_completed)
2552 regnum = true_regnum (operands[1]);
2553 else if (GET_CODE (operands[1]) == REG)
2554 regnum = REGNO (operands[1]);
2558 /* If operands[1] is a register, on POWER it may have
2559 double-precision data in it, so truncate it to single
2561 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2564 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2565 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2566 operands[1] = newreg;
2570 /* Handle the case where reload calls us with an invalid address;
2571 and the case of CONSTANT_P_RTX. */
2572 if (!ALTIVEC_VECTOR_MODE (mode)
2573 && (! general_operand (operands[1], mode)
2574 || ! nonimmediate_operand (operands[0], mode)
2575 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2577 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2581 /* FIXME: In the long term, this switch statement should go away
2582 and be replaced by a sequence of tests based on things like
2588 if (CONSTANT_P (operands[1])
2589 && GET_CODE (operands[1]) != CONST_INT)
2590 operands[1] = force_const_mem (mode, operands[1]);
2596 if (CONSTANT_P (operands[1])
2597 && ! easy_fp_constant (operands[1], mode))
2598 operands[1] = force_const_mem (mode, operands[1]);
2608 if (CONSTANT_P (operands[1])
2609 && !easy_vector_constant (operands[1]))
2610 operands[1] = force_const_mem (mode, operands[1]);
2615 /* Use default pattern for address of ELF small data */
2618 && DEFAULT_ABI == ABI_V4
2619 && (GET_CODE (operands[1]) == SYMBOL_REF
2620 || GET_CODE (operands[1]) == CONST)
2621 && small_data_operand (operands[1], mode))
2623 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2627 if (DEFAULT_ABI == ABI_V4
2628 && mode == Pmode && mode == SImode
2629 && flag_pic == 1 && got_operand (operands[1], mode))
2631 emit_insn (gen_movsi_got (operands[0], operands[1]));
2635 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2636 && TARGET_NO_TOC && ! flag_pic
2638 && CONSTANT_P (operands[1])
2639 && GET_CODE (operands[1]) != HIGH
2640 && GET_CODE (operands[1]) != CONST_INT)
2642 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2644 /* If this is a function address on -mcall-aixdesc,
2645 convert it to the address of the descriptor. */
2646 if (DEFAULT_ABI == ABI_AIX
2647 && GET_CODE (operands[1]) == SYMBOL_REF
2648 && XSTR (operands[1], 0)[0] == '.')
2650 const char *name = XSTR (operands[1], 0);
2652 while (*name == '.')
2654 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2655 CONSTANT_POOL_ADDRESS_P (new_ref)
2656 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2657 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2658 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2659 operands[1] = new_ref;
2662 if (DEFAULT_ABI == ABI_DARWIN)
2664 emit_insn (gen_macho_high (target, operands[1]));
2665 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2669 emit_insn (gen_elf_high (target, operands[1]));
2670 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2674 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2675 and we have put it in the TOC, we just need to make a TOC-relative
2678 && GET_CODE (operands[1]) == SYMBOL_REF
2679 && CONSTANT_POOL_EXPR_P (operands[1])
2680 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2681 get_pool_mode (operands[1])))
2683 operands[1] = create_TOC_reference (operands[1]);
2685 else if (mode == Pmode
2686 && CONSTANT_P (operands[1])
2687 && ((GET_CODE (operands[1]) != CONST_INT
2688 && ! easy_fp_constant (operands[1], mode))
2689 || (GET_CODE (operands[1]) == CONST_INT
2690 && num_insns_constant (operands[1], mode) > 2)
2691 || (GET_CODE (operands[0]) == REG
2692 && FP_REGNO_P (REGNO (operands[0]))))
2693 && GET_CODE (operands[1]) != HIGH
2694 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2695 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2697 /* Emit a USE operation so that the constant isn't deleted if
2698 expensive optimizations are turned on because nobody
2699 references it. This should only be done for operands that
2700 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2701 This should not be done for operands that contain LABEL_REFs.
2702 For now, we just handle the obvious case. */
2703 if (GET_CODE (operands[1]) != LABEL_REF)
2704 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2707 /* Darwin uses a special PIC legitimizer. */
2708 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2711 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2713 if (operands[0] != operands[1])
2714 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2719 /* If we are to limit the number of things we put in the TOC and
2720 this is a symbol plus a constant we can add in one insn,
2721 just put the symbol in the TOC and add the constant. Don't do
2722 this if reload is in progress. */
2723 if (GET_CODE (operands[1]) == CONST
2724 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2725 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2726 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2727 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2728 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2729 && ! side_effects_p (operands[0]))
2732 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2733 rtx other = XEXP (XEXP (operands[1], 0), 1);
2735 sym = force_reg (mode, sym);
2737 emit_insn (gen_addsi3 (operands[0], sym, other));
2739 emit_insn (gen_adddi3 (operands[0], sym, other));
2743 operands[1] = force_const_mem (mode, operands[1]);
2746 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2747 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2748 get_pool_constant (XEXP (operands[1], 0)),
2749 get_pool_mode (XEXP (operands[1], 0))))
2752 = gen_rtx_MEM (mode,
2753 create_TOC_reference (XEXP (operands[1], 0)));
2754 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2755 RTX_UNCHANGING_P (operands[1]) = 1;
2761 if (GET_CODE (operands[0]) == MEM
2762 && GET_CODE (XEXP (operands[0], 0)) != REG
2763 && ! reload_in_progress)
2765 = replace_equiv_address (operands[0],
2766 copy_addr_to_reg (XEXP (operands[0], 0)));
2768 if (GET_CODE (operands[1]) == MEM
2769 && GET_CODE (XEXP (operands[1], 0)) != REG
2770 && ! reload_in_progress)
2772 = replace_equiv_address (operands[1],
2773 copy_addr_to_reg (XEXP (operands[1], 0)));
2780 /* Above, we may have called force_const_mem which may have returned
2781 an invalid address. If we can, fix this up; otherwise, reload will
2782 have to deal with it. */
2783 if (GET_CODE (operands[1]) == MEM
2784 && ! memory_address_p (mode, XEXP (operands[1], 0))
2785 && ! reload_in_progress)
2786 operands[1] = adjust_address (operands[1], mode, 0);
2788 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2792 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2793 for a call to a function whose data type is FNTYPE.
2794 For a library call, FNTYPE is 0.
2796 For incoming args we set the number of arguments in the prototype large
2797 so we never return a PARALLEL. */
2800 init_cumulative_args (cum, fntype, libname, incoming)
2801 CUMULATIVE_ARGS *cum;
2803 rtx libname ATTRIBUTE_UNUSED;
2806 static CUMULATIVE_ARGS zero_cumulative;
2808 *cum = zero_cumulative;
2810 cum->fregno = FP_ARG_MIN_REG;
2811 cum->vregno = ALTIVEC_ARG_MIN_REG;
2812 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2813 cum->call_cookie = CALL_NORMAL;
2814 cum->sysv_gregno = GP_ARG_MIN_REG;
2817 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2819 else if (cum->prototype)
2820 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2821 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2822 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2825 cum->nargs_prototype = 0;
2827 cum->orig_nargs = cum->nargs_prototype;
2829 /* Check for a longcall attribute. */
2831 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2832 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2833 cum->call_cookie = CALL_LONG;
2835 if (TARGET_DEBUG_ARG)
2837 fprintf (stderr, "\ninit_cumulative_args:");
2840 tree ret_type = TREE_TYPE (fntype);
2841 fprintf (stderr, " ret code = %s,",
2842 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2845 if (cum->call_cookie & CALL_LONG)
2846 fprintf (stderr, " longcall,");
2848 fprintf (stderr, " proto = %d, nargs = %d\n",
2849 cum->prototype, cum->nargs_prototype);
2853 /* If defined, a C expression which determines whether, and in which
2854 direction, to pad out an argument with extra space. The value
2855 should be of type `enum direction': either `upward' to pad above
2856 the argument, `downward' to pad below, or `none' to inhibit
2859 For the AIX ABI structs are always stored left shifted in their
2863 function_arg_padding (mode, type)
2864 enum machine_mode mode;
2867 if (type != 0 && AGGREGATE_TYPE_P (type))
2870 /* This is the default definition. */
2871 return (! BYTES_BIG_ENDIAN
2874 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2875 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2876 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2877 ? downward : upward));
2880 /* If defined, a C expression that gives the alignment boundary, in bits,
2881 of an argument with the specified mode and type. If it is not defined,
2882 PARM_BOUNDARY is used for all arguments.
2884 V.4 wants long longs to be double word aligned. */
2887 function_arg_boundary (mode, type)
2888 enum machine_mode mode;
2889 tree type ATTRIBUTE_UNUSED;
2891 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2893 else if (SPE_VECTOR_MODE (mode))
2895 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2898 return PARM_BOUNDARY;
2901 /* Update the data in CUM to advance over an argument
2902 of mode MODE and data type TYPE.
2903 (TYPE is null for libcalls where that information may not be available.) */
2906 function_arg_advance (cum, mode, type, named)
2907 CUMULATIVE_ARGS *cum;
2908 enum machine_mode mode;
2912 cum->nargs_prototype--;
2914 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2916 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2919 cum->words += RS6000_ARG_SIZE (mode, type);
2921 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
2922 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
2924 else if (DEFAULT_ABI == ABI_V4)
2926 if (TARGET_HARD_FLOAT && TARGET_FPRS
2927 && (mode == SFmode || mode == DFmode))
2929 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2934 cum->words += cum->words & 1;
2935 cum->words += RS6000_ARG_SIZE (mode, type);
2941 int gregno = cum->sysv_gregno;
2943 /* Aggregates and IEEE quad get passed by reference. */
2944 if ((type && AGGREGATE_TYPE_P (type))
2948 n_words = RS6000_ARG_SIZE (mode, type);
2950 /* Long long and SPE vectors are put in odd registers. */
2951 if (n_words == 2 && (gregno & 1) == 0)
2954 /* Long long and SPE vectors are not split between registers
2956 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2958 /* Long long is aligned on the stack. */
2960 cum->words += cum->words & 1;
2961 cum->words += n_words;
2964 /* Note: continuing to accumulate gregno past when we've started
2965 spilling to the stack indicates the fact that we've started
2966 spilling to the stack to expand_builtin_saveregs. */
2967 cum->sysv_gregno = gregno + n_words;
2970 if (TARGET_DEBUG_ARG)
2972 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2973 cum->words, cum->fregno);
2974 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2975 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2976 fprintf (stderr, "mode = %4s, named = %d\n",
2977 GET_MODE_NAME (mode), named);
2982 int align = (TARGET_32BIT && (cum->words & 1) != 0
2983 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2985 cum->words += align + RS6000_ARG_SIZE (mode, type);
2987 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2988 && TARGET_HARD_FLOAT && TARGET_FPRS)
2991 if (TARGET_DEBUG_ARG)
2993 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2994 cum->words, cum->fregno);
2995 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2996 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2997 fprintf (stderr, "named = %d, align = %d\n", named, align);
3002 /* Determine where to put an argument to a function.
3003 Value is zero to push the argument on the stack,
3004 or a hard register in which to store the argument.
3006 MODE is the argument's machine mode.
3007 TYPE is the data type of the argument (as a tree).
3008 This is null for libcalls where that information may
3010 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3011 the preceding args and about the function being called.
3012 NAMED is nonzero if this argument is a named parameter
3013 (otherwise it is an extra parameter matching an ellipsis).
3015 On RS/6000 the first eight words of non-FP are normally in registers
3016 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3017 Under V.4, the first 8 FP args are in registers.
3019 If this is floating-point and no prototype is specified, we use
3020 both an FP and integer register (or possibly FP reg and stack). Library
3021 functions (when TYPE is zero) always have the proper types for args,
3022 so we can pass the FP value just in one register. emit_library_function
3023 doesn't support PARALLEL anyway. */
3026 function_arg (cum, mode, type, named)
3027 CUMULATIVE_ARGS *cum;
3028 enum machine_mode mode;
3032 enum rs6000_abi abi = DEFAULT_ABI;
3034 /* Return a marker to indicate whether CR1 needs to set or clear the
3035 bit that V.4 uses to say fp args were passed in registers.
3036 Assume that we don't need the marker for software floating point,
3037 or compiler generated library calls. */
3038 if (mode == VOIDmode)
3041 && cum->nargs_prototype < 0
3042 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3044 /* For the SPE, we need to crxor CR6 always. */
3046 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3047 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3048 return GEN_INT (cum->call_cookie
3049 | ((cum->fregno == FP_ARG_MIN_REG)
3050 ? CALL_V4_SET_FP_ARGS
3051 : CALL_V4_CLEAR_FP_ARGS));
3054 return GEN_INT (cum->call_cookie);
3057 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3059 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3060 return gen_rtx_REG (mode, cum->vregno);
3064 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3066 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3067 return gen_rtx_REG (mode, cum->sysv_gregno);
3071 else if (abi == ABI_V4)
3073 if (TARGET_HARD_FLOAT && TARGET_FPRS
3074 && (mode == SFmode || mode == DFmode))
3076 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3077 return gen_rtx_REG (mode, cum->fregno);
3084 int gregno = cum->sysv_gregno;
3086 /* Aggregates and IEEE quad get passed by reference. */
3087 if ((type && AGGREGATE_TYPE_P (type))
3091 n_words = RS6000_ARG_SIZE (mode, type);
3093 /* Long long and SPE vectors are put in odd registers. */
3094 if (n_words == 2 && (gregno & 1) == 0)
3097 /* Long long and SPE vectors are not split between registers
3099 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3101 /* SPE vectors in ... get split into 2 registers. */
3102 if (TARGET_SPE && TARGET_SPE_ABI
3103 && SPE_VECTOR_MODE (mode) && !named)
3106 enum machine_mode m = GET_MODE_INNER (mode);
3108 r1 = gen_rtx_REG (m, gregno);
3109 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3110 r2 = gen_rtx_REG (m, gregno + 1);
3111 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3112 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3114 return gen_rtx_REG (mode, gregno);
3122 int align = (TARGET_32BIT && (cum->words & 1) != 0
3123 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3124 int align_words = cum->words + align;
3126 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3129 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3132 || ((cum->nargs_prototype > 0)
3133 /* IBM AIX extended its linkage convention definition always
3134 to require FP args after register save area hole on the
3136 && (DEFAULT_ABI != ABI_AIX
3138 || (align_words < GP_ARG_NUM_REG))))
3139 return gen_rtx_REG (mode, cum->fregno);
3141 return gen_rtx_PARALLEL (mode,
3143 gen_rtx_EXPR_LIST (VOIDmode,
3144 ((align_words >= GP_ARG_NUM_REG)
3147 + RS6000_ARG_SIZE (mode, type)
3149 /* If this is partially on the stack, then
3150 we only include the portion actually
3151 in registers here. */
3152 ? gen_rtx_REG (SImode,
3153 GP_ARG_MIN_REG + align_words)
3154 : gen_rtx_REG (mode,
3155 GP_ARG_MIN_REG + align_words))),
3157 gen_rtx_EXPR_LIST (VOIDmode,
3158 gen_rtx_REG (mode, cum->fregno),
3161 else if (align_words < GP_ARG_NUM_REG)
3162 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3168 /* For an arg passed partly in registers and partly in memory,
3169 this is the number of registers used.
3170 For args passed entirely in registers or entirely in memory, zero. */
3173 function_arg_partial_nregs (cum, mode, type, named)
3174 CUMULATIVE_ARGS *cum;
3175 enum machine_mode mode;
3177 int named ATTRIBUTE_UNUSED;
3179 if (DEFAULT_ABI == ABI_V4)
3182 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3183 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3185 if (cum->nargs_prototype >= 0)
3189 if (cum->words < GP_ARG_NUM_REG
3190 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3192 int ret = GP_ARG_NUM_REG - cum->words;
3193 if (ret && TARGET_DEBUG_ARG)
3194 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3202 /* A C expression that indicates when an argument must be passed by
3203 reference. If nonzero for an argument, a copy of that argument is
3204 made in memory and a pointer to the argument is passed instead of
3205 the argument itself. The pointer is passed in whatever way is
3206 appropriate for passing a pointer to that type.
3208 Under V.4, structures and unions are passed by reference. */
3211 function_arg_pass_by_reference (cum, mode, type, named)
3212 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3213 enum machine_mode mode ATTRIBUTE_UNUSED;
3215 int named ATTRIBUTE_UNUSED;
3217 if (DEFAULT_ABI == ABI_V4
3218 && ((type && AGGREGATE_TYPE_P (type))
3221 if (TARGET_DEBUG_ARG)
3222 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3230 /* Perform any needed actions needed for a function that is receiving a
3231 variable number of arguments.
3235 MODE and TYPE are the mode and type of the current parameter.
3237 PRETEND_SIZE is a variable that should be set to the amount of stack
3238 that must be pushed by the prolog to pretend that our caller pushed
3241 Normally, this macro will push all remaining incoming registers on the
3242 stack and set PRETEND_SIZE to the length of the registers pushed. */
3245 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3246 CUMULATIVE_ARGS *cum;
3247 enum machine_mode mode;
3253 CUMULATIVE_ARGS next_cum;
3254 int reg_size = TARGET_32BIT ? 4 : 8;
3255 rtx save_area = NULL_RTX, mem;
3256 int first_reg_offset, set;
3260 fntype = TREE_TYPE (current_function_decl);
3261 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3262 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3263 != void_type_node));
3265 /* For varargs, we do not want to skip the dummy va_dcl argument.
3266 For stdargs, we do want to skip the last named argument. */
3269 function_arg_advance (&next_cum, mode, type, 1);
3271 if (DEFAULT_ABI == ABI_V4)
3273 /* Indicate to allocate space on the stack for varargs save area. */
3274 cfun->machine->sysv_varargs_p = 1;
3276 save_area = plus_constant (virtual_stack_vars_rtx,
3277 - RS6000_VARARGS_SIZE);
3279 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3283 first_reg_offset = next_cum.words;
3284 save_area = virtual_incoming_args_rtx;
3285 cfun->machine->sysv_varargs_p = 0;
3287 if (MUST_PASS_IN_STACK (mode, type))
3288 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3291 set = get_varargs_alias_set ();
3292 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3294 mem = gen_rtx_MEM (BLKmode,
3295 plus_constant (save_area,
3296 first_reg_offset * reg_size)),
3297 set_mem_alias_set (mem, set);
3298 set_mem_align (mem, BITS_PER_WORD);
3301 (GP_ARG_MIN_REG + first_reg_offset, mem,
3302 GP_ARG_NUM_REG - first_reg_offset,
3303 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3305 /* ??? Does ABI_V4 need this at all? */
3306 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3309 /* Save FP registers if needed. */
3310 if (DEFAULT_ABI == ABI_V4
3311 && TARGET_HARD_FLOAT && TARGET_FPRS
3313 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3315 int fregno = next_cum.fregno;
3316 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3317 rtx lab = gen_label_rtx ();
3318 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3320 emit_jump_insn (gen_rtx_SET (VOIDmode,
3322 gen_rtx_IF_THEN_ELSE (VOIDmode,
3323 gen_rtx_NE (VOIDmode, cr1,
3325 gen_rtx_LABEL_REF (VOIDmode, lab),
3328 while (fregno <= FP_ARG_V4_MAX_REG)
3330 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3331 set_mem_alias_set (mem, set);
3332 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3341 /* Create the va_list data type. */
3344 rs6000_build_va_list ()
3346 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3348 /* For AIX, prefer 'char *' because that's what the system
3349 header files like. */
3350 if (DEFAULT_ABI != ABI_V4)
3351 return build_pointer_type (char_type_node);
3353 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3354 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3356 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3357 unsigned_char_type_node);
3358 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3359 unsigned_char_type_node);
3360 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3362 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3365 DECL_FIELD_CONTEXT (f_gpr) = record;
3366 DECL_FIELD_CONTEXT (f_fpr) = record;
3367 DECL_FIELD_CONTEXT (f_ovf) = record;
3368 DECL_FIELD_CONTEXT (f_sav) = record;
3370 TREE_CHAIN (record) = type_decl;
3371 TYPE_NAME (record) = type_decl;
3372 TYPE_FIELDS (record) = f_gpr;
3373 TREE_CHAIN (f_gpr) = f_fpr;
3374 TREE_CHAIN (f_fpr) = f_ovf;
3375 TREE_CHAIN (f_ovf) = f_sav;
3377 layout_type (record);
3379 /* The correct type is an array type of one element. */
3380 return build_array_type (record, build_index_type (size_zero_node));
3383 /* Implement va_start. */
3386 rs6000_va_start (valist, nextarg)
3390 HOST_WIDE_INT words, n_gpr, n_fpr;
3391 tree f_gpr, f_fpr, f_ovf, f_sav;
3392 tree gpr, fpr, ovf, sav, t;
3394 /* Only SVR4 needs something special. */
3395 if (DEFAULT_ABI != ABI_V4)
3397 std_expand_builtin_va_start (valist, nextarg);
3401 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3402 f_fpr = TREE_CHAIN (f_gpr);
3403 f_ovf = TREE_CHAIN (f_fpr);
3404 f_sav = TREE_CHAIN (f_ovf);
3406 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3407 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3408 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3409 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3410 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3412 /* Count number of gp and fp argument registers used. */
3413 words = current_function_args_info.words;
3414 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3415 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3417 if (TARGET_DEBUG_ARG)
3419 fputs ("va_start: words = ", stderr);
3420 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3421 fputs (", n_gpr = ", stderr);
3422 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3423 fputs (", n_fpr = ", stderr);
3424 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3425 putc ('\n', stderr);
3428 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3429 TREE_SIDE_EFFECTS (t) = 1;
3430 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3432 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3433 TREE_SIDE_EFFECTS (t) = 1;
3434 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3436 /* Find the overflow area. */
3437 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3439 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3440 build_int_2 (words * UNITS_PER_WORD, 0));
3441 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3442 TREE_SIDE_EFFECTS (t) = 1;
3443 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3445 /* Find the register save area. */
3446 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3447 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3448 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3449 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3450 TREE_SIDE_EFFECTS (t) = 1;
3451 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3454 /* Implement va_arg. */
3457 rs6000_va_arg (valist, type)
3460 tree f_gpr, f_fpr, f_ovf, f_sav;
3461 tree gpr, fpr, ovf, sav, reg, t, u;
3462 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3463 rtx lab_false, lab_over, addr_rtx, r;
3465 if (DEFAULT_ABI != ABI_V4)
3466 return std_expand_builtin_va_arg (valist, type);
3468 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3469 f_fpr = TREE_CHAIN (f_gpr);
3470 f_ovf = TREE_CHAIN (f_fpr);
3471 f_sav = TREE_CHAIN (f_ovf);
3473 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3474 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3475 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3476 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3477 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3479 size = int_size_in_bytes (type);
3480 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3482 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3484 /* Aggregates and long doubles are passed by reference. */
3490 size = UNITS_PER_WORD;
3493 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3495 /* FP args go in FP registers, if present. */
3504 /* Otherwise into GP registers. */
3512 /* Pull the value out of the saved registers ... */
3514 lab_false = gen_label_rtx ();
3515 lab_over = gen_label_rtx ();
3516 addr_rtx = gen_reg_rtx (Pmode);
3518 /* AltiVec vectors never go in registers. */
3519 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3521 TREE_THIS_VOLATILE (reg) = 1;
3522 emit_cmp_and_jump_insns
3523 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3524 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3527 /* Long long is aligned in the registers. */
3530 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3531 build_int_2 (n_reg - 1, 0));
3532 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3533 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3534 TREE_SIDE_EFFECTS (u) = 1;
3535 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3539 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3543 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3544 build_int_2 (n_reg, 0));
3545 TREE_SIDE_EFFECTS (u) = 1;
3547 u = build1 (CONVERT_EXPR, integer_type_node, u);
3548 TREE_SIDE_EFFECTS (u) = 1;
3550 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3551 TREE_SIDE_EFFECTS (u) = 1;
3553 t = build (PLUS_EXPR, ptr_type_node, t, u);
3554 TREE_SIDE_EFFECTS (t) = 1;
3556 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3558 emit_move_insn (addr_rtx, r);
3560 emit_jump_insn (gen_jump (lab_over));
3564 emit_label (lab_false);
3566 /* ... otherwise out of the overflow area. */
3568 /* Make sure we don't find reg 7 for the next int arg.
3570 All AltiVec vectors go in the overflow area. So in the AltiVec
3571 case we need to get the vectors from the overflow area, but
3572 remember where the GPRs and FPRs are. */
3573 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3574 || !TARGET_ALTIVEC))
3576 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3577 TREE_SIDE_EFFECTS (t) = 1;
3578 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 /* Care for on-stack alignment if needed. */
3588 /* AltiVec vectors are 16 byte aligned. */
3589 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3594 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3595 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3599 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3601 emit_move_insn (addr_rtx, r);
3603 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3604 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3605 TREE_SIDE_EFFECTS (t) = 1;
3606 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3608 emit_label (lab_over);
3612 r = gen_rtx_MEM (Pmode, addr_rtx);
3613 set_mem_alias_set (r, get_varargs_alias_set ());
3614 emit_move_insn (addr_rtx, r);
3622 #define def_builtin(MASK, NAME, TYPE, CODE) \
3624 if ((MASK) & target_flags) \
3625 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3629 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3631 static const struct builtin_description bdesc_3arg[] =
3633 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3634 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3635 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3636 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3637 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3638 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3639 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3640 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3641 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3642 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3643 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3644 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3645 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3646 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3647 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3648 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3649 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3650 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3651 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3652 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3653 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3654 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3655 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3658 /* DST operations: void foo (void *, const int, const char). */
3660 static const struct builtin_description bdesc_dst[] =
3662 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3663 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3664 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3665 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3668 /* Simple binary operations: VECc = foo (VECa, VECb). */
3670 static struct builtin_description bdesc_2arg[] =
3672 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3673 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3674 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3675 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3676 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3677 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3678 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3679 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3680 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3681 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3682 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3683 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3684 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3685 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3686 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3687 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3688 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3689 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3690 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3691 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3692 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3693 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3694 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3695 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3696 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3697 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3698 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3699 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3700 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3701 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3702 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3703 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3704 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3705 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3706 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3707 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3708 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3709 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3710 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3711 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3712 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3713 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3714 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3715 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3716 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3717 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3718 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3719 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3720 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3721 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3722 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3723 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3724 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3725 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3726 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3727 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3728 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3729 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3730 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3731 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3732 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3733 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3734 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3735 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3736 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3737 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3738 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3739 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3740 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3741 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3742 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3743 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3744 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3745 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3746 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3747 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3748 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3749 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3750 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3751 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3752 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3753 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3754 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3755 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3756 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3757 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3758 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3759 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3760 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3761 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3762 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3763 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3764 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3765 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3766 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3767 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3768 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3769 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3770 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3771 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3772 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3773 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3774 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3775 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3776 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3777 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3778 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3779 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3780 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3784 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3786 /* Place holder, leave as first spe builtin. */
3787 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3788 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3789 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3790 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3791 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3792 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3793 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3794 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3795 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3796 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3797 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3798 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3799 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3800 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3801 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3802 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3803 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3804 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3805 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3806 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3807 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3808 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3809 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3810 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3811 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3812 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3813 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3814 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3815 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3816 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3817 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3818 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3819 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3820 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3821 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3822 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3823 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3824 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3825 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3826 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3827 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3828 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3829 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3830 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3831 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3832 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3833 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3834 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3835 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3836 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3837 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3838 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3839 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3840 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3841 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3842 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3843 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3844 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3845 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3846 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3847 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3848 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3849 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3850 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3851 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3852 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3853 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3854 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3855 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3856 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3857 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3858 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3859 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3860 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3861 { 0, CODE_FOR_spe_evmwlsmf, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF },
3862 { 0, CODE_FOR_spe_evmwlsmfa, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA },
3863 { 0, CODE_FOR_spe_evmwlsmfaaw, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW },
3864 { 0, CODE_FOR_spe_evmwlsmfanw, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW },
3865 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3866 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3867 { 0, CODE_FOR_spe_evmwlssf, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF },
3868 { 0, CODE_FOR_spe_evmwlssfa, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA },
3869 { 0, CODE_FOR_spe_evmwlssfaaw, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW },
3870 { 0, CODE_FOR_spe_evmwlssfanw, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW },
3871 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3872 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3873 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3874 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3875 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3876 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3877 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3878 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3879 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3880 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3881 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3882 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3883 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3884 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3885 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3886 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3887 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3888 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3889 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3890 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3891 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3892 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3893 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3894 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3895 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3896 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3897 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3898 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3899 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3900 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3901 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3902 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3903 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3905 /* SPE binary operations expecting a 5-bit unsigned literal. */
3906 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3908 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3909 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3910 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3911 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3912 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3913 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3914 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3915 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3916 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3917 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3918 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3919 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3920 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3921 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3922 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3923 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3924 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
3925 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
3926 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
3927 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
3928 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
3929 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
3930 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
3931 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
3932 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
3933 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
3935 /* Place-holder. Leave as last binary SPE builtin. */
3936 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
3939 /* AltiVec predicates. */
3941 struct builtin_description_predicates
3943 const unsigned int mask;
3944 const enum insn_code icode;
3946 const char *const name;
3947 const enum rs6000_builtins code;
3950 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3952 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3953 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3954 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3955 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3956 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3957 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3958 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3959 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3960 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3961 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3962 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3963 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3964 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3967 /* SPE predicates. */
3968 static struct builtin_description bdesc_spe_predicates[] =
3970 /* Place-holder. Leave as first. */
3971 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
3972 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
3973 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
3974 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
3975 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
3976 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
3977 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
3978 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
3979 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
3980 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
3981 /* Place-holder. Leave as last. */
3982 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
3985 /* SPE evsel predicates. */
3986 static struct builtin_description bdesc_spe_evsel[] =
3988 /* Place-holder. Leave as first. */
3989 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
3990 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
3991 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
3992 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
3993 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
3994 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
3995 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
3996 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
3997 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
3998 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
3999 /* Place-holder. Leave as last. */
4000 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4003 /* ABS* opreations. */
4005 static const struct builtin_description bdesc_abs[] =
4007 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4008 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4009 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4010 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4011 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4012 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4013 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4016 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4019 static struct builtin_description bdesc_1arg[] =
4021 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4022 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4023 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4024 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4025 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4026 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4027 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4028 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4029 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4030 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4031 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4032 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4033 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4034 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4035 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4036 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4037 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4039 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4040 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4041 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4042 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4043 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4044 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4045 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4046 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4047 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4048 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4049 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4050 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4051 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4052 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4053 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4054 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4055 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4056 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4057 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4058 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4059 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4060 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4061 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4062 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4063 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4064 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4065 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4066 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4067 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4068 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4069 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4070 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4072 /* Place-holder. Leave as last unary SPE builtin. */
4073 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4077 rs6000_expand_unop_builtin (icode, arglist, target)
4078 enum insn_code icode;
4083 tree arg0 = TREE_VALUE (arglist);
4084 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4085 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4086 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4088 if (icode == CODE_FOR_nothing)
4089 /* Builtin not supported on this processor. */
4092 /* If we got invalid arguments bail out before generating bad rtl. */
4093 if (arg0 == error_mark_node)
4096 if (icode == CODE_FOR_altivec_vspltisb
4097 || icode == CODE_FOR_altivec_vspltish
4098 || icode == CODE_FOR_altivec_vspltisw
4099 || icode == CODE_FOR_spe_evsplatfi
4100 || icode == CODE_FOR_spe_evsplati)
4102 /* Only allow 5-bit *signed* literals. */
4103 if (GET_CODE (op0) != CONST_INT
4104 || INTVAL (op0) > 0x1f
4105 || INTVAL (op0) < -0x1f)
4107 error ("argument 1 must be a 5-bit signed literal");
4113 || GET_MODE (target) != tmode
4114 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4115 target = gen_reg_rtx (tmode);
4117 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4118 op0 = copy_to_mode_reg (mode0, op0);
4120 pat = GEN_FCN (icode) (target, op0);
4129 altivec_expand_abs_builtin (icode, arglist, target)
4130 enum insn_code icode;
4134 rtx pat, scratch1, scratch2;
4135 tree arg0 = TREE_VALUE (arglist);
4136 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4137 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4138 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4140 /* If we have invalid arguments, bail out before generating bad rtl. */
4141 if (arg0 == error_mark_node)
4145 || GET_MODE (target) != tmode
4146 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4147 target = gen_reg_rtx (tmode);
4149 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4150 op0 = copy_to_mode_reg (mode0, op0);
4152 scratch1 = gen_reg_rtx (mode0);
4153 scratch2 = gen_reg_rtx (mode0);
4155 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4164 rs6000_expand_binop_builtin (icode, arglist, target)
4165 enum insn_code icode;
4170 tree arg0 = TREE_VALUE (arglist);
4171 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4172 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4173 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4174 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4175 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4176 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4178 if (icode == CODE_FOR_nothing)
4179 /* Builtin not supported on this processor. */
4182 /* If we got invalid arguments bail out before generating bad rtl. */
4183 if (arg0 == error_mark_node || arg1 == error_mark_node)
4186 if (icode == CODE_FOR_altivec_vcfux
4187 || icode == CODE_FOR_altivec_vcfsx
4188 || icode == CODE_FOR_altivec_vctsxs
4189 || icode == CODE_FOR_altivec_vctuxs
4190 || icode == CODE_FOR_altivec_vspltb
4191 || icode == CODE_FOR_altivec_vsplth
4192 || icode == CODE_FOR_altivec_vspltw
4193 || icode == CODE_FOR_spe_evaddiw
4194 || icode == CODE_FOR_spe_evldd
4195 || icode == CODE_FOR_spe_evldh
4196 || icode == CODE_FOR_spe_evldw
4197 || icode == CODE_FOR_spe_evlhhesplat
4198 || icode == CODE_FOR_spe_evlhhossplat
4199 || icode == CODE_FOR_spe_evlhhousplat
4200 || icode == CODE_FOR_spe_evlwhe
4201 || icode == CODE_FOR_spe_evlwhos
4202 || icode == CODE_FOR_spe_evlwhou
4203 || icode == CODE_FOR_spe_evlwhsplat
4204 || icode == CODE_FOR_spe_evlwwsplat
4205 || icode == CODE_FOR_spe_evrlwi
4206 || icode == CODE_FOR_spe_evslwi
4207 || icode == CODE_FOR_spe_evsrwis
4208 || icode == CODE_FOR_spe_evsrwiu)
4210 /* Only allow 5-bit unsigned literals. */
4211 if (TREE_CODE (arg1) != INTEGER_CST
4212 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4214 error ("argument 2 must be a 5-bit unsigned literal");
4220 || GET_MODE (target) != tmode
4221 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4222 target = gen_reg_rtx (tmode);
4224 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4225 op0 = copy_to_mode_reg (mode0, op0);
4226 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4227 op1 = copy_to_mode_reg (mode1, op1);
4229 pat = GEN_FCN (icode) (target, op0, op1);
4238 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4239 enum insn_code icode;
4245 tree cr6_form = TREE_VALUE (arglist);
4246 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4247 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4248 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4249 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4250 enum machine_mode tmode = SImode;
4251 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4252 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4255 if (TREE_CODE (cr6_form) != INTEGER_CST)
4257 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4261 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4266 /* If we have invalid arguments, bail out before generating bad rtl. */
4267 if (arg0 == error_mark_node || arg1 == error_mark_node)
4271 || GET_MODE (target) != tmode
4272 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4273 target = gen_reg_rtx (tmode);
4275 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4276 op0 = copy_to_mode_reg (mode0, op0);
4277 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4278 op1 = copy_to_mode_reg (mode1, op1);
4280 scratch = gen_reg_rtx (mode0);
4282 pat = GEN_FCN (icode) (scratch, op0, op1,
4283 gen_rtx (SYMBOL_REF, Pmode, opcode));
4288 /* The vec_any* and vec_all* predicates use the same opcodes for two
4289 different operations, but the bits in CR6 will be different
4290 depending on what information we want. So we have to play tricks
4291 with CR6 to get the right bits out.
4293 If you think this is disgusting, look at the specs for the
4294 AltiVec predicates. */
4296 switch (cr6_form_int)
4299 emit_insn (gen_cr6_test_for_zero (target));
4302 emit_insn (gen_cr6_test_for_zero_reverse (target));
4305 emit_insn (gen_cr6_test_for_lt (target));
4308 emit_insn (gen_cr6_test_for_lt_reverse (target));
4311 error ("argument 1 of __builtin_altivec_predicate is out of range");
4319 altivec_expand_stv_builtin (icode, arglist)
4320 enum insn_code icode;
4323 tree arg0 = TREE_VALUE (arglist);
4324 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4325 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4326 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4327 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4328 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4330 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4331 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4332 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4334 /* Invalid arguments. Bail before doing anything stoopid! */
4335 if (arg0 == error_mark_node
4336 || arg1 == error_mark_node
4337 || arg2 == error_mark_node)
4340 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4341 op0 = copy_to_mode_reg (mode2, op0);
4342 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4343 op1 = copy_to_mode_reg (mode0, op1);
4344 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4345 op2 = copy_to_mode_reg (mode1, op2);
4347 pat = GEN_FCN (icode) (op1, op2, op0);
4354 rs6000_expand_ternop_builtin (icode, arglist, target)
4355 enum insn_code icode;
4360 tree arg0 = TREE_VALUE (arglist);
4361 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4362 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4363 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4364 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4365 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4366 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4367 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4368 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4369 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4371 if (icode == CODE_FOR_nothing)
4372 /* Builtin not supported on this processor. */
4375 /* If we got invalid arguments bail out before generating bad rtl. */
4376 if (arg0 == error_mark_node
4377 || arg1 == error_mark_node
4378 || arg2 == error_mark_node)
4381 if (icode == CODE_FOR_altivec_vsldoi_4sf
4382 || icode == CODE_FOR_altivec_vsldoi_4si
4383 || icode == CODE_FOR_altivec_vsldoi_8hi
4384 || icode == CODE_FOR_altivec_vsldoi_16qi)
4386 /* Only allow 4-bit unsigned literals. */
4387 if (TREE_CODE (arg2) != INTEGER_CST
4388 || TREE_INT_CST_LOW (arg2) & ~0xf)
4390 error ("argument 3 must be a 4-bit unsigned literal");
4396 || GET_MODE (target) != tmode
4397 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4398 target = gen_reg_rtx (tmode);
4400 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4401 op0 = copy_to_mode_reg (mode0, op0);
4402 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4403 op1 = copy_to_mode_reg (mode1, op1);
4404 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4405 op2 = copy_to_mode_reg (mode2, op2);
4407 pat = GEN_FCN (icode) (target, op0, op1, op2);
4415 /* Expand the lvx builtins. */
4417 altivec_expand_ld_builtin (exp, target, expandedp)
4422 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4423 tree arglist = TREE_OPERAND (exp, 1);
4424 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4426 enum machine_mode tmode, mode0;
4428 enum insn_code icode;
4432 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4433 icode = CODE_FOR_altivec_lvx_16qi;
4435 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4436 icode = CODE_FOR_altivec_lvx_8hi;
4438 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4439 icode = CODE_FOR_altivec_lvx_4si;
4441 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4442 icode = CODE_FOR_altivec_lvx_4sf;
4451 arg0 = TREE_VALUE (arglist);
4452 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4453 tmode = insn_data[icode].operand[0].mode;
4454 mode0 = insn_data[icode].operand[1].mode;
4457 || GET_MODE (target) != tmode
4458 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4459 target = gen_reg_rtx (tmode);
4461 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4462 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4464 pat = GEN_FCN (icode) (target, op0);
4471 /* Expand the stvx builtins. */
4473 altivec_expand_st_builtin (exp, target, expandedp)
4475 rtx target ATTRIBUTE_UNUSED;
4478 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4479 tree arglist = TREE_OPERAND (exp, 1);
4480 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4482 enum machine_mode mode0, mode1;
4484 enum insn_code icode;
4488 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4489 icode = CODE_FOR_altivec_stvx_16qi;
4491 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4492 icode = CODE_FOR_altivec_stvx_8hi;
4494 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4495 icode = CODE_FOR_altivec_stvx_4si;
4497 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4498 icode = CODE_FOR_altivec_stvx_4sf;
4505 arg0 = TREE_VALUE (arglist);
4506 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4507 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4508 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4509 mode0 = insn_data[icode].operand[0].mode;
4510 mode1 = insn_data[icode].operand[1].mode;
4512 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4513 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4514 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4515 op1 = copy_to_mode_reg (mode1, op1);
4517 pat = GEN_FCN (icode) (op0, op1);
4525 /* Expand the dst builtins. */
4527 altivec_expand_dst_builtin (exp, target, expandedp)
4529 rtx target ATTRIBUTE_UNUSED;
4532 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4533 tree arglist = TREE_OPERAND (exp, 1);
4534 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4535 tree arg0, arg1, arg2;
4536 enum machine_mode mode0, mode1, mode2;
4537 rtx pat, op0, op1, op2;
4538 struct builtin_description *d;
4543 /* Handle DST variants. */
4544 d = (struct builtin_description *) bdesc_dst;
4545 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4546 if (d->code == fcode)
4548 arg0 = TREE_VALUE (arglist);
4549 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4550 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4551 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4552 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4553 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4554 mode0 = insn_data[d->icode].operand[0].mode;
4555 mode1 = insn_data[d->icode].operand[1].mode;
4556 mode2 = insn_data[d->icode].operand[2].mode;
4558 /* Invalid arguments, bail out before generating bad rtl. */
4559 if (arg0 == error_mark_node
4560 || arg1 == error_mark_node
4561 || arg2 == error_mark_node)
4564 if (TREE_CODE (arg2) != INTEGER_CST
4565 || TREE_INT_CST_LOW (arg2) & ~0x3)
4567 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4571 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4572 op0 = copy_to_mode_reg (mode0, op0);
4573 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4574 op1 = copy_to_mode_reg (mode1, op1);
4576 pat = GEN_FCN (d->icode) (op0, op1, op2);
4587 /* Expand the builtin in EXP and store the result in TARGET. Store
4588 true in *EXPANDEDP if we found a builtin to expand. */
4590 altivec_expand_builtin (exp, target, expandedp)
4595 struct builtin_description *d;
4596 struct builtin_description_predicates *dp;
4598 enum insn_code icode;
4599 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4600 tree arglist = TREE_OPERAND (exp, 1);
4603 enum machine_mode tmode, mode0;
4604 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4606 target = altivec_expand_ld_builtin (exp, target, expandedp);
4610 target = altivec_expand_st_builtin (exp, target, expandedp);
4614 target = altivec_expand_dst_builtin (exp, target, expandedp);
4622 case ALTIVEC_BUILTIN_STVX:
4623 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4624 case ALTIVEC_BUILTIN_STVEBX:
4625 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4626 case ALTIVEC_BUILTIN_STVEHX:
4627 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4628 case ALTIVEC_BUILTIN_STVEWX:
4629 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4630 case ALTIVEC_BUILTIN_STVXL:
4631 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4633 case ALTIVEC_BUILTIN_MFVSCR:
4634 icode = CODE_FOR_altivec_mfvscr;
4635 tmode = insn_data[icode].operand[0].mode;
4638 || GET_MODE (target) != tmode
4639 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4640 target = gen_reg_rtx (tmode);
4642 pat = GEN_FCN (icode) (target);
4648 case ALTIVEC_BUILTIN_MTVSCR:
4649 icode = CODE_FOR_altivec_mtvscr;
4650 arg0 = TREE_VALUE (arglist);
4651 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4652 mode0 = insn_data[icode].operand[0].mode;
4654 /* If we got invalid arguments bail out before generating bad rtl. */
4655 if (arg0 == error_mark_node)
4658 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4659 op0 = copy_to_mode_reg (mode0, op0);
4661 pat = GEN_FCN (icode) (op0);
4666 case ALTIVEC_BUILTIN_DSSALL:
4667 emit_insn (gen_altivec_dssall ());
4670 case ALTIVEC_BUILTIN_DSS:
4671 icode = CODE_FOR_altivec_dss;
4672 arg0 = TREE_VALUE (arglist);
4673 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4674 mode0 = insn_data[icode].operand[0].mode;
4676 /* If we got invalid arguments bail out before generating bad rtl. */
4677 if (arg0 == error_mark_node)
4680 if (TREE_CODE (arg0) != INTEGER_CST
4681 || TREE_INT_CST_LOW (arg0) & ~0x3)
4683 error ("argument to dss must be a 2-bit unsigned literal");
4687 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4688 op0 = copy_to_mode_reg (mode0, op0);
4690 emit_insn (gen_altivec_dss (op0));
4694 /* Expand abs* operations. */
4695 d = (struct builtin_description *) bdesc_abs;
4696 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4697 if (d->code == fcode)
4698 return altivec_expand_abs_builtin (d->icode, arglist, target);
4700 /* Expand the AltiVec predicates. */
4701 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4702 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4703 if (dp->code == fcode)
4704 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4706 /* LV* are funky. We initialized them differently. */
4709 case ALTIVEC_BUILTIN_LVSL:
4710 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4712 case ALTIVEC_BUILTIN_LVSR:
4713 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4715 case ALTIVEC_BUILTIN_LVEBX:
4716 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4718 case ALTIVEC_BUILTIN_LVEHX:
4719 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4721 case ALTIVEC_BUILTIN_LVEWX:
4722 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4724 case ALTIVEC_BUILTIN_LVXL:
4725 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4727 case ALTIVEC_BUILTIN_LVX:
4728 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4739 /* Binops that need to be initialized manually, but can be expanded
4740 automagically by rs6000_expand_binop_builtin. */
4741 static struct builtin_description bdesc_2arg_spe[] =
4743 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
4744 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
4745 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
4746 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
4747 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
4748 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
4749 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
4750 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
4751 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
4752 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
4753 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
4754 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
4755 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
4756 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
4757 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
4758 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
4759 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
4760 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
4761 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
4762 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
4763 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
4764 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
4767 /* Expand the builtin in EXP and store the result in TARGET. Store
4768 true in *EXPANDEDP if we found a builtin to expand.
4770 This expands the SPE builtins that are not simple unary and binary
4773 spe_expand_builtin (exp, target, expandedp)
4778 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4779 tree arglist = TREE_OPERAND (exp, 1);
4781 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4782 enum insn_code icode;
4783 enum machine_mode tmode, mode0;
4785 struct builtin_description *d;
4790 /* Syntax check for a 5-bit unsigned immediate. */
4793 case SPE_BUILTIN_EVSTDD:
4794 case SPE_BUILTIN_EVSTDH:
4795 case SPE_BUILTIN_EVSTDW:
4796 case SPE_BUILTIN_EVSTWHE:
4797 case SPE_BUILTIN_EVSTWHO:
4798 case SPE_BUILTIN_EVSTWWE:
4799 case SPE_BUILTIN_EVSTWWO:
4800 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4801 if (TREE_CODE (arg1) != INTEGER_CST
4802 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4804 error ("argument 2 must be a 5-bit unsigned literal");
4812 d = (struct builtin_description *) bdesc_2arg_spe;
4813 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
4814 if (d->code == fcode)
4815 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4817 d = (struct builtin_description *) bdesc_spe_predicates;
4818 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
4819 if (d->code == fcode)
4820 return spe_expand_predicate_builtin (d->icode, arglist, target);
4822 d = (struct builtin_description *) bdesc_spe_evsel;
4823 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
4824 if (d->code == fcode)
4825 return spe_expand_evsel_builtin (d->icode, arglist, target);
4829 case SPE_BUILTIN_EVSTDDX:
4830 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
4831 case SPE_BUILTIN_EVSTDHX:
4832 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
4833 case SPE_BUILTIN_EVSTDWX:
4834 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
4835 case SPE_BUILTIN_EVSTWHEX:
4836 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
4837 case SPE_BUILTIN_EVSTWHOX:
4838 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
4839 case SPE_BUILTIN_EVSTWWEX:
4840 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
4841 case SPE_BUILTIN_EVSTWWOX:
4842 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
4843 case SPE_BUILTIN_EVSTDD:
4844 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
4845 case SPE_BUILTIN_EVSTDH:
4846 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
4847 case SPE_BUILTIN_EVSTDW:
4848 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
4849 case SPE_BUILTIN_EVSTWHE:
4850 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
4851 case SPE_BUILTIN_EVSTWHO:
4852 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
4853 case SPE_BUILTIN_EVSTWWE:
4854 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
4855 case SPE_BUILTIN_EVSTWWO:
4856 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
4857 case SPE_BUILTIN_MFSPEFSCR:
4858 icode = CODE_FOR_spe_mfspefscr;
4859 tmode = insn_data[icode].operand[0].mode;
4862 || GET_MODE (target) != tmode
4863 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4864 target = gen_reg_rtx (tmode);
4866 pat = GEN_FCN (icode) (target);
4871 case SPE_BUILTIN_MTSPEFSCR:
4872 icode = CODE_FOR_spe_mtspefscr;
4873 arg0 = TREE_VALUE (arglist);
4874 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4875 mode0 = insn_data[icode].operand[0].mode;
4877 if (arg0 == error_mark_node)
4880 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4881 op0 = copy_to_mode_reg (mode0, op0);
4883 pat = GEN_FCN (icode) (op0);
4896 spe_expand_predicate_builtin (icode, arglist, target)
4897 enum insn_code icode;
4901 rtx pat, scratch, tmp;
4902 tree form = TREE_VALUE (arglist);
4903 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4904 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4905 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4906 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4907 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4908 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4912 if (TREE_CODE (form) != INTEGER_CST)
4914 error ("argument 1 of __builtin_spe_predicate must be a constant");
4918 form_int = TREE_INT_CST_LOW (form);
4923 if (arg0 == error_mark_node || arg1 == error_mark_node)
4927 || GET_MODE (target) != SImode
4928 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
4929 target = gen_reg_rtx (SImode);
4931 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4932 op0 = copy_to_mode_reg (mode0, op0);
4933 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4934 op1 = copy_to_mode_reg (mode1, op1);
4936 scratch = gen_reg_rtx (CCmode);
4938 pat = GEN_FCN (icode) (scratch, op0, op1);
4943 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4944 _lower_. We use one compare, but look in different bits of the
4945 CR for each variant.
4947 There are 2 elements in each SPE simd type (upper/lower). The CR
4948 bits are set as follows:
4950 BIT0 | BIT 1 | BIT 2 | BIT 3
4951 U | L | (U | L) | (U & L)
4953 So, for an "all" relationship, BIT 3 would be set.
4954 For an "any" relationship, BIT 2 would be set. Etc.
4956 Following traditional nomenclature, these bits map to:
4958 BIT0 | BIT 1 | BIT 2 | BIT 3
4961 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
4966 /* All variant. OV bit. */
4968 /* We need to get to the OV bit, which is the ORDERED bit. We
4969 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
4970 that's ugly and will trigger a validate_condition_mode abort.
4971 So let's just use another pattern. */
4972 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
4974 /* Any variant. EQ bit. */
4978 /* Upper variant. LT bit. */
4982 /* Lower variant. GT bit. */
4987 error ("argument 1 of __builtin_spe_predicate is out of range");
4991 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
4992 emit_move_insn (target, tmp);
4997 /* The evsel builtins look like this:
4999 e = __builtin_spe_evsel_OP (a, b, c, d);
5003 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5004 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5008 spe_expand_evsel_builtin (icode, arglist, target)
5009 enum insn_code icode;
5014 tree arg0 = TREE_VALUE (arglist);
5015 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5016 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5017 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5018 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5019 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5020 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5021 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5022 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5023 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5028 if (arg0 == error_mark_node || arg1 == error_mark_node
5029 || arg2 == error_mark_node || arg3 == error_mark_node)
5033 || GET_MODE (target) != mode0
5034 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5035 target = gen_reg_rtx (mode0);
5037 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5038 op0 = copy_to_mode_reg (mode0, op0);
5039 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5040 op1 = copy_to_mode_reg (mode0, op1);
5041 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5042 op2 = copy_to_mode_reg (mode0, op2);
5043 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5044 op3 = copy_to_mode_reg (mode0, op3);
5046 /* Generate the compare. */
5047 scratch = gen_reg_rtx (CCmode);
5048 pat = GEN_FCN (icode) (scratch, op0, op1);
5053 if (mode0 == V2SImode)
5054 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5056 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5061 /* Expand an expression EXP that calls a built-in function,
5062 with result going to TARGET if that's convenient
5063 (and in mode MODE if that's convenient).
5064 SUBTARGET may be used as the target for computing one of EXP's operands.
5065 IGNORE is nonzero if the value is to be ignored. */
5068 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5071 rtx subtarget ATTRIBUTE_UNUSED;
5072 enum machine_mode mode ATTRIBUTE_UNUSED;
5073 int ignore ATTRIBUTE_UNUSED;
5075 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5076 tree arglist = TREE_OPERAND (exp, 1);
5077 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5078 struct builtin_description *d;
5085 ret = altivec_expand_builtin (exp, target, &success);
5092 ret = spe_expand_builtin (exp, target, &success);
5098 if (TARGET_ALTIVEC || TARGET_SPE)
5100 /* Handle simple unary operations. */
5101 d = (struct builtin_description *) bdesc_1arg;
5102 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5103 if (d->code == fcode)
5104 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5106 /* Handle simple binary operations. */
5107 d = (struct builtin_description *) bdesc_2arg;
5108 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5109 if (d->code == fcode)
5110 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5112 /* Handle simple ternary operations. */
5113 d = (struct builtin_description *) bdesc_3arg;
5114 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5115 if (d->code == fcode)
5116 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5124 rs6000_init_builtins ()
5127 spe_init_builtins ();
5129 altivec_init_builtins ();
5130 if (TARGET_ALTIVEC || TARGET_SPE)
5131 rs6000_common_init_builtins ();
5134 /* Search through a set of builtins and enable the mask bits.
5135 DESC is an array of builtins.
5136 SIZE is the totaly number of builtins.
5137 START is the builtin enum at which to start.
5138 END is the builtin enum at which to end. */
5140 enable_mask_for_builtins (desc, size, start, end)
5141 struct builtin_description *desc;
5143 enum rs6000_builtins start, end;
5147 for (i = 0; i < size; ++i)
5148 if (desc[i].code == start)
5154 for (; i < size; ++i)
5156 /* Flip all the bits on. */
5157 desc[i].mask = target_flags;
5158 if (desc[i].code == end)
5164 spe_init_builtins ()
5166 tree endlink = void_list_node;
5167 tree puint_type_node = build_pointer_type (unsigned_type_node);
5168 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5169 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5170 struct builtin_description *d;
5173 tree v2si_ftype_4_v2si
5174 = build_function_type
5176 tree_cons (NULL_TREE, V2SI_type_node,
5177 tree_cons (NULL_TREE, V2SI_type_node,
5178 tree_cons (NULL_TREE, V2SI_type_node,
5179 tree_cons (NULL_TREE, V2SI_type_node,
5182 tree v2sf_ftype_4_v2sf
5183 = build_function_type
5185 tree_cons (NULL_TREE, V2SF_type_node,
5186 tree_cons (NULL_TREE, V2SF_type_node,
5187 tree_cons (NULL_TREE, V2SF_type_node,
5188 tree_cons (NULL_TREE, V2SF_type_node,
5191 tree int_ftype_int_v2si_v2si
5192 = build_function_type
5194 tree_cons (NULL_TREE, integer_type_node,
5195 tree_cons (NULL_TREE, V2SI_type_node,
5196 tree_cons (NULL_TREE, V2SI_type_node,
5199 tree int_ftype_int_v2sf_v2sf
5200 = build_function_type
5202 tree_cons (NULL_TREE, integer_type_node,
5203 tree_cons (NULL_TREE, V2SF_type_node,
5204 tree_cons (NULL_TREE, V2SF_type_node,
5207 tree void_ftype_v2si_puint_int
5208 = build_function_type (void_type_node,
5209 tree_cons (NULL_TREE, V2SI_type_node,
5210 tree_cons (NULL_TREE, puint_type_node,
5211 tree_cons (NULL_TREE,
5215 tree void_ftype_v2si_puint_char
5216 = build_function_type (void_type_node,
5217 tree_cons (NULL_TREE, V2SI_type_node,
5218 tree_cons (NULL_TREE, puint_type_node,
5219 tree_cons (NULL_TREE,
5223 tree void_ftype_v2si_pv2si_int
5224 = build_function_type (void_type_node,
5225 tree_cons (NULL_TREE, V2SI_type_node,
5226 tree_cons (NULL_TREE, pv2si_type_node,
5227 tree_cons (NULL_TREE,
5231 tree void_ftype_v2si_pv2si_char
5232 = build_function_type (void_type_node,
5233 tree_cons (NULL_TREE, V2SI_type_node,
5234 tree_cons (NULL_TREE, pv2si_type_node,
5235 tree_cons (NULL_TREE,
5240 = build_function_type (void_type_node,
5241 tree_cons (NULL_TREE, integer_type_node, endlink));
5244 = build_function_type (integer_type_node,
5245 tree_cons (NULL_TREE, void_type_node, endlink));
5247 tree v2si_ftype_pv2si_int
5248 = build_function_type (V2SI_type_node,
5249 tree_cons (NULL_TREE, pv2si_type_node,
5250 tree_cons (NULL_TREE, integer_type_node,
5253 tree v2si_ftype_puint_int
5254 = build_function_type (V2SI_type_node,
5255 tree_cons (NULL_TREE, puint_type_node,
5256 tree_cons (NULL_TREE, integer_type_node,
5259 tree v2si_ftype_pushort_int
5260 = build_function_type (V2SI_type_node,
5261 tree_cons (NULL_TREE, pushort_type_node,
5262 tree_cons (NULL_TREE, integer_type_node,
5265 /* The initialization of the simple binary and unary builtins is
5266 done in rs6000_common_init_builtins, but we have to enable the
5267 mask bits here manually because we have run out of `target_flags'
5268 bits. We really need to redesign this mask business. */
5270 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5271 ARRAY_SIZE (bdesc_2arg),
5274 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5275 ARRAY_SIZE (bdesc_1arg),
5277 SPE_BUILTIN_EVSUBFUSIAAW);
5278 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5279 ARRAY_SIZE (bdesc_spe_predicates),
5280 SPE_BUILTIN_EVCMPEQ,
5281 SPE_BUILTIN_EVFSTSTLT);
5282 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5283 ARRAY_SIZE (bdesc_spe_evsel),
5284 SPE_BUILTIN_EVSEL_CMPGTS,
5285 SPE_BUILTIN_EVSEL_FSTSTEQ);
5287 /* Initialize irregular SPE builtins. */
5289 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5290 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5291 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5292 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5293 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5294 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5295 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5296 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5297 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5298 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5299 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5300 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5301 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5302 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5303 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5304 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5307 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5308 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5309 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5310 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5311 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5312 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5313 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5314 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5315 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5316 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5317 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5318 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5319 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5320 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5321 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5322 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5323 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5324 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5325 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5326 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5327 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5328 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5331 d = (struct builtin_description *) bdesc_spe_predicates;
5332 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5336 switch (insn_data[d->icode].operand[1].mode)
5339 type = int_ftype_int_v2si_v2si;
5342 type = int_ftype_int_v2sf_v2sf;
5348 def_builtin (d->mask, d->name, type, d->code);
5351 /* Evsel predicates. */
5352 d = (struct builtin_description *) bdesc_spe_evsel;
5353 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5357 switch (insn_data[d->icode].operand[1].mode)
5360 type = v2si_ftype_4_v2si;
5363 type = v2sf_ftype_4_v2sf;
5369 def_builtin (d->mask, d->name, type, d->code);
5374 altivec_init_builtins ()
5376 struct builtin_description *d;
5377 struct builtin_description_predicates *dp;
5379 tree pfloat_type_node = build_pointer_type (float_type_node);
5380 tree pint_type_node = build_pointer_type (integer_type_node);
5381 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5382 tree pchar_type_node = build_pointer_type (char_type_node);
5384 tree pvoid_type_node = build_pointer_type (void_type_node);
5386 tree int_ftype_int_v4si_v4si
5387 = build_function_type_list (integer_type_node,
5388 integer_type_node, V4SI_type_node,
5389 V4SI_type_node, NULL_TREE);
5390 tree v4sf_ftype_pfloat
5391 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
5392 tree void_ftype_pfloat_v4sf
5393 = build_function_type_list (void_type_node,
5394 pfloat_type_node, V4SF_type_node, NULL_TREE);
5395 tree v4si_ftype_pint
5396 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE); tree void_ftype_pint_v4si
5397 = build_function_type_list (void_type_node,
5398 pint_type_node, V4SI_type_node, NULL_TREE);
5399 tree v8hi_ftype_pshort
5400 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
5401 tree void_ftype_pshort_v8hi
5402 = build_function_type_list (void_type_node,
5403 pshort_type_node, V8HI_type_node, NULL_TREE);
5404 tree v16qi_ftype_pchar
5405 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
5406 tree void_ftype_pchar_v16qi
5407 = build_function_type_list (void_type_node,
5408 pchar_type_node, V16QI_type_node, NULL_TREE);
5409 tree void_ftype_v4si
5410 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5411 tree v8hi_ftype_void
5412 = build_function_type (V8HI_type_node, void_list_node);
5413 tree void_ftype_void
5414 = build_function_type (void_type_node, void_list_node);
5416 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5417 tree v16qi_ftype_int_pvoid
5418 = build_function_type_list (V16QI_type_node,
5419 integer_type_node, pvoid_type_node, NULL_TREE);
5420 tree v8hi_ftype_int_pvoid
5421 = build_function_type_list (V8HI_type_node,
5422 integer_type_node, pvoid_type_node, NULL_TREE);
5423 tree v4si_ftype_int_pvoid
5424 = build_function_type_list (V4SI_type_node,
5425 integer_type_node, pvoid_type_node, NULL_TREE);
5426 tree void_ftype_v4si_int_pvoid
5427 = build_function_type_list (void_type_node,
5428 V4SI_type_node, integer_type_node,
5429 pvoid_type_node, NULL_TREE);
5430 tree void_ftype_v16qi_int_pvoid
5431 = build_function_type_list (void_type_node,
5432 V16QI_type_node, integer_type_node,
5433 pvoid_type_node, NULL_TREE);
5434 tree void_ftype_v8hi_int_pvoid
5435 = build_function_type_list (void_type_node,
5436 V8HI_type_node, integer_type_node,
5437 pvoid_type_node, NULL_TREE);
5438 tree int_ftype_int_v8hi_v8hi
5439 = build_function_type_list (integer_type_node,
5440 integer_type_node, V8HI_type_node,
5441 V8HI_type_node, NULL_TREE);
5442 tree int_ftype_int_v16qi_v16qi
5443 = build_function_type_list (integer_type_node,
5444 integer_type_node, V16QI_type_node,
5445 V16QI_type_node, NULL_TREE);
5446 tree int_ftype_int_v4sf_v4sf
5447 = build_function_type_list (integer_type_node,
5448 integer_type_node, V4SF_type_node,
5449 V4SF_type_node, NULL_TREE);
5450 tree v4si_ftype_v4si
5451 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5452 tree v8hi_ftype_v8hi
5453 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5454 tree v16qi_ftype_v16qi
5455 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5456 tree v4sf_ftype_v4sf
5457 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5458 tree void_ftype_pvoid_int_char
5459 = build_function_type_list (void_type_node,
5460 pvoid_type_node, integer_type_node,
5461 char_type_node, NULL_TREE);
5463 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5464 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5465 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5466 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5467 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5468 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5469 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5470 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5471 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5472 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5473 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5474 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5475 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
5476 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
5477 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
5478 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
5479 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
5480 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
5481 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
5482 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5483 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5484 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5485 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5486 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5488 /* Add the DST variants. */
5489 d = (struct builtin_description *) bdesc_dst;
5490 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5491 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
5493 /* Initialize the predicates. */
5494 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5495 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5497 enum machine_mode mode1;
5500 mode1 = insn_data[dp->icode].operand[1].mode;
5505 type = int_ftype_int_v4si_v4si;
5508 type = int_ftype_int_v8hi_v8hi;
5511 type = int_ftype_int_v16qi_v16qi;
5514 type = int_ftype_int_v4sf_v4sf;
5520 def_builtin (dp->mask, dp->name, type, dp->code);
5523 /* Initialize the abs* operators. */
5524 d = (struct builtin_description *) bdesc_abs;
5525 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5527 enum machine_mode mode0;
5530 mode0 = insn_data[d->icode].operand[0].mode;
5535 type = v4si_ftype_v4si;
5538 type = v8hi_ftype_v8hi;
5541 type = v16qi_ftype_v16qi;
5544 type = v4sf_ftype_v4sf;
5550 def_builtin (d->mask, d->name, type, d->code);
5555 rs6000_common_init_builtins ()
5557 struct builtin_description *d;
5560 tree v4sf_ftype_v4sf_v4sf_v16qi
5561 = build_function_type_list (V4SF_type_node,
5562 V4SF_type_node, V4SF_type_node,
5563 V16QI_type_node, NULL_TREE);
5564 tree v4si_ftype_v4si_v4si_v16qi
5565 = build_function_type_list (V4SI_type_node,
5566 V4SI_type_node, V4SI_type_node,
5567 V16QI_type_node, NULL_TREE);
5568 tree v8hi_ftype_v8hi_v8hi_v16qi
5569 = build_function_type_list (V8HI_type_node,
5570 V8HI_type_node, V8HI_type_node,
5571 V16QI_type_node, NULL_TREE);
5572 tree v16qi_ftype_v16qi_v16qi_v16qi
5573 = build_function_type_list (V16QI_type_node,
5574 V16QI_type_node, V16QI_type_node,
5575 V16QI_type_node, NULL_TREE);
5576 tree v4si_ftype_char
5577 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5578 tree v8hi_ftype_char
5579 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5580 tree v16qi_ftype_char
5581 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5582 tree v8hi_ftype_v16qi
5583 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5584 tree v4sf_ftype_v4sf
5585 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5587 tree v2si_ftype_v2si_v2si
5588 = build_function_type_list (V2SI_type_node,
5589 V2SI_type_node, V2SI_type_node, NULL_TREE);
5591 tree v2sf_ftype_v2sf_v2sf
5592 = build_function_type_list (V2SF_type_node,
5593 V2SF_type_node, V2SF_type_node, NULL_TREE);
5595 tree v2si_ftype_int_int
5596 = build_function_type_list (V2SI_type_node,
5597 integer_type_node, integer_type_node,
5600 tree v2si_ftype_v2si
5601 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5603 tree v2sf_ftype_v2sf
5604 = build_function_type_list (V2SF_type_node,
5605 V2SF_type_node, NULL_TREE);
5607 tree v2sf_ftype_v2si
5608 = build_function_type_list (V2SF_type_node,
5609 V2SI_type_node, NULL_TREE);
5611 tree v2si_ftype_v2sf
5612 = build_function_type_list (V2SI_type_node,
5613 V2SF_type_node, NULL_TREE);
5615 tree v2si_ftype_v2si_char
5616 = build_function_type_list (V2SI_type_node,
5617 V2SI_type_node, char_type_node, NULL_TREE);
5619 tree v2si_ftype_int_char
5620 = build_function_type_list (V2SI_type_node,
5621 integer_type_node, char_type_node, NULL_TREE);
5623 tree v2si_ftype_char
5624 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5626 tree int_ftype_int_int
5627 = build_function_type_list (integer_type_node,
5628 integer_type_node, integer_type_node,
5631 tree v4si_ftype_v4si_v4si
5632 = build_function_type_list (V4SI_type_node,
5633 V4SI_type_node, V4SI_type_node, NULL_TREE);
5634 tree v4sf_ftype_v4si_char
5635 = build_function_type_list (V4SF_type_node,
5636 V4SI_type_node, char_type_node, NULL_TREE);
5637 tree v4si_ftype_v4sf_char
5638 = build_function_type_list (V4SI_type_node,
5639 V4SF_type_node, char_type_node, NULL_TREE);
5640 tree v4si_ftype_v4si_char
5641 = build_function_type_list (V4SI_type_node,
5642 V4SI_type_node, char_type_node, NULL_TREE);
5643 tree v8hi_ftype_v8hi_char
5644 = build_function_type_list (V8HI_type_node,
5645 V8HI_type_node, char_type_node, NULL_TREE);
5646 tree v16qi_ftype_v16qi_char
5647 = build_function_type_list (V16QI_type_node,
5648 V16QI_type_node, char_type_node, NULL_TREE);
5649 tree v16qi_ftype_v16qi_v16qi_char
5650 = build_function_type_list (V16QI_type_node,
5651 V16QI_type_node, V16QI_type_node,
5652 char_type_node, NULL_TREE);
5653 tree v8hi_ftype_v8hi_v8hi_char
5654 = build_function_type_list (V8HI_type_node,
5655 V8HI_type_node, V8HI_type_node,
5656 char_type_node, NULL_TREE);
5657 tree v4si_ftype_v4si_v4si_char
5658 = build_function_type_list (V4SI_type_node,
5659 V4SI_type_node, V4SI_type_node,
5660 char_type_node, NULL_TREE);
5661 tree v4sf_ftype_v4sf_v4sf_char
5662 = build_function_type_list (V4SF_type_node,
5663 V4SF_type_node, V4SF_type_node,
5664 char_type_node, NULL_TREE);
5665 tree v4sf_ftype_v4sf_v4sf
5666 = build_function_type_list (V4SF_type_node,
5667 V4SF_type_node, V4SF_type_node, NULL_TREE);
5668 tree v4sf_ftype_v4sf_v4sf_v4si
5669 = build_function_type_list (V4SF_type_node,
5670 V4SF_type_node, V4SF_type_node,
5671 V4SI_type_node, NULL_TREE);
5672 tree v4sf_ftype_v4sf_v4sf_v4sf
5673 = build_function_type_list (V4SF_type_node,
5674 V4SF_type_node, V4SF_type_node,
5675 V4SF_type_node, NULL_TREE);
5676 tree v4si_ftype_v4si_v4si_v4si
5677 = build_function_type_list (V4SI_type_node,
5678 V4SI_type_node, V4SI_type_node,
5679 V4SI_type_node, NULL_TREE);
5680 tree v8hi_ftype_v8hi_v8hi
5681 = build_function_type_list (V8HI_type_node,
5682 V8HI_type_node, V8HI_type_node, NULL_TREE);
5683 tree v8hi_ftype_v8hi_v8hi_v8hi
5684 = build_function_type_list (V8HI_type_node,
5685 V8HI_type_node, V8HI_type_node,
5686 V8HI_type_node, NULL_TREE);
5687 tree v4si_ftype_v8hi_v8hi_v4si
5688 = build_function_type_list (V4SI_type_node,
5689 V8HI_type_node, V8HI_type_node,
5690 V4SI_type_node, NULL_TREE);
5691 tree v4si_ftype_v16qi_v16qi_v4si
5692 = build_function_type_list (V4SI_type_node,
5693 V16QI_type_node, V16QI_type_node,
5694 V4SI_type_node, NULL_TREE);
5695 tree v16qi_ftype_v16qi_v16qi
5696 = build_function_type_list (V16QI_type_node,
5697 V16QI_type_node, V16QI_type_node, NULL_TREE);
5698 tree v4si_ftype_v4sf_v4sf
5699 = build_function_type_list (V4SI_type_node,
5700 V4SF_type_node, V4SF_type_node, NULL_TREE);
5701 tree v8hi_ftype_v16qi_v16qi
5702 = build_function_type_list (V8HI_type_node,
5703 V16QI_type_node, V16QI_type_node, NULL_TREE);
5704 tree v4si_ftype_v8hi_v8hi
5705 = build_function_type_list (V4SI_type_node,
5706 V8HI_type_node, V8HI_type_node, NULL_TREE);
5707 tree v8hi_ftype_v4si_v4si
5708 = build_function_type_list (V8HI_type_node,
5709 V4SI_type_node, V4SI_type_node, NULL_TREE);
5710 tree v16qi_ftype_v8hi_v8hi
5711 = build_function_type_list (V16QI_type_node,
5712 V8HI_type_node, V8HI_type_node, NULL_TREE);
5713 tree v4si_ftype_v16qi_v4si
5714 = build_function_type_list (V4SI_type_node,
5715 V16QI_type_node, V4SI_type_node, NULL_TREE);
5716 tree v4si_ftype_v16qi_v16qi
5717 = build_function_type_list (V4SI_type_node,
5718 V16QI_type_node, V16QI_type_node, NULL_TREE);
5719 tree v4si_ftype_v8hi_v4si
5720 = build_function_type_list (V4SI_type_node,
5721 V8HI_type_node, V4SI_type_node, NULL_TREE);
5722 tree v4si_ftype_v8hi
5723 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5724 tree int_ftype_v4si_v4si
5725 = build_function_type_list (integer_type_node,
5726 V4SI_type_node, V4SI_type_node, NULL_TREE);
5727 tree int_ftype_v4sf_v4sf
5728 = build_function_type_list (integer_type_node,
5729 V4SF_type_node, V4SF_type_node, NULL_TREE);
5730 tree int_ftype_v16qi_v16qi
5731 = build_function_type_list (integer_type_node,
5732 V16QI_type_node, V16QI_type_node, NULL_TREE);
5733 tree int_ftype_v8hi_v8hi
5734 = build_function_type_list (integer_type_node,
5735 V8HI_type_node, V8HI_type_node, NULL_TREE);
5737 /* Add the simple ternary operators. */
5738 d = (struct builtin_description *) bdesc_3arg;
5739 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5742 enum machine_mode mode0, mode1, mode2, mode3;
5745 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5748 mode0 = insn_data[d->icode].operand[0].mode;
5749 mode1 = insn_data[d->icode].operand[1].mode;
5750 mode2 = insn_data[d->icode].operand[2].mode;
5751 mode3 = insn_data[d->icode].operand[3].mode;
5753 /* When all four are of the same mode. */
5754 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5759 type = v4si_ftype_v4si_v4si_v4si;
5762 type = v4sf_ftype_v4sf_v4sf_v4sf;
5765 type = v8hi_ftype_v8hi_v8hi_v8hi;
5768 type = v16qi_ftype_v16qi_v16qi_v16qi;
5774 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5779 type = v4si_ftype_v4si_v4si_v16qi;
5782 type = v4sf_ftype_v4sf_v4sf_v16qi;
5785 type = v8hi_ftype_v8hi_v8hi_v16qi;
5788 type = v16qi_ftype_v16qi_v16qi_v16qi;
5794 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5795 && mode3 == V4SImode)
5796 type = v4si_ftype_v16qi_v16qi_v4si;
5797 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5798 && mode3 == V4SImode)
5799 type = v4si_ftype_v8hi_v8hi_v4si;
5800 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5801 && mode3 == V4SImode)
5802 type = v4sf_ftype_v4sf_v4sf_v4si;
5804 /* vchar, vchar, vchar, 4 bit literal. */
5805 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5807 type = v16qi_ftype_v16qi_v16qi_char;
5809 /* vshort, vshort, vshort, 4 bit literal. */
5810 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5812 type = v8hi_ftype_v8hi_v8hi_char;
5814 /* vint, vint, vint, 4 bit literal. */
5815 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5817 type = v4si_ftype_v4si_v4si_char;
5819 /* vfloat, vfloat, vfloat, 4 bit literal. */
5820 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5822 type = v4sf_ftype_v4sf_v4sf_char;
5827 def_builtin (d->mask, d->name, type, d->code);
5830 /* Add the simple binary operators. */
5831 d = (struct builtin_description *) bdesc_2arg;
5832 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5834 enum machine_mode mode0, mode1, mode2;
5837 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5840 mode0 = insn_data[d->icode].operand[0].mode;
5841 mode1 = insn_data[d->icode].operand[1].mode;
5842 mode2 = insn_data[d->icode].operand[2].mode;
5844 /* When all three operands are of the same mode. */
5845 if (mode0 == mode1 && mode1 == mode2)
5850 type = v4sf_ftype_v4sf_v4sf;
5853 type = v4si_ftype_v4si_v4si;
5856 type = v16qi_ftype_v16qi_v16qi;
5859 type = v8hi_ftype_v8hi_v8hi;
5862 type = v2si_ftype_v2si_v2si;
5865 type = v2sf_ftype_v2sf_v2sf;
5868 type = int_ftype_int_int;
5875 /* A few other combos we really don't want to do manually. */
5877 /* vint, vfloat, vfloat. */
5878 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5879 type = v4si_ftype_v4sf_v4sf;
5881 /* vshort, vchar, vchar. */
5882 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
5883 type = v8hi_ftype_v16qi_v16qi;
5885 /* vint, vshort, vshort. */
5886 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
5887 type = v4si_ftype_v8hi_v8hi;
5889 /* vshort, vint, vint. */
5890 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
5891 type = v8hi_ftype_v4si_v4si;
5893 /* vchar, vshort, vshort. */
5894 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
5895 type = v16qi_ftype_v8hi_v8hi;
5897 /* vint, vchar, vint. */
5898 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
5899 type = v4si_ftype_v16qi_v4si;
5901 /* vint, vchar, vchar. */
5902 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
5903 type = v4si_ftype_v16qi_v16qi;
5905 /* vint, vshort, vint. */
5906 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
5907 type = v4si_ftype_v8hi_v4si;
5909 /* vint, vint, 5 bit literal. */
5910 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
5911 type = v4si_ftype_v4si_char;
5913 /* vshort, vshort, 5 bit literal. */
5914 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
5915 type = v8hi_ftype_v8hi_char;
5917 /* vchar, vchar, 5 bit literal. */
5918 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
5919 type = v16qi_ftype_v16qi_char;
5921 /* vfloat, vint, 5 bit literal. */
5922 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
5923 type = v4sf_ftype_v4si_char;
5925 /* vint, vfloat, 5 bit literal. */
5926 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
5927 type = v4si_ftype_v4sf_char;
5929 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
5930 type = v2si_ftype_int_int;
5932 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
5933 type = v2si_ftype_v2si_char;
5935 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
5936 type = v2si_ftype_int_char;
5939 else if (mode0 == SImode)
5944 type = int_ftype_v4si_v4si;
5947 type = int_ftype_v4sf_v4sf;
5950 type = int_ftype_v16qi_v16qi;
5953 type = int_ftype_v8hi_v8hi;
5963 def_builtin (d->mask, d->name, type, d->code);
5966 /* Add the simple unary operators. */
5967 d = (struct builtin_description *) bdesc_1arg;
5968 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5970 enum machine_mode mode0, mode1;
5973 if (d->name == 0 || d->icode == CODE_FOR_nothing)
5976 mode0 = insn_data[d->icode].operand[0].mode;
5977 mode1 = insn_data[d->icode].operand[1].mode;
5979 if (mode0 == V4SImode && mode1 == QImode)
5980 type = v4si_ftype_char;
5981 else if (mode0 == V8HImode && mode1 == QImode)
5982 type = v8hi_ftype_char;
5983 else if (mode0 == V16QImode && mode1 == QImode)
5984 type = v16qi_ftype_char;
5985 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5986 type = v4sf_ftype_v4sf;
5987 else if (mode0 == V8HImode && mode1 == V16QImode)
5988 type = v8hi_ftype_v16qi;
5989 else if (mode0 == V4SImode && mode1 == V8HImode)
5990 type = v4si_ftype_v8hi;
5991 else if (mode0 == V2SImode && mode1 == V2SImode)
5992 type = v2si_ftype_v2si;
5993 else if (mode0 == V2SFmode && mode1 == V2SFmode)
5994 type = v2sf_ftype_v2sf;
5995 else if (mode0 == V2SFmode && mode1 == V2SImode)
5996 type = v2sf_ftype_v2si;
5997 else if (mode0 == V2SImode && mode1 == V2SFmode)
5998 type = v2si_ftype_v2sf;
5999 else if (mode0 == V2SImode && mode1 == QImode)
6000 type = v2si_ftype_char;
6004 def_builtin (d->mask, d->name, type, d->code);
6008 /* Generate a memory reference for expand_block_move, copying volatile,
6009 and other bits from an original memory reference. */
6012 expand_block_move_mem (mode, addr, orig_mem)
6013 enum machine_mode mode;
6017 rtx mem = gen_rtx_MEM (mode, addr);
6019 MEM_COPY_ATTRIBUTES (mem, orig_mem);
6023 /* Expand a block move operation, and return 1 if successful. Return 0
6024 if we should let the compiler generate normal code.
6026 operands[0] is the destination
6027 operands[1] is the source
6028 operands[2] is the length
6029 operands[3] is the alignment */
6031 #define MAX_MOVE_REG 4
6034 expand_block_move (operands)
6037 rtx orig_dest = operands[0];
6038 rtx orig_src = operands[1];
6039 rtx bytes_rtx = operands[2];
6040 rtx align_rtx = operands[3];
6041 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6052 rtx stores[MAX_MOVE_REG];
6055 /* If this is not a fixed size move, just call memcpy */
6059 /* If this is not a fixed size alignment, abort */
6060 if (GET_CODE (align_rtx) != CONST_INT)
6062 align = INTVAL (align_rtx);
6064 /* Anything to move? */
6065 bytes = INTVAL (bytes_rtx);
6069 /* store_one_arg depends on expand_block_move to handle at least the size of
6070 reg_parm_stack_space. */
6071 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6074 /* Move the address into scratch registers. */
6075 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
6076 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
6078 if (TARGET_STRING) /* string instructions are available */
6080 for ( ; bytes > 0; bytes -= move_bytes)
6082 if (bytes > 24 /* move up to 32 bytes at a time */
6090 && ! fixed_regs[12])
6092 move_bytes = (bytes > 32) ? 32 : bytes;
6093 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
6096 expand_block_move_mem (BLKmode,
6099 GEN_INT ((move_bytes == 32)
6103 else if (bytes > 16 /* move up to 24 bytes at a time */
6109 && ! fixed_regs[10])
6111 move_bytes = (bytes > 24) ? 24 : bytes;
6112 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
6115 expand_block_move_mem (BLKmode,
6118 GEN_INT (move_bytes),
6121 else if (bytes > 8 /* move up to 16 bytes at a time */
6127 move_bytes = (bytes > 16) ? 16 : bytes;
6128 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
6131 expand_block_move_mem (BLKmode,
6134 GEN_INT (move_bytes),
6137 else if (bytes >= 8 && TARGET_POWERPC64
6138 /* 64-bit loads and stores require word-aligned
6140 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6143 tmp_reg = gen_reg_rtx (DImode);
6144 emit_move_insn (tmp_reg,
6145 expand_block_move_mem (DImode,
6146 src_reg, orig_src));
6147 emit_move_insn (expand_block_move_mem (DImode,
6148 dest_reg, orig_dest),
6151 else if (bytes > 4 && !TARGET_POWERPC64)
6152 { /* move up to 8 bytes at a time */
6153 move_bytes = (bytes > 8) ? 8 : bytes;
6154 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
6157 expand_block_move_mem (BLKmode,
6160 GEN_INT (move_bytes),
6163 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6164 { /* move 4 bytes */
6166 tmp_reg = gen_reg_rtx (SImode);
6167 emit_move_insn (tmp_reg,
6168 expand_block_move_mem (SImode,
6169 src_reg, orig_src));
6170 emit_move_insn (expand_block_move_mem (SImode,
6171 dest_reg, orig_dest),
6174 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6175 { /* move 2 bytes */
6177 tmp_reg = gen_reg_rtx (HImode);
6178 emit_move_insn (tmp_reg,
6179 expand_block_move_mem (HImode,
6180 src_reg, orig_src));
6181 emit_move_insn (expand_block_move_mem (HImode,
6182 dest_reg, orig_dest),
6185 else if (bytes == 1) /* move 1 byte */
6188 tmp_reg = gen_reg_rtx (QImode);
6189 emit_move_insn (tmp_reg,
6190 expand_block_move_mem (QImode,
6191 src_reg, orig_src));
6192 emit_move_insn (expand_block_move_mem (QImode,
6193 dest_reg, orig_dest),
6197 { /* move up to 4 bytes at a time */
6198 move_bytes = (bytes > 4) ? 4 : bytes;
6199 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
6202 expand_block_move_mem (BLKmode,
6205 GEN_INT (move_bytes),
6209 if (bytes > move_bytes)
6211 if (! TARGET_POWERPC64)
6213 emit_insn (gen_addsi3 (src_reg, src_reg,
6214 GEN_INT (move_bytes)));
6215 emit_insn (gen_addsi3 (dest_reg, dest_reg,
6216 GEN_INT (move_bytes)));
6220 emit_insn (gen_adddi3 (src_reg, src_reg,
6221 GEN_INT (move_bytes)));
6222 emit_insn (gen_adddi3 (dest_reg, dest_reg,
6223 GEN_INT (move_bytes)));
6229 else /* string instructions not available */
6231 num_reg = offset = 0;
6232 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
6234 /* Calculate the correct offset for src/dest */
6238 dest_addr = dest_reg;
6242 src_addr = plus_constant (src_reg, offset);
6243 dest_addr = plus_constant (dest_reg, offset);
6246 /* Generate the appropriate load and store, saving the stores
6248 if (bytes >= 8 && TARGET_POWERPC64
6249 /* 64-bit loads and stores require word-aligned
6251 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6254 tmp_reg = gen_reg_rtx (DImode);
6255 emit_insn (gen_movdi (tmp_reg,
6256 expand_block_move_mem (DImode,
6259 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
6264 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6267 tmp_reg = gen_reg_rtx (SImode);
6268 emit_insn (gen_movsi (tmp_reg,
6269 expand_block_move_mem (SImode,
6272 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
6277 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6280 tmp_reg = gen_reg_rtx (HImode);
6281 emit_insn (gen_movhi (tmp_reg,
6282 expand_block_move_mem (HImode,
6285 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
6293 tmp_reg = gen_reg_rtx (QImode);
6294 emit_insn (gen_movqi (tmp_reg,
6295 expand_block_move_mem (QImode,
6298 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
6304 if (num_reg >= MAX_MOVE_REG)
6306 for (i = 0; i < num_reg; i++)
6307 emit_insn (stores[i]);
6312 for (i = 0; i < num_reg; i++)
6313 emit_insn (stores[i]);
6320 /* Return 1 if OP is a load multiple operation. It is known to be a
6321 PARALLEL and the first section will be tested. */
6324 load_multiple_operation (op, mode)
6326 enum machine_mode mode ATTRIBUTE_UNUSED;
6328 int count = XVECLEN (op, 0);
6329 unsigned int dest_regno;
6333 /* Perform a quick check so we don't blow up below. */
6335 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6336 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6337 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6340 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6341 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6343 for (i = 1; i < count; i++)
6345 rtx elt = XVECEXP (op, 0, i);
6347 if (GET_CODE (elt) != SET
6348 || GET_CODE (SET_DEST (elt)) != REG
6349 || GET_MODE (SET_DEST (elt)) != SImode
6350 || REGNO (SET_DEST (elt)) != dest_regno + i
6351 || GET_CODE (SET_SRC (elt)) != MEM
6352 || GET_MODE (SET_SRC (elt)) != SImode
6353 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6354 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6355 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6356 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6363 /* Similar, but tests for store multiple. Here, the second vector element
6364 is a CLOBBER. It will be tested later. */
6367 store_multiple_operation (op, mode)
6369 enum machine_mode mode ATTRIBUTE_UNUSED;
6371 int count = XVECLEN (op, 0) - 1;
6372 unsigned int src_regno;
6376 /* Perform a quick check so we don't blow up below. */
6378 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6379 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6380 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6383 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6384 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6386 for (i = 1; i < count; i++)
6388 rtx elt = XVECEXP (op, 0, i + 1);
6390 if (GET_CODE (elt) != SET
6391 || GET_CODE (SET_SRC (elt)) != REG
6392 || GET_MODE (SET_SRC (elt)) != SImode
6393 || REGNO (SET_SRC (elt)) != src_regno + i
6394 || GET_CODE (SET_DEST (elt)) != MEM
6395 || GET_MODE (SET_DEST (elt)) != SImode
6396 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6397 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6398 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6399 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6406 /* Return 1 for a parallel vrsave operation. */
6409 vrsave_operation (op, mode)
6411 enum machine_mode mode ATTRIBUTE_UNUSED;
6413 int count = XVECLEN (op, 0);
6414 unsigned int dest_regno, src_regno;
6418 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6419 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6420 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6423 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6424 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6426 if (dest_regno != VRSAVE_REGNO
6427 && src_regno != VRSAVE_REGNO)
6430 for (i = 1; i < count; i++)
6432 rtx elt = XVECEXP (op, 0, i);
6434 if (GET_CODE (elt) != CLOBBER
6435 && GET_CODE (elt) != SET)
6442 /* Return 1 for an PARALLEL suitable for mtcrf. */
6445 mtcrf_operation (op, mode)
6447 enum machine_mode mode ATTRIBUTE_UNUSED;
6449 int count = XVECLEN (op, 0);
6453 /* Perform a quick check so we don't blow up below. */
6455 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6456 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6457 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6459 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6461 if (GET_CODE (src_reg) != REG
6462 || GET_MODE (src_reg) != SImode
6463 || ! INT_REGNO_P (REGNO (src_reg)))
6466 for (i = 0; i < count; i++)
6468 rtx exp = XVECEXP (op, 0, i);
6472 if (GET_CODE (exp) != SET
6473 || GET_CODE (SET_DEST (exp)) != REG
6474 || GET_MODE (SET_DEST (exp)) != CCmode
6475 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6477 unspec = SET_SRC (exp);
6478 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6480 if (GET_CODE (unspec) != UNSPEC
6481 || XINT (unspec, 1) != 20
6482 || XVECLEN (unspec, 0) != 2
6483 || XVECEXP (unspec, 0, 0) != src_reg
6484 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6485 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6491 /* Return 1 for an PARALLEL suitable for lmw. */
6494 lmw_operation (op, mode)
6496 enum machine_mode mode ATTRIBUTE_UNUSED;
6498 int count = XVECLEN (op, 0);
6499 unsigned int dest_regno;
6501 unsigned int base_regno;
6502 HOST_WIDE_INT offset;
6505 /* Perform a quick check so we don't blow up below. */
6507 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6508 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6509 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6512 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6513 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6516 || count != 32 - (int) dest_regno)
6519 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6522 base_regno = REGNO (src_addr);
6523 if (base_regno == 0)
6526 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6528 offset = INTVAL (XEXP (src_addr, 1));
6529 base_regno = REGNO (XEXP (src_addr, 0));
6534 for (i = 0; i < count; i++)
6536 rtx elt = XVECEXP (op, 0, i);
6539 HOST_WIDE_INT newoffset;
6541 if (GET_CODE (elt) != SET
6542 || GET_CODE (SET_DEST (elt)) != REG
6543 || GET_MODE (SET_DEST (elt)) != SImode
6544 || REGNO (SET_DEST (elt)) != dest_regno + i
6545 || GET_CODE (SET_SRC (elt)) != MEM
6546 || GET_MODE (SET_SRC (elt)) != SImode)
6548 newaddr = XEXP (SET_SRC (elt), 0);
6549 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6554 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6556 addr_reg = XEXP (newaddr, 0);
6557 newoffset = INTVAL (XEXP (newaddr, 1));
6561 if (REGNO (addr_reg) != base_regno
6562 || newoffset != offset + 4 * i)
6569 /* Return 1 for an PARALLEL suitable for stmw. */
6572 stmw_operation (op, mode)
6574 enum machine_mode mode ATTRIBUTE_UNUSED;
6576 int count = XVECLEN (op, 0);
6577 unsigned int src_regno;
6579 unsigned int base_regno;
6580 HOST_WIDE_INT offset;
6583 /* Perform a quick check so we don't blow up below. */
6585 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6586 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6587 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6590 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6591 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6594 || count != 32 - (int) src_regno)
6597 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6600 base_regno = REGNO (dest_addr);
6601 if (base_regno == 0)
6604 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6606 offset = INTVAL (XEXP (dest_addr, 1));
6607 base_regno = REGNO (XEXP (dest_addr, 0));
6612 for (i = 0; i < count; i++)
6614 rtx elt = XVECEXP (op, 0, i);
6617 HOST_WIDE_INT newoffset;
6619 if (GET_CODE (elt) != SET
6620 || GET_CODE (SET_SRC (elt)) != REG
6621 || GET_MODE (SET_SRC (elt)) != SImode
6622 || REGNO (SET_SRC (elt)) != src_regno + i
6623 || GET_CODE (SET_DEST (elt)) != MEM
6624 || GET_MODE (SET_DEST (elt)) != SImode)
6626 newaddr = XEXP (SET_DEST (elt), 0);
6627 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6632 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6634 addr_reg = XEXP (newaddr, 0);
6635 newoffset = INTVAL (XEXP (newaddr, 1));
6639 if (REGNO (addr_reg) != base_regno
6640 || newoffset != offset + 4 * i)
6647 /* A validation routine: say whether CODE, a condition code, and MODE
6648 match. The other alternatives either don't make sense or should
6649 never be generated. */
6652 validate_condition_mode (code, mode)
6654 enum machine_mode mode;
6656 if (GET_RTX_CLASS (code) != '<'
6657 || GET_MODE_CLASS (mode) != MODE_CC)
6660 /* These don't make sense. */
6661 if ((code == GT || code == LT || code == GE || code == LE)
6662 && mode == CCUNSmode)
6665 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6666 && mode != CCUNSmode)
6669 if (mode != CCFPmode
6670 && (code == ORDERED || code == UNORDERED
6671 || code == UNEQ || code == LTGT
6672 || code == UNGT || code == UNLT
6673 || code == UNGE || code == UNLE))
6676 /* These should never be generated except for
6677 flag_unsafe_math_optimizations and flag_finite_math_only. */
6678 if (mode == CCFPmode
6679 && ! flag_unsafe_math_optimizations
6680 && ! flag_finite_math_only
6681 && (code == LE || code == GE
6682 || code == UNEQ || code == LTGT
6683 || code == UNGT || code == UNLT))
6686 /* These are invalid; the information is not there. */
6687 if (mode == CCEQmode
6688 && code != EQ && code != NE)
6692 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6693 We only check the opcode against the mode of the CC value here. */
6696 branch_comparison_operator (op, mode)
6698 enum machine_mode mode ATTRIBUTE_UNUSED;
6700 enum rtx_code code = GET_CODE (op);
6701 enum machine_mode cc_mode;
6703 if (GET_RTX_CLASS (code) != '<')
6706 cc_mode = GET_MODE (XEXP (op, 0));
6707 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6710 validate_condition_mode (code, cc_mode);
6715 /* Return 1 if OP is a comparison operation that is valid for a branch
6716 insn and which is true if the corresponding bit in the CC register
6720 branch_positive_comparison_operator (op, mode)
6722 enum machine_mode mode;
6726 if (! branch_comparison_operator (op, mode))
6729 code = GET_CODE (op);
6730 return (code == EQ || code == LT || code == GT
6731 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6732 || code == LTU || code == GTU
6733 || code == UNORDERED);
6736 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6737 We check the opcode against the mode of the CC value and disallow EQ or
6738 NE comparisons for integers. */
6741 scc_comparison_operator (op, mode)
6743 enum machine_mode mode;
6745 enum rtx_code code = GET_CODE (op);
6746 enum machine_mode cc_mode;
6748 if (GET_MODE (op) != mode && mode != VOIDmode)
6751 if (GET_RTX_CLASS (code) != '<')
6754 cc_mode = GET_MODE (XEXP (op, 0));
6755 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6758 validate_condition_mode (code, cc_mode);
6760 if (code == NE && cc_mode != CCFPmode)
6767 trap_comparison_operator (op, mode)
6769 enum machine_mode mode;
6771 if (mode != VOIDmode && mode != GET_MODE (op))
6773 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6777 boolean_operator (op, mode)
6779 enum machine_mode mode ATTRIBUTE_UNUSED;
6781 enum rtx_code code = GET_CODE (op);
6782 return (code == AND || code == IOR || code == XOR);
6786 boolean_or_operator (op, mode)
6788 enum machine_mode mode ATTRIBUTE_UNUSED;
6790 enum rtx_code code = GET_CODE (op);
6791 return (code == IOR || code == XOR);
6795 min_max_operator (op, mode)
6797 enum machine_mode mode ATTRIBUTE_UNUSED;
6799 enum rtx_code code = GET_CODE (op);
6800 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6803 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6804 mask required to convert the result of a rotate insn into a shift
6805 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6808 includes_lshift_p (shiftop, andop)
6812 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6814 shift_mask <<= INTVAL (shiftop);
6816 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6819 /* Similar, but for right shift. */
6822 includes_rshift_p (shiftop, andop)
6826 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6828 shift_mask >>= INTVAL (shiftop);
6830 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6833 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6834 to perform a left shift. It must have exactly SHIFTOP least
6835 signifigant 0's, then one or more 1's, then zero or more 0's. */
6838 includes_rldic_lshift_p (shiftop, andop)
6842 if (GET_CODE (andop) == CONST_INT)
6844 HOST_WIDE_INT c, lsb, shift_mask;
6847 if (c == 0 || c == ~0)
6851 shift_mask <<= INTVAL (shiftop);
6853 /* Find the least signifigant one bit. */
6856 /* It must coincide with the LSB of the shift mask. */
6857 if (-lsb != shift_mask)
6860 /* Invert to look for the next transition (if any). */
6863 /* Remove the low group of ones (originally low group of zeros). */
6866 /* Again find the lsb, and check we have all 1's above. */
6870 else if (GET_CODE (andop) == CONST_DOUBLE
6871 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6873 HOST_WIDE_INT low, high, lsb;
6874 HOST_WIDE_INT shift_mask_low, shift_mask_high;
6876 low = CONST_DOUBLE_LOW (andop);
6877 if (HOST_BITS_PER_WIDE_INT < 64)
6878 high = CONST_DOUBLE_HIGH (andop);
6880 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
6881 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
6884 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6886 shift_mask_high = ~0;
6887 if (INTVAL (shiftop) > 32)
6888 shift_mask_high <<= INTVAL (shiftop) - 32;
6892 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
6899 return high == -lsb;
6902 shift_mask_low = ~0;
6903 shift_mask_low <<= INTVAL (shiftop);
6907 if (-lsb != shift_mask_low)
6910 if (HOST_BITS_PER_WIDE_INT < 64)
6915 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6918 return high == -lsb;
6922 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
6928 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6929 to perform a left shift. It must have SHIFTOP or more least
6930 signifigant 0's, with the remainder of the word 1's. */
6933 includes_rldicr_lshift_p (shiftop, andop)
6937 if (GET_CODE (andop) == CONST_INT)
6939 HOST_WIDE_INT c, lsb, shift_mask;
6942 shift_mask <<= INTVAL (shiftop);
6945 /* Find the least signifigant one bit. */
6948 /* It must be covered by the shift mask.
6949 This test also rejects c == 0. */
6950 if ((lsb & shift_mask) == 0)
6953 /* Check we have all 1's above the transition, and reject all 1's. */
6954 return c == -lsb && lsb != 1;
6956 else if (GET_CODE (andop) == CONST_DOUBLE
6957 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6959 HOST_WIDE_INT low, lsb, shift_mask_low;
6961 low = CONST_DOUBLE_LOW (andop);
6963 if (HOST_BITS_PER_WIDE_INT < 64)
6965 HOST_WIDE_INT high, shift_mask_high;
6967 high = CONST_DOUBLE_HIGH (andop);
6971 shift_mask_high = ~0;
6972 if (INTVAL (shiftop) > 32)
6973 shift_mask_high <<= INTVAL (shiftop) - 32;
6977 if ((lsb & shift_mask_high) == 0)
6980 return high == -lsb;
6986 shift_mask_low = ~0;
6987 shift_mask_low <<= INTVAL (shiftop);
6991 if ((lsb & shift_mask_low) == 0)
6994 return low == -lsb && lsb != 1;
7000 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7001 for lfq and stfq insns.
7003 Note reg1 and reg2 *must* be hard registers. To be sure we will
7004 abort if we are passed pseudo registers. */
7007 registers_ok_for_quad_peep (reg1, reg2)
7010 /* We might have been passed a SUBREG. */
7011 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7014 return (REGNO (reg1) == REGNO (reg2) - 1);
7017 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7018 addr1 and addr2 must be in consecutive memory locations
7019 (addr2 == addr1 + 8). */
7022 addrs_ok_for_quad_peep (addr1, addr2)
7029 /* Extract an offset (if used) from the first addr. */
7030 if (GET_CODE (addr1) == PLUS)
7032 /* If not a REG, return zero. */
7033 if (GET_CODE (XEXP (addr1, 0)) != REG)
7037 reg1 = REGNO (XEXP (addr1, 0));
7038 /* The offset must be constant! */
7039 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7041 offset1 = INTVAL (XEXP (addr1, 1));
7044 else if (GET_CODE (addr1) != REG)
7048 reg1 = REGNO (addr1);
7049 /* This was a simple (mem (reg)) expression. Offset is 0. */
7053 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7054 if (GET_CODE (addr2) != PLUS)
7057 if (GET_CODE (XEXP (addr2, 0)) != REG
7058 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7061 if (reg1 != REGNO (XEXP (addr2, 0)))
7064 /* The offset for the second addr must be 8 more than the first addr. */
7065 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7068 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7073 /* Return the register class of a scratch register needed to copy IN into
7074 or out of a register in CLASS in MODE. If it can be done directly,
7075 NO_REGS is returned. */
7078 secondary_reload_class (class, mode, in)
7079 enum reg_class class;
7080 enum machine_mode mode ATTRIBUTE_UNUSED;
7085 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
7087 /* We cannot copy a symbolic operand directly into anything
7088 other than BASE_REGS for TARGET_ELF. So indicate that a
7089 register from BASE_REGS is needed as an intermediate
7092 On Darwin, pic addresses require a load from memory, which
7093 needs a base register. */
7094 if (class != BASE_REGS
7095 && (GET_CODE (in) == SYMBOL_REF
7096 || GET_CODE (in) == HIGH
7097 || GET_CODE (in) == LABEL_REF
7098 || GET_CODE (in) == CONST))
7102 if (GET_CODE (in) == REG)
7105 if (regno >= FIRST_PSEUDO_REGISTER)
7107 regno = true_regnum (in);
7108 if (regno >= FIRST_PSEUDO_REGISTER)
7112 else if (GET_CODE (in) == SUBREG)
7114 regno = true_regnum (in);
7115 if (regno >= FIRST_PSEUDO_REGISTER)
7121 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7123 if (class == GENERAL_REGS || class == BASE_REGS
7124 || (regno >= 0 && INT_REGNO_P (regno)))
7127 /* Constants, memory, and FP registers can go into FP registers. */
7128 if ((regno == -1 || FP_REGNO_P (regno))
7129 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7132 /* Memory, and AltiVec registers can go into AltiVec registers. */
7133 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7134 && class == ALTIVEC_REGS)
7137 /* We can copy among the CR registers. */
7138 if ((class == CR_REGS || class == CR0_REGS)
7139 && regno >= 0 && CR_REGNO_P (regno))
7142 /* Otherwise, we need GENERAL_REGS. */
7143 return GENERAL_REGS;
7146 /* Given a comparison operation, return the bit number in CCR to test. We
7147 know this is a valid comparison.
7149 SCC_P is 1 if this is for an scc. That means that %D will have been
7150 used instead of %C, so the bits will be in different places.
7152 Return -1 if OP isn't a valid comparison for some reason. */
7159 enum rtx_code code = GET_CODE (op);
7160 enum machine_mode cc_mode;
7165 if (GET_RTX_CLASS (code) != '<')
7170 if (GET_CODE (reg) != REG
7171 || ! CR_REGNO_P (REGNO (reg)))
7174 cc_mode = GET_MODE (reg);
7175 cc_regnum = REGNO (reg);
7176 base_bit = 4 * (cc_regnum - CR0_REGNO);
7178 validate_condition_mode (code, cc_mode);
7183 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7184 return base_bit + 1;
7185 return scc_p ? base_bit + 3 : base_bit + 2;
7187 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7188 return base_bit + 1;
7189 return base_bit + 2;
7190 case GT: case GTU: case UNLE:
7191 return base_bit + 1;
7192 case LT: case LTU: case UNGE:
7194 case ORDERED: case UNORDERED:
7195 return base_bit + 3;
7198 /* If scc, we will have done a cror to put the bit in the
7199 unordered position. So test that bit. For integer, this is ! LT
7200 unless this is an scc insn. */
7201 return scc_p ? base_bit + 3 : base_bit;
7204 return scc_p ? base_bit + 3 : base_bit + 1;
7211 /* Return the GOT register. */
7214 rs6000_got_register (value)
7215 rtx value ATTRIBUTE_UNUSED;
7217 /* The second flow pass currently (June 1999) can't update
7218 regs_ever_live without disturbing other parts of the compiler, so
7219 update it here to make the prolog/epilogue code happy. */
7220 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7221 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7223 current_function_uses_pic_offset_table = 1;
7225 return pic_offset_table_rtx;
7228 /* Function to init struct machine_function.
7229 This will be called, via a pointer variable,
7230 from push_function_context. */
7232 static struct machine_function *
7233 rs6000_init_machine_status ()
7235 return ggc_alloc_cleared (sizeof (machine_function));
7238 /* These macros test for integers and extract the low-order bits. */
7240 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7241 && GET_MODE (X) == VOIDmode)
7243 #define INT_LOWPART(X) \
7244 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7251 unsigned long val = INT_LOWPART (op);
7253 /* If the high bit is zero, the value is the first 1 bit we find
7255 if ((val & 0x80000000) == 0)
7257 if ((val & 0xffffffff) == 0)
7261 while (((val <<= 1) & 0x80000000) == 0)
7266 /* If the high bit is set and the low bit is not, or the mask is all
7267 1's, the value is zero. */
7268 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7271 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7274 while (((val >>= 1) & 1) != 0)
7285 unsigned long val = INT_LOWPART (op);
7287 /* If the low bit is zero, the value is the first 1 bit we find from
7291 if ((val & 0xffffffff) == 0)
7295 while (((val >>= 1) & 1) == 0)
7301 /* If the low bit is set and the high bit is not, or the mask is all
7302 1's, the value is 31. */
7303 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7306 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7309 while (((val <<= 1) & 0x80000000) != 0)
7315 /* Print an operand. Recognize special options, documented below. */
7318 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7319 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7321 #define SMALL_DATA_RELOC "sda21"
7322 #define SMALL_DATA_REG 0
7326 print_operand (file, x, code)
7333 unsigned HOST_WIDE_INT uval;
7338 /* Write out an instruction after the call which may be replaced
7339 with glue code by the loader. This depends on the AIX version. */
7340 asm_fprintf (file, RS6000_CALL_GLUE);
7343 /* %a is output_address. */
7346 /* If X is a constant integer whose low-order 5 bits are zero,
7347 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7348 in the AIX assembler where "sri" with a zero shift count
7349 writes a trash instruction. */
7350 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7357 /* If constant, low-order 16 bits of constant, unsigned.
7358 Otherwise, write normally. */
7360 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7362 print_operand (file, x, 0);
7366 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7367 for 64-bit mask direction. */
7368 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7371 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7375 /* There used to be a comment for 'C' reading "This is an
7376 optional cror needed for certain floating-point
7377 comparisons. Otherwise write nothing." */
7379 /* Similar, except that this is for an scc, so we must be able to
7380 encode the test in a single bit that is one. We do the above
7381 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7382 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7383 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7385 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7387 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7389 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7392 else if (GET_CODE (x) == NE)
7394 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7396 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7397 base_bit + 2, base_bit + 2);
7399 else if (TARGET_SPE && TARGET_HARD_FLOAT
7400 && GET_CODE (x) == EQ
7401 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7403 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7405 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7406 base_bit + 1, base_bit + 1);
7411 /* X is a CR register. Print the number of the EQ bit of the CR */
7412 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7413 output_operand_lossage ("invalid %%E value");
7415 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7419 /* X is a CR register. Print the shift count needed to move it
7420 to the high-order four bits. */
7421 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7422 output_operand_lossage ("invalid %%f value");
7424 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7428 /* Similar, but print the count for the rotate in the opposite
7430 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7431 output_operand_lossage ("invalid %%F value");
7433 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7437 /* X is a constant integer. If it is negative, print "m",
7438 otherwise print "z". This is to make a aze or ame insn. */
7439 if (GET_CODE (x) != CONST_INT)
7440 output_operand_lossage ("invalid %%G value");
7441 else if (INTVAL (x) >= 0)
7448 /* If constant, output low-order five bits. Otherwise, write
7451 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7453 print_operand (file, x, 0);
7457 /* If constant, output low-order six bits. Otherwise, write
7460 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7462 print_operand (file, x, 0);
7466 /* Print `i' if this is a constant, else nothing. */
7472 /* Write the bit number in CCR for jump. */
7475 output_operand_lossage ("invalid %%j code");
7477 fprintf (file, "%d", i);
7481 /* Similar, but add one for shift count in rlinm for scc and pass
7482 scc flag to `ccr_bit'. */
7485 output_operand_lossage ("invalid %%J code");
7487 /* If we want bit 31, write a shift count of zero, not 32. */
7488 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7492 /* X must be a constant. Write the 1's complement of the
7495 output_operand_lossage ("invalid %%k value");
7497 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7501 /* X must be a symbolic constant on ELF. Write an
7502 expression suitable for an 'addi' that adds in the low 16
7504 if (GET_CODE (x) != CONST)
7506 print_operand_address (file, x);
7511 if (GET_CODE (XEXP (x, 0)) != PLUS
7512 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7513 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7514 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7515 output_operand_lossage ("invalid %%K value");
7516 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7518 /* For GNU as, there must be a non-alphanumeric character
7519 between 'l' and the number. The '-' is added by
7520 print_operand() already. */
7521 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7523 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7527 /* %l is output_asm_label. */
7530 /* Write second word of DImode or DFmode reference. Works on register
7531 or non-indexed memory only. */
7532 if (GET_CODE (x) == REG)
7533 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7534 else if (GET_CODE (x) == MEM)
7536 /* Handle possible auto-increment. Since it is pre-increment and
7537 we have already done it, we can just use an offset of word. */
7538 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7539 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7540 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7543 output_address (XEXP (adjust_address_nv (x, SImode,
7547 if (small_data_operand (x, GET_MODE (x)))
7548 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7549 reg_names[SMALL_DATA_REG]);
7554 /* MB value for a mask operand. */
7555 if (! mask_operand (x, SImode))
7556 output_operand_lossage ("invalid %%m value");
7558 fprintf (file, "%d", extract_MB (x));
7562 /* ME value for a mask operand. */
7563 if (! mask_operand (x, SImode))
7564 output_operand_lossage ("invalid %%M value");
7566 fprintf (file, "%d", extract_ME (x));
7569 /* %n outputs the negative of its operand. */
7572 /* Write the number of elements in the vector times 4. */
7573 if (GET_CODE (x) != PARALLEL)
7574 output_operand_lossage ("invalid %%N value");
7576 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7580 /* Similar, but subtract 1 first. */
7581 if (GET_CODE (x) != PARALLEL)
7582 output_operand_lossage ("invalid %%O value");
7584 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7588 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7590 || INT_LOWPART (x) < 0
7591 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7592 output_operand_lossage ("invalid %%p value");
7594 fprintf (file, "%d", i);
7598 /* The operand must be an indirect memory reference. The result
7599 is the register number. */
7600 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7601 || REGNO (XEXP (x, 0)) >= 32)
7602 output_operand_lossage ("invalid %%P value");
7604 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7608 /* This outputs the logical code corresponding to a boolean
7609 expression. The expression may have one or both operands
7610 negated (if one, only the first one). For condition register
7611 logical operations, it will also treat the negated
7612 CR codes as NOTs, but not handle NOTs of them. */
7614 const char *const *t = 0;
7616 enum rtx_code code = GET_CODE (x);
7617 static const char * const tbl[3][3] = {
7618 { "and", "andc", "nor" },
7619 { "or", "orc", "nand" },
7620 { "xor", "eqv", "xor" } };
7624 else if (code == IOR)
7626 else if (code == XOR)
7629 output_operand_lossage ("invalid %%q value");
7631 if (GET_CODE (XEXP (x, 0)) != NOT)
7635 if (GET_CODE (XEXP (x, 1)) == NOT)
7646 /* X is a CR register. Print the mask for `mtcrf'. */
7647 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7648 output_operand_lossage ("invalid %%R value");
7650 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7654 /* Low 5 bits of 32 - value */
7656 output_operand_lossage ("invalid %%s value");
7658 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7662 /* PowerPC64 mask position. All 0's is excluded.
7663 CONST_INT 32-bit mask is considered sign-extended so any
7664 transition must occur within the CONST_INT, not on the boundary. */
7665 if (! mask64_operand (x, DImode))
7666 output_operand_lossage ("invalid %%S value");
7668 uval = INT_LOWPART (x);
7670 if (uval & 1) /* Clear Left */
7672 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7675 else /* Clear Right */
7678 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7685 fprintf (file, "%d", i);
7689 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7690 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7693 /* Bit 3 is OV bit. */
7694 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7696 /* If we want bit 31, write a shift count of zero, not 32. */
7697 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7701 /* Print the symbolic name of a branch target register. */
7702 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7703 && REGNO (x) != COUNT_REGISTER_REGNUM))
7704 output_operand_lossage ("invalid %%T value");
7705 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7706 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7708 fputs ("ctr", file);
7712 /* High-order 16 bits of constant for use in unsigned operand. */
7714 output_operand_lossage ("invalid %%u value");
7716 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7717 (INT_LOWPART (x) >> 16) & 0xffff);
7721 /* High-order 16 bits of constant for use in signed operand. */
7723 output_operand_lossage ("invalid %%v value");
7725 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7726 (INT_LOWPART (x) >> 16) & 0xffff);
7730 /* Print `u' if this has an auto-increment or auto-decrement. */
7731 if (GET_CODE (x) == MEM
7732 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7733 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7738 /* Print the trap code for this operand. */
7739 switch (GET_CODE (x))
7742 fputs ("eq", file); /* 4 */
7745 fputs ("ne", file); /* 24 */
7748 fputs ("lt", file); /* 16 */
7751 fputs ("le", file); /* 20 */
7754 fputs ("gt", file); /* 8 */
7757 fputs ("ge", file); /* 12 */
7760 fputs ("llt", file); /* 2 */
7763 fputs ("lle", file); /* 6 */
7766 fputs ("lgt", file); /* 1 */
7769 fputs ("lge", file); /* 5 */
7777 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7780 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7781 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7783 print_operand (file, x, 0);
7787 /* MB value for a PowerPC64 rldic operand. */
7788 val = (GET_CODE (x) == CONST_INT
7789 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7794 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7795 if ((val <<= 1) < 0)
7798 #if HOST_BITS_PER_WIDE_INT == 32
7799 if (GET_CODE (x) == CONST_INT && i >= 0)
7800 i += 32; /* zero-extend high-part was all 0's */
7801 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7803 val = CONST_DOUBLE_LOW (x);
7810 for ( ; i < 64; i++)
7811 if ((val <<= 1) < 0)
7816 fprintf (file, "%d", i + 1);
7820 if (GET_CODE (x) == MEM
7821 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7826 /* Like 'L', for third word of TImode */
7827 if (GET_CODE (x) == REG)
7828 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7829 else if (GET_CODE (x) == MEM)
7831 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7832 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7833 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7835 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7836 if (small_data_operand (x, GET_MODE (x)))
7837 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7838 reg_names[SMALL_DATA_REG]);
7843 /* X is a SYMBOL_REF. Write out the name preceded by a
7844 period and without any trailing data in brackets. Used for function
7845 names. If we are configured for System V (or the embedded ABI) on
7846 the PowerPC, do not emit the period, since those systems do not use
7847 TOCs and the like. */
7848 if (GET_CODE (x) != SYMBOL_REF)
7851 if (XSTR (x, 0)[0] != '.')
7853 switch (DEFAULT_ABI)
7863 case ABI_AIX_NODESC:
7869 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7871 assemble_name (file, XSTR (x, 0));
7876 /* Like 'L', for last word of TImode. */
7877 if (GET_CODE (x) == REG)
7878 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7879 else if (GET_CODE (x) == MEM)
7881 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7882 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7883 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7885 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7886 if (small_data_operand (x, GET_MODE (x)))
7887 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7888 reg_names[SMALL_DATA_REG]);
7892 /* Print AltiVec or SPE memory operand. */
7897 if (GET_CODE (x) != MEM)
7905 if (GET_CODE (tmp) == REG)
7907 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7910 /* Handle [reg+UIMM]. */
7911 else if (GET_CODE (tmp) == PLUS &&
7912 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7916 if (GET_CODE (XEXP (tmp, 0)) != REG)
7919 x = INTVAL (XEXP (tmp, 1));
7920 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7924 /* Fall through. Must be [reg+reg]. */
7926 if (GET_CODE (tmp) == REG)
7927 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7928 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7930 if (REGNO (XEXP (tmp, 0)) == 0)
7931 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7932 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7934 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
7935 reg_names[ REGNO (XEXP (tmp, 1)) ]);
7943 if (GET_CODE (x) == REG)
7944 fprintf (file, "%s", reg_names[REGNO (x)]);
7945 else if (GET_CODE (x) == MEM)
7947 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7948 know the width from the mode. */
7949 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
7950 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
7951 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7952 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
7953 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
7954 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7956 output_address (XEXP (x, 0));
7959 output_addr_const (file, x);
7963 output_operand_lossage ("invalid %%xn code");
7967 /* Print the address of an operand. */
7970 print_operand_address (file, x)
7974 if (GET_CODE (x) == REG)
7975 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
7976 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
7977 || GET_CODE (x) == LABEL_REF)
7979 output_addr_const (file, x);
7980 if (small_data_operand (x, GET_MODE (x)))
7981 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7982 reg_names[SMALL_DATA_REG]);
7983 else if (TARGET_TOC)
7986 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
7988 if (REGNO (XEXP (x, 0)) == 0)
7989 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
7990 reg_names[ REGNO (XEXP (x, 0)) ]);
7992 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
7993 reg_names[ REGNO (XEXP (x, 1)) ]);
7995 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
7997 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
7998 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8001 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8002 && CONSTANT_P (XEXP (x, 1)))
8004 output_addr_const (file, XEXP (x, 1));
8005 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8009 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8010 && CONSTANT_P (XEXP (x, 1)))
8012 fprintf (file, "lo16(");
8013 output_addr_const (file, XEXP (x, 1));
8014 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8017 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8019 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8021 rtx contains_minus = XEXP (x, 1);
8025 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8026 turn it into (sym) for output_addr_const. */
8027 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8028 contains_minus = XEXP (contains_minus, 0);
8030 minus = XEXP (contains_minus, 0);
8031 symref = XEXP (minus, 0);
8032 XEXP (contains_minus, 0) = symref;
8037 name = XSTR (symref, 0);
8038 newname = alloca (strlen (name) + sizeof ("@toc"));
8039 strcpy (newname, name);
8040 strcat (newname, "@toc");
8041 XSTR (symref, 0) = newname;
8043 output_addr_const (file, XEXP (x, 1));
8045 XSTR (symref, 0) = name;
8046 XEXP (contains_minus, 0) = minus;
8049 output_addr_const (file, XEXP (x, 1));
8051 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8057 /* Target hook for assembling integer objects. The powerpc version has
8058 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8059 is defined. It also needs to handle DI-mode objects on 64-bit
8063 rs6000_assemble_integer (x, size, aligned_p)
8068 #ifdef RELOCATABLE_NEEDS_FIXUP
8069 /* Special handling for SI values. */
8070 if (size == 4 && aligned_p)
8072 extern int in_toc_section PARAMS ((void));
8073 static int recurse = 0;
8075 /* For -mrelocatable, we mark all addresses that need to be fixed up
8076 in the .fixup section. */
8077 if (TARGET_RELOCATABLE
8078 && !in_toc_section ()
8079 && !in_text_section ()
8081 && GET_CODE (x) != CONST_INT
8082 && GET_CODE (x) != CONST_DOUBLE
8088 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8090 ASM_OUTPUT_LABEL (asm_out_file, buf);
8091 fprintf (asm_out_file, "\t.long\t(");
8092 output_addr_const (asm_out_file, x);
8093 fprintf (asm_out_file, ")@fixup\n");
8094 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8095 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8096 fprintf (asm_out_file, "\t.long\t");
8097 assemble_name (asm_out_file, buf);
8098 fprintf (asm_out_file, "\n\t.previous\n");
8102 /* Remove initial .'s to turn a -mcall-aixdesc function
8103 address into the address of the descriptor, not the function
8105 else if (GET_CODE (x) == SYMBOL_REF
8106 && XSTR (x, 0)[0] == '.'
8107 && DEFAULT_ABI == ABI_AIX)
8109 const char *name = XSTR (x, 0);
8110 while (*name == '.')
8113 fprintf (asm_out_file, "\t.long\t%s\n", name);
8117 #endif /* RELOCATABLE_NEEDS_FIXUP */
8118 return default_assemble_integer (x, size, aligned_p);
8122 rs6000_reverse_condition (mode, code)
8123 enum machine_mode mode;
8126 /* Reversal of FP compares takes care -- an ordered compare
8127 becomes an unordered compare and vice versa. */
8128 if (mode == CCFPmode)
8129 return reverse_condition_maybe_unordered (code);
8131 return reverse_condition (code);
8134 /* Generate a compare for CODE. Return a brand-new rtx that
8135 represents the result of the compare. */
8138 rs6000_generate_compare (code)
8141 enum machine_mode comp_mode;
8144 if (rs6000_compare_fp_p)
8145 comp_mode = CCFPmode;
8146 else if (code == GTU || code == LTU
8147 || code == GEU || code == LEU)
8148 comp_mode = CCUNSmode;
8152 /* First, the compare. */
8153 compare_result = gen_reg_rtx (comp_mode);
8155 /* SPE FP compare instructions on the GPRs. Yuck! */
8156 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8158 rtx cmp, or1, or2, or_result, compare_result2;
8166 cmp = flag_unsafe_math_optimizations
8167 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8169 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8170 rs6000_compare_op1);
8178 cmp = flag_unsafe_math_optimizations
8179 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8181 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8182 rs6000_compare_op1);
8190 cmp = flag_unsafe_math_optimizations
8191 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8193 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8194 rs6000_compare_op1);
8200 /* Synthesize LE and GE from LT/GT || EQ. */
8201 if (code == LE || code == GE || code == LEU || code == GEU)
8203 /* Synthesize GE/LE frome GT/LT || EQ. */
8209 case LE: code = LT; break;
8210 case GE: code = GT; break;
8211 case LEU: code = LT; break;
8212 case GEU: code = GT; break;
8216 or1 = gen_reg_rtx (SImode);
8217 or2 = gen_reg_rtx (SImode);
8218 or_result = gen_reg_rtx (CCEQmode);
8219 compare_result2 = gen_reg_rtx (CCFPmode);
8222 cmp = flag_unsafe_math_optimizations
8223 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8225 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8226 rs6000_compare_op1);
8229 /* The MC8540 FP compare instructions set the CR bits
8230 differently than other PPC compare instructions. For
8231 that matter, there is no generic test instruction, but a
8232 testgt, testlt, and testeq. For a true condition, bit 2
8233 is set (x1xx) in the CR. Following the traditional CR
8239 ... bit 2 would be a GT CR alias, so later on we
8240 look in the GT bits for the branch instructins.
8241 However, we must be careful to emit correct RTL in
8242 the meantime, so optimizations don't get confused. */
8244 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8245 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8247 /* OR them together. */
8248 cmp = gen_rtx_SET (VOIDmode, or_result,
8249 gen_rtx_COMPARE (CCEQmode,
8250 gen_rtx_IOR (SImode, or1, or2),
8252 compare_result = or_result;
8257 /* We only care about 1 bit (x1xx), so map everything to NE to
8258 maintain rtl sanity. We'll get to the right bit (x1xx) at
8259 code output time. */
8260 if (code == NE || code == LTGT)
8261 /* Do the inverse here because we have no cmpne
8262 instruction. We use the cmpeq instruction and expect
8263 to get a 0 instead. */
8272 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8273 gen_rtx_COMPARE (comp_mode,
8275 rs6000_compare_op1)));
8277 /* Some kinds of FP comparisons need an OR operation;
8278 except for flag_unsafe_math_optimizations we don't bother. */
8279 if (rs6000_compare_fp_p
8280 && ! flag_unsafe_math_optimizations
8281 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8282 && (code == LE || code == GE
8283 || code == UNEQ || code == LTGT
8284 || code == UNGT || code == UNLT))
8286 enum rtx_code or1, or2;
8287 rtx or1_rtx, or2_rtx, compare2_rtx;
8288 rtx or_result = gen_reg_rtx (CCEQmode);
8292 case LE: or1 = LT; or2 = EQ; break;
8293 case GE: or1 = GT; or2 = EQ; break;
8294 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8295 case LTGT: or1 = LT; or2 = GT; break;
8296 case UNGT: or1 = UNORDERED; or2 = GT; break;
8297 case UNLT: or1 = UNORDERED; or2 = LT; break;
8300 validate_condition_mode (or1, comp_mode);
8301 validate_condition_mode (or2, comp_mode);
8302 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8303 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8304 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8305 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8307 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8309 compare_result = or_result;
8313 validate_condition_mode (code, GET_MODE (compare_result));
8315 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8319 /* Emit the RTL for an sCOND pattern. */
8322 rs6000_emit_sCOND (code, result)
8327 enum machine_mode op_mode;
8329 condition_rtx = rs6000_generate_compare (code);
8331 op_mode = GET_MODE (rs6000_compare_op0);
8332 if (op_mode == VOIDmode)
8333 op_mode = GET_MODE (rs6000_compare_op1);
8335 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8337 PUT_MODE (condition_rtx, DImode);
8338 convert_move (result, condition_rtx, 0);
8342 PUT_MODE (condition_rtx, SImode);
8343 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8347 /* Emit a branch of kind CODE to location LOC. */
8350 rs6000_emit_cbranch (code, loc)
8354 rtx condition_rtx, loc_ref;
8356 condition_rtx = rs6000_generate_compare (code);
8357 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8358 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8359 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8363 /* Return the string to output a conditional branch to LABEL, which is
8364 the operand number of the label, or -1 if the branch is really a
8367 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8368 condition code register and its mode specifies what kind of
8371 REVERSED is non-zero if we should reverse the sense of the comparison.
8373 INSN is the insn. */
8376 output_cbranch (op, label, reversed, insn)
8382 static char string[64];
8383 enum rtx_code code = GET_CODE (op);
8384 rtx cc_reg = XEXP (op, 0);
8385 enum machine_mode mode = GET_MODE (cc_reg);
8386 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8387 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8388 int really_reversed = reversed ^ need_longbranch;
8394 validate_condition_mode (code, mode);
8396 /* Work out which way this really branches. We could use
8397 reverse_condition_maybe_unordered here always but this
8398 makes the resulting assembler clearer. */
8399 if (really_reversed)
8400 code = rs6000_reverse_condition (mode, code);
8402 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8404 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8407 /* Opposite of GT. */
8409 else if (code == NE)
8417 /* Not all of these are actually distinct opcodes, but
8418 we distinguish them for clarity of the resulting assembler. */
8420 ccode = "ne"; break;
8422 ccode = "eq"; break;
8424 ccode = "ge"; break;
8425 case GT: case GTU: case UNGT:
8426 ccode = "gt"; break;
8428 ccode = "le"; break;
8429 case LT: case LTU: case UNLT:
8430 ccode = "lt"; break;
8431 case UNORDERED: ccode = "un"; break;
8432 case ORDERED: ccode = "nu"; break;
8433 case UNGE: ccode = "nl"; break;
8434 case UNLE: ccode = "ng"; break;
8439 /* Maybe we have a guess as to how likely the branch is.
8440 The old mnemonics don't have a way to specify this information. */
8442 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8443 if (note != NULL_RTX)
8445 /* PROB is the difference from 50%. */
8446 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8447 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
8449 /* Only hint for highly probable/improbable branches on newer
8450 cpus as static prediction overrides processor dynamic
8451 prediction. For older cpus we may as well always hint, but
8452 assume not taken for branches that are very close to 50% as a
8453 mispredicted taken branch is more expensive than a
8454 mispredicted not-taken branch. */
8456 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
8458 if (abs (prob) > REG_BR_PROB_BASE / 20
8459 && ((prob > 0) ^ need_longbranch))
8467 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8469 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8471 /* We need to escape any '%' characters in the reg_names string.
8472 Assume they'd only be the first character... */
8473 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8475 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8479 /* If the branch distance was too far, we may have to use an
8480 unconditional branch to go the distance. */
8481 if (need_longbranch)
8482 s += sprintf (s, ",$+8\n\tb %s", label);
8484 s += sprintf (s, ",%s", label);
8490 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8491 operands of the last comparison is nonzero/true, FALSE_COND if it
8492 is zero/false. Return 0 if the hardware has no such operation. */
8495 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8501 enum rtx_code code = GET_CODE (op);
8502 rtx op0 = rs6000_compare_op0;
8503 rtx op1 = rs6000_compare_op1;
8505 enum machine_mode compare_mode = GET_MODE (op0);
8506 enum machine_mode result_mode = GET_MODE (dest);
8509 /* These modes should always match. */
8510 if (GET_MODE (op1) != compare_mode
8511 /* In the isel case however, we can use a compare immediate, so
8512 op1 may be a small constant. */
8513 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8515 if (GET_MODE (true_cond) != result_mode)
8517 if (GET_MODE (false_cond) != result_mode)
8520 /* First, work out if the hardware can do this at all, or
8521 if it's too slow... */
8522 if (! rs6000_compare_fp_p)
8525 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8529 /* Eliminate half of the comparisons by switching operands, this
8530 makes the remaining code simpler. */
8531 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8532 || code == LTGT || code == LT)
8534 code = reverse_condition_maybe_unordered (code);
8536 true_cond = false_cond;
8540 /* UNEQ and LTGT take four instructions for a comparison with zero,
8541 it'll probably be faster to use a branch here too. */
8545 if (GET_CODE (op1) == CONST_DOUBLE)
8546 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8548 /* We're going to try to implement comparions by performing
8549 a subtract, then comparing against zero. Unfortunately,
8550 Inf - Inf is NaN which is not zero, and so if we don't
8551 know that the the operand is finite and the comparison
8552 would treat EQ different to UNORDERED, we can't do it. */
8553 if (! flag_unsafe_math_optimizations
8554 && code != GT && code != UNGE
8555 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
8556 /* Constructs of the form (a OP b ? a : b) are safe. */
8557 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8558 || (! rtx_equal_p (op0, true_cond)
8559 && ! rtx_equal_p (op1, true_cond))))
8561 /* At this point we know we can use fsel. */
8563 /* Reduce the comparison to a comparison against zero. */
8564 temp = gen_reg_rtx (compare_mode);
8565 emit_insn (gen_rtx_SET (VOIDmode, temp,
8566 gen_rtx_MINUS (compare_mode, op0, op1)));
8568 op1 = CONST0_RTX (compare_mode);
8570 /* If we don't care about NaNs we can reduce some of the comparisons
8571 down to faster ones. */
8572 if (flag_unsafe_math_optimizations)
8578 true_cond = false_cond;
8591 /* Now, reduce everything down to a GE. */
8598 temp = gen_reg_rtx (compare_mode);
8599 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8604 temp = gen_reg_rtx (compare_mode);
8605 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8610 temp = gen_reg_rtx (compare_mode);
8611 emit_insn (gen_rtx_SET (VOIDmode, temp,
8612 gen_rtx_NEG (compare_mode,
8613 gen_rtx_ABS (compare_mode, op0))));
8618 temp = gen_reg_rtx (result_mode);
8619 emit_insn (gen_rtx_SET (VOIDmode, temp,
8620 gen_rtx_IF_THEN_ELSE (result_mode,
8621 gen_rtx_GE (VOIDmode,
8623 true_cond, false_cond)));
8625 true_cond = false_cond;
8627 temp = gen_reg_rtx (compare_mode);
8628 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8633 temp = gen_reg_rtx (result_mode);
8634 emit_insn (gen_rtx_SET (VOIDmode, temp,
8635 gen_rtx_IF_THEN_ELSE (result_mode,
8636 gen_rtx_GE (VOIDmode,
8638 true_cond, false_cond)));
8640 false_cond = true_cond;
8642 temp = gen_reg_rtx (compare_mode);
8643 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8651 emit_insn (gen_rtx_SET (VOIDmode, dest,
8652 gen_rtx_IF_THEN_ELSE (result_mode,
8653 gen_rtx_GE (VOIDmode,
8655 true_cond, false_cond)));
8659 /* Same as above, but for ints (isel). */
8662 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8668 rtx condition_rtx, cr;
8670 /* All isel implementations thus far are 32-bits. */
8671 if (GET_MODE (rs6000_compare_op0) != SImode)
8674 /* We still have to do the compare, because isel doesn't do a
8675 compare, it just looks at the CRx bits set by a previous compare
8677 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8678 cr = XEXP (condition_rtx, 0);
8680 if (GET_MODE (cr) == CCmode)
8681 emit_insn (gen_isel_signed (dest, condition_rtx,
8682 true_cond, false_cond, cr));
8684 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8685 true_cond, false_cond, cr));
8691 output_isel (operands)
8696 code = GET_CODE (operands[1]);
8697 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8699 PUT_CODE (operands[1], reverse_condition (code));
8700 return "isel %0,%3,%2,%j1";
8703 return "isel %0,%2,%3,%j1";
8707 rs6000_emit_minmax (dest, code, op0, op1)
8713 enum machine_mode mode = GET_MODE (op0);
8717 if (code == SMAX || code == SMIN)
8722 if (code == SMAX || code == UMAX)
8723 target = emit_conditional_move (dest, c, op0, op1, mode,
8726 target = emit_conditional_move (dest, c, op0, op1, mode,
8728 if (target == NULL_RTX)
8731 emit_move_insn (dest, target);
8734 /* This page contains routines that are used to determine what the
8735 function prologue and epilogue code will do and write them out. */
8737 /* Return the first fixed-point register that is required to be
8738 saved. 32 if none. */
8741 first_reg_to_save ()
8745 /* Find lowest numbered live register. */
8746 for (first_reg = 13; first_reg <= 31; first_reg++)
8747 if (regs_ever_live[first_reg]
8748 && (! call_used_regs[first_reg]
8749 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8750 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8751 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8755 if (flag_pic && current_function_uses_pic_offset_table &&
8756 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
8757 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8763 /* Similar, for FP regs. */
8766 first_fp_reg_to_save ()
8770 /* Find lowest numbered live register. */
8771 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8772 if (regs_ever_live[first_reg])
8778 /* Similar, for AltiVec regs. */
8781 first_altivec_reg_to_save ()
8785 /* Stack frame remains as is unless we are in AltiVec ABI. */
8786 if (! TARGET_ALTIVEC_ABI)
8787 return LAST_ALTIVEC_REGNO + 1;
8789 /* Find lowest numbered live register. */
8790 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8791 if (regs_ever_live[i])
8797 /* Return a 32-bit mask of the AltiVec registers we need to set in
8798 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8799 the 32-bit word is 0. */
8802 compute_vrsave_mask ()
8804 unsigned int i, mask = 0;
8806 /* First, find out if we use _any_ altivec registers. */
8807 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8808 if (regs_ever_live[i])
8809 mask |= ALTIVEC_REG_BIT (i);
8814 /* Next, add all registers that are call-clobbered. We do this
8815 because post-reload register optimizers such as regrename_optimize
8816 may choose to use them. They never change the register class
8817 chosen by reload, so cannot create new uses of altivec registers
8818 if there were none before, so the early exit above is safe. */
8819 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8820 altivec registers not saved in the mask, which might well make the
8821 adjustments below more effective in eliding the save/restore of
8822 VRSAVE in small functions. */
8823 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8824 if (call_used_regs[i])
8825 mask |= ALTIVEC_REG_BIT (i);
8827 /* Next, remove the argument registers from the set. These must
8828 be in the VRSAVE mask set by the caller, so we don't need to add
8829 them in again. More importantly, the mask we compute here is
8830 used to generate CLOBBERs in the set_vrsave insn, and we do not
8831 wish the argument registers to die. */
8832 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8833 mask &= ~ALTIVEC_REG_BIT (i);
8835 /* Similarly, remove the return value from the set. */
8838 diddle_return_value (is_altivec_return_reg, &yes);
8840 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
8847 is_altivec_return_reg (reg, xyes)
8851 bool *yes = (bool *) xyes;
8852 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8857 /* Calculate the stack information for the current function. This is
8858 complicated by having two separate calling sequences, the AIX calling
8859 sequence and the V.4 calling sequence.
8861 AIX (and Darwin/Mac OS X) stack frames look like:
8863 SP----> +---------------------------------------+
8864 | back chain to caller | 0 0
8865 +---------------------------------------+
8866 | saved CR | 4 8 (8-11)
8867 +---------------------------------------+
8869 +---------------------------------------+
8870 | reserved for compilers | 12 24
8871 +---------------------------------------+
8872 | reserved for binders | 16 32
8873 +---------------------------------------+
8874 | saved TOC pointer | 20 40
8875 +---------------------------------------+
8876 | Parameter save area (P) | 24 48
8877 +---------------------------------------+
8878 | Alloca space (A) | 24+P etc.
8879 +---------------------------------------+
8880 | Local variable space (L) | 24+P+A
8881 +---------------------------------------+
8882 | Float/int conversion temporary (X) | 24+P+A+L
8883 +---------------------------------------+
8884 | Save area for AltiVec registers (W) | 24+P+A+L+X
8885 +---------------------------------------+
8886 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8887 +---------------------------------------+
8888 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8889 +---------------------------------------+
8890 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
8891 +---------------------------------------+
8892 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
8893 +---------------------------------------+
8894 old SP->| back chain to caller's caller |
8895 +---------------------------------------+
8897 The required alignment for AIX configurations is two words (i.e., 8
8901 V.4 stack frames look like:
8903 SP----> +---------------------------------------+
8904 | back chain to caller | 0
8905 +---------------------------------------+
8906 | caller's saved LR | 4
8907 +---------------------------------------+
8908 | Parameter save area (P) | 8
8909 +---------------------------------------+
8910 | Alloca space (A) | 8+P
8911 +---------------------------------------+
8912 | Varargs save area (V) | 8+P+A
8913 +---------------------------------------+
8914 | Local variable space (L) | 8+P+A+V
8915 +---------------------------------------+
8916 | Float/int conversion temporary (X) | 8+P+A+V+L
8917 +---------------------------------------+
8918 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8919 +---------------------------------------+
8920 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8921 +---------------------------------------+
8922 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8923 +---------------------------------------+
8924 | SPE: area for 64-bit GP registers |
8925 +---------------------------------------+
8926 | SPE alignment padding |
8927 +---------------------------------------+
8928 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8929 +---------------------------------------+
8930 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8931 +---------------------------------------+
8932 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8933 +---------------------------------------+
8934 old SP->| back chain to caller's caller |
8935 +---------------------------------------+
8937 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8938 given. (But note below and in sysv4.h that we require only 8 and
8939 may round up the size of our stack frame anyways. The historical
8940 reason is early versions of powerpc-linux which didn't properly
8941 align the stack at program startup. A happy side-effect is that
8942 -mno-eabi libraries can be used with -meabi programs.)
8944 The EABI configuration defaults to the V.4 layout, unless
8945 -mcall-aix is used, in which case the AIX layout is used. However,
8946 the stack alignment requirements may differ. If -mno-eabi is not
8947 given, the required stack alignment is 8 bytes; if -mno-eabi is
8948 given, the required alignment is 16 bytes. (But see V.4 comment
8951 #ifndef ABI_STACK_BOUNDARY
8952 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8956 rs6000_stack_info ()
8958 static rs6000_stack_t info, zero_info;
8959 rs6000_stack_t *info_ptr = &info;
8960 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8961 enum rs6000_abi abi;
8965 /* Zero all fields portably. */
8968 /* Select which calling sequence. */
8969 info_ptr->abi = abi = DEFAULT_ABI;
8971 /* Calculate which registers need to be saved & save area size. */
8972 info_ptr->first_gp_reg_save = first_reg_to_save ();
8973 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8974 even if it currently looks like we won't. */
8975 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
8976 || (flag_pic == 1 && abi == ABI_V4)
8977 || (flag_pic && abi == ABI_DARWIN))
8978 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8979 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
8981 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
8983 /* For the SPE, we have an additional upper 32-bits on each GPR.
8984 Ideally we should save the entire 64-bits only when the upper
8985 half is used in SIMD instructions. Since we only record
8986 registers live (not the size they are used in), this proves
8987 difficult because we'd have to traverse the instruction chain at
8988 the right time, taking reload into account. This is a real pain,
8989 so we opt to save the GPRs in 64-bits always. Anyone overly
8990 concerned with frame size can fix this. ;-).
8992 So... since we save all GPRs (except the SP) in 64-bits, the
8993 traditional GP save area will be empty. */
8995 info_ptr->gp_size = 0;
8997 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
8998 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9000 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9001 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9002 - info_ptr->first_altivec_reg_save);
9004 /* Does this function call anything? */
9005 info_ptr->calls_p = (! current_function_is_leaf
9006 || cfun->machine->ra_needs_full_frame);
9008 /* Determine if we need to save the link register. */
9009 if (rs6000_ra_ever_killed ()
9010 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
9011 #ifdef TARGET_RELOCATABLE
9012 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9014 || (info_ptr->first_fp_reg_save != 64
9015 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9016 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9017 || (abi == ABI_V4 && current_function_calls_alloca)
9018 || (DEFAULT_ABI == ABI_DARWIN
9020 && current_function_uses_pic_offset_table)
9021 || info_ptr->calls_p)
9023 info_ptr->lr_save_p = 1;
9024 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9027 /* Determine if we need to save the condition code registers. */
9028 if (regs_ever_live[CR2_REGNO]
9029 || regs_ever_live[CR3_REGNO]
9030 || regs_ever_live[CR4_REGNO])
9032 info_ptr->cr_save_p = 1;
9034 info_ptr->cr_size = reg_size;
9037 /* If the current function calls __builtin_eh_return, then we need
9038 to allocate stack space for registers that will hold data for
9039 the exception handler. */
9040 if (current_function_calls_eh_return)
9043 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9046 /* SPE saves EH registers in 64-bits. */
9047 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9052 /* Determine various sizes. */
9053 info_ptr->reg_size = reg_size;
9054 info_ptr->fixed_size = RS6000_SAVE_AREA;
9055 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9056 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9057 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9061 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9063 info_ptr->spe_gp_size = 0;
9065 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9067 info_ptr->vrsave_mask = compute_vrsave_mask ();
9068 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9072 info_ptr->vrsave_mask = 0;
9073 info_ptr->vrsave_size = 0;
9076 /* Calculate the offsets. */
9084 case ABI_AIX_NODESC:
9086 info_ptr->fp_save_offset = - info_ptr->fp_size;
9087 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9089 if (TARGET_ALTIVEC_ABI)
9091 info_ptr->vrsave_save_offset
9092 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9094 /* Align stack so vector save area is on a quadword boundary. */
9095 if (info_ptr->altivec_size != 0)
9096 info_ptr->altivec_padding_size
9097 = 16 - (-info_ptr->vrsave_save_offset % 16);
9099 info_ptr->altivec_padding_size = 0;
9101 info_ptr->altivec_save_offset
9102 = info_ptr->vrsave_save_offset
9103 - info_ptr->altivec_padding_size
9104 - info_ptr->altivec_size;
9106 /* Adjust for AltiVec case. */
9107 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9110 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9111 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9112 info_ptr->lr_save_offset = 2*reg_size;
9116 info_ptr->fp_save_offset = - info_ptr->fp_size;
9117 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9118 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9122 /* Align stack so SPE GPR save area is aligned on a
9123 double-word boundary. */
9124 if (info_ptr->spe_gp_size != 0)
9125 info_ptr->spe_padding_size
9126 = 8 - (-info_ptr->cr_save_offset % 8);
9128 info_ptr->spe_padding_size = 0;
9130 info_ptr->spe_gp_save_offset
9131 = info_ptr->cr_save_offset
9132 - info_ptr->spe_padding_size
9133 - info_ptr->spe_gp_size;
9135 /* Adjust for SPE case. */
9136 info_ptr->toc_save_offset
9137 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9139 else if (TARGET_ALTIVEC_ABI)
9141 info_ptr->vrsave_save_offset
9142 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9144 /* Align stack so vector save area is on a quadword boundary. */
9145 if (info_ptr->altivec_size != 0)
9146 info_ptr->altivec_padding_size
9147 = 16 - (-info_ptr->vrsave_save_offset % 16);
9149 info_ptr->altivec_padding_size = 0;
9151 info_ptr->altivec_save_offset
9152 = info_ptr->vrsave_save_offset
9153 - info_ptr->altivec_padding_size
9154 - info_ptr->altivec_size;
9156 /* Adjust for AltiVec case. */
9157 info_ptr->toc_save_offset
9158 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9161 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9162 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9163 info_ptr->lr_save_offset = reg_size;
9167 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9169 + info_ptr->altivec_size
9170 + info_ptr->altivec_padding_size
9171 + info_ptr->vrsave_size
9172 + info_ptr->spe_gp_size
9173 + info_ptr->spe_padding_size
9177 + info_ptr->vrsave_size
9178 + info_ptr->toc_size,
9179 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9182 total_raw_size = (info_ptr->vars_size
9183 + info_ptr->parm_size
9184 + info_ptr->save_size
9185 + info_ptr->varargs_size
9186 + info_ptr->fixed_size);
9188 info_ptr->total_size =
9189 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9191 /* Determine if we need to allocate any stack frame:
9193 For AIX we need to push the stack if a frame pointer is needed
9194 (because the stack might be dynamically adjusted), if we are
9195 debugging, if we make calls, or if the sum of fp_save, gp_save,
9196 and local variables are more than the space needed to save all
9197 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9198 + 18*8 = 288 (GPR13 reserved).
9200 For V.4 we don't have the stack cushion that AIX uses, but assume
9201 that the debugger can handle stackless frames. */
9203 if (info_ptr->calls_p)
9204 info_ptr->push_p = 1;
9206 else if (abi == ABI_V4)
9207 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9210 info_ptr->push_p = (frame_pointer_needed
9211 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9212 || ((total_raw_size - info_ptr->fixed_size)
9213 > (TARGET_32BIT ? 220 : 288)));
9215 /* Zero offsets if we're not saving those registers. */
9216 if (info_ptr->fp_size == 0)
9217 info_ptr->fp_save_offset = 0;
9219 if (info_ptr->gp_size == 0)
9220 info_ptr->gp_save_offset = 0;
9222 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9223 info_ptr->altivec_save_offset = 0;
9225 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9226 info_ptr->vrsave_save_offset = 0;
9228 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9229 info_ptr->spe_gp_save_offset = 0;
9231 if (! info_ptr->lr_save_p)
9232 info_ptr->lr_save_offset = 0;
9234 if (! info_ptr->cr_save_p)
9235 info_ptr->cr_save_offset = 0;
9237 if (! info_ptr->toc_save_p)
9238 info_ptr->toc_save_offset = 0;
9244 debug_stack_info (info)
9245 rs6000_stack_t *info;
9247 const char *abi_string;
9250 info = rs6000_stack_info ();
9252 fprintf (stderr, "\nStack information for function %s:\n",
9253 ((current_function_decl && DECL_NAME (current_function_decl))
9254 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9259 default: abi_string = "Unknown"; break;
9260 case ABI_NONE: abi_string = "NONE"; break;
9262 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9263 case ABI_DARWIN: abi_string = "Darwin"; break;
9264 case ABI_V4: abi_string = "V.4"; break;
9267 fprintf (stderr, "\tABI = %5s\n", abi_string);
9269 if (TARGET_ALTIVEC_ABI)
9270 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9273 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
9275 if (info->first_gp_reg_save != 32)
9276 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9278 if (info->first_fp_reg_save != 64)
9279 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9281 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9282 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9283 info->first_altivec_reg_save);
9285 if (info->lr_save_p)
9286 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9288 if (info->cr_save_p)
9289 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9291 if (info->toc_save_p)
9292 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9294 if (info->vrsave_mask)
9295 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9298 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9301 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9303 if (info->gp_save_offset)
9304 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9306 if (info->fp_save_offset)
9307 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9309 if (info->altivec_save_offset)
9310 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9311 info->altivec_save_offset);
9313 if (info->spe_gp_save_offset)
9314 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9315 info->spe_gp_save_offset);
9317 if (info->vrsave_save_offset)
9318 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9319 info->vrsave_save_offset);
9321 if (info->lr_save_offset)
9322 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9324 if (info->cr_save_offset)
9325 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9327 if (info->toc_save_offset)
9328 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9330 if (info->varargs_save_offset)
9331 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9333 if (info->total_size)
9334 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9336 if (info->varargs_size)
9337 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9339 if (info->vars_size)
9340 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9342 if (info->parm_size)
9343 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9345 if (info->fixed_size)
9346 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9349 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9351 if (info->spe_gp_size)
9352 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9355 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9357 if (info->altivec_size)
9358 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9360 if (info->vrsave_size)
9361 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9363 if (info->altivec_padding_size)
9364 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9365 info->altivec_padding_size);
9367 if (info->spe_padding_size)
9368 fprintf (stderr, "\tspe_padding_size = %5d\n",
9369 info->spe_padding_size);
9372 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9375 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9378 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9380 if (info->save_size)
9381 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9383 if (info->reg_size != 4)
9384 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9386 fprintf (stderr, "\n");
9390 rs6000_return_addr (count, frame)
9394 /* Currently we don't optimize very well between prolog and body
9395 code and for PIC code the code can be actually quite bad, so
9396 don't try to be too clever here. */
9397 if (count != 0 || flag_pic != 0)
9399 cfun->machine->ra_needs_full_frame = 1;
9406 plus_constant (copy_to_reg
9407 (gen_rtx_MEM (Pmode,
9408 memory_address (Pmode, frame))),
9409 RETURN_ADDRESS_OFFSET)));
9412 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9415 /* Say whether a function is a candidate for sibcall handling or not.
9416 We do not allow indirect calls to be optimized into sibling calls.
9417 Also, we can't do it if there are any vector parameters; there's
9418 nowhere to put the VRsave code so it works; note that functions with
9419 vector parameters are required to have a prototype, so the argument
9420 type info must be available here. (The tail recursion case can work
9421 with vector parameters, but there's no way to distinguish here.) */
9423 function_ok_for_sibcall (fndecl)
9429 if (TARGET_ALTIVEC_VRSAVE)
9431 for (type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
9432 type; type = TREE_CHAIN (type))
9434 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
9438 if (DEFAULT_ABI == ABI_DARWIN
9439 || (TREE_ASM_WRITTEN (fndecl) && !flag_pic) || !TREE_PUBLIC (fndecl))
9445 /* function rewritten to handle sibcalls */
9447 rs6000_ra_ever_killed ()
9453 #ifdef ASM_OUTPUT_MI_THUNK
9454 if (current_function_is_thunk)
9457 /* regs_ever_live has LR marked as used if any sibcalls
9458 are present. Which it is, but this should not force
9459 saving and restoring in the prologue/epilog. Likewise,
9460 reg_set_between_p thinks a sibcall clobbers LR, so
9461 that is inappropriate. */
9462 /* Also, the prologue can generate a store into LR that
9463 doesn't really count, like this:
9465 bcl to set PIC register
9468 When we're called from the epilog, we need to avoid counting
9469 this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note. */
9471 push_topmost_sequence ();
9473 pop_topmost_sequence ();
9474 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9476 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
9480 if (FIND_REG_INC_NOTE (insn, reg))
9482 else if (GET_CODE (insn) == CALL_INSN
9483 && !SIBLING_CALL_P (insn))
9485 else if (set_of (reg, insn) != NULL_RTX
9486 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
9493 /* Add a REG_MAYBE_DEAD note to the insn. */
9495 rs6000_maybe_dead (insn)
9498 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9503 /* Emit instructions needed to load the TOC register.
9504 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9505 a constant pool; or for SVR4 -fpic. */
9508 rs6000_emit_load_toc_table (fromprolog)
9512 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9514 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
9516 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9518 rtx temp = (fromprolog
9519 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9520 : gen_reg_rtx (Pmode));
9521 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
9522 rs6000_maybe_dead (emit_move_insn (dest, temp));
9524 else if (flag_pic == 2)
9527 rtx tempLR = (fromprolog
9528 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9529 : gen_reg_rtx (Pmode));
9530 rtx temp0 = (fromprolog
9531 ? gen_rtx_REG (Pmode, 0)
9532 : gen_reg_rtx (Pmode));
9535 /* possibly create the toc section */
9536 if (! toc_initialized)
9539 function_section (current_function_decl);
9546 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9547 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9549 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9550 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9552 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9554 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9555 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9562 static int reload_toc_labelno = 0;
9564 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9566 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9567 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9569 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
9572 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9573 rs6000_maybe_dead (emit_move_insn (temp0,
9574 gen_rtx_MEM (Pmode, dest)));
9576 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
9578 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
9580 /* This is for AIX code running in non-PIC ELF. */
9583 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
9584 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9586 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
9587 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
9595 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
9597 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
9602 get_TOC_alias_set ()
9604 static int set = -1;
9606 set = new_alias_set ();
9610 /* This retuns nonzero if the current function uses the TOC. This is
9611 determined by the presence of (unspec ... 7), which is generated by
9612 the various load_toc_* patterns. */
9619 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9622 rtx pat = PATTERN (insn);
9625 if (GET_CODE (pat) == PARALLEL)
9626 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9627 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9628 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9635 create_TOC_reference (symbol)
9638 return gen_rtx_PLUS (Pmode,
9639 gen_rtx_REG (Pmode, TOC_REGISTER),
9640 gen_rtx_CONST (Pmode,
9641 gen_rtx_MINUS (Pmode, symbol,
9642 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9646 /* __throw will restore its own return address to be the same as the
9647 return address of the function that the throw is being made to.
9648 This is unfortunate, because we want to check the original
9649 return address to see if we need to restore the TOC.
9650 So we have to squirrel it away here.
9651 This is used only in compiling __throw and __rethrow.
9653 Most of this code should be removed by CSE. */
9654 static rtx insn_after_throw;
9656 /* This does the saving... */
9658 rs6000_aix_emit_builtin_unwind_init ()
9661 rtx stack_top = gen_reg_rtx (Pmode);
9662 rtx opcode_addr = gen_reg_rtx (Pmode);
9664 insn_after_throw = gen_reg_rtx (SImode);
9666 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9667 emit_move_insn (stack_top, mem);
9669 mem = gen_rtx_MEM (Pmode,
9670 gen_rtx_PLUS (Pmode, stack_top,
9671 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9672 emit_move_insn (opcode_addr, mem);
9673 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9676 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9677 in _eh.o). Only used on AIX.
9679 The idea is that on AIX, function calls look like this:
9680 bl somefunction-trampoline
9684 somefunction-trampoline:
9686 ... load function address in the count register ...
9688 or like this, if the linker determines that this is not a cross-module call
9689 and so the TOC need not be restored:
9692 or like this, if the compiler could determine that this is not a
9695 now, the tricky bit here is that register 2 is saved and restored
9696 by the _linker_, so we can't readily generate debugging information
9697 for it. So we need to go back up the call chain looking at the
9698 insns at return addresses to see which calls saved the TOC register
9699 and so see where it gets restored from.
9701 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9702 just before the actual epilogue.
9704 On the bright side, this incurs no space or time overhead unless an
9705 exception is thrown, except for the extra code in libgcc.a.
9707 The parameter STACKSIZE is a register containing (at runtime)
9708 the amount to be popped off the stack in addition to the stack frame
9709 of this routine (which will be __throw or __rethrow, and so is
9710 guaranteed to have a stack frame). */
9713 rs6000_emit_eh_toc_restore (stacksize)
9717 rtx bottom_of_stack = gen_reg_rtx (Pmode);
9718 rtx tocompare = gen_reg_rtx (SImode);
9719 rtx opcode = gen_reg_rtx (SImode);
9720 rtx opcode_addr = gen_reg_rtx (Pmode);
9722 rtx loop_start = gen_label_rtx ();
9723 rtx no_toc_restore_needed = gen_label_rtx ();
9724 rtx loop_exit = gen_label_rtx ();
9726 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9727 set_mem_alias_set (mem, rs6000_sr_alias_set);
9728 emit_move_insn (bottom_of_stack, mem);
9730 top_of_stack = expand_binop (Pmode, add_optab,
9731 bottom_of_stack, stacksize,
9732 NULL_RTX, 1, OPTAB_WIDEN);
9734 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
9735 : 0xE8410028, SImode));
9737 if (insn_after_throw == NULL_RTX)
9739 emit_move_insn (opcode, insn_after_throw);
9741 emit_note (NULL, NOTE_INSN_LOOP_BEG);
9742 emit_label (loop_start);
9744 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
9745 SImode, NULL_RTX, NULL_RTX,
9746 no_toc_restore_needed);
9748 mem = gen_rtx_MEM (Pmode,
9749 gen_rtx_PLUS (Pmode, bottom_of_stack,
9750 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
9751 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
9753 emit_label (no_toc_restore_needed);
9754 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
9755 Pmode, NULL_RTX, NULL_RTX,
9758 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
9759 set_mem_alias_set (mem, rs6000_sr_alias_set);
9760 emit_move_insn (bottom_of_stack, mem);
9762 mem = gen_rtx_MEM (Pmode,
9763 gen_rtx_PLUS (Pmode, bottom_of_stack,
9764 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9765 emit_move_insn (opcode_addr, mem);
9766 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
9768 emit_note (NULL, NOTE_INSN_LOOP_CONT);
9769 emit_jump (loop_start);
9770 emit_note (NULL, NOTE_INSN_LOOP_END);
9771 emit_label (loop_exit);
9773 #endif /* TARGET_AIX */
9775 /* This ties together stack memory (MEM with an alias set of
9776 rs6000_sr_alias_set) and the change to the stack pointer. */
9779 rs6000_emit_stack_tie ()
9781 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9783 set_mem_alias_set (mem, rs6000_sr_alias_set);
9784 emit_insn (gen_stack_tie (mem));
9787 /* Emit the correct code for allocating stack space, as insns.
9788 If COPY_R12, make sure a copy of the old frame is left in r12.
9789 The generated code may use hard register 0 as a temporary. */
9792 rs6000_emit_allocate_stack (size, copy_r12)
9797 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
9798 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
9799 rtx todec = GEN_INT (-size);
9801 if (current_function_limit_stack)
9803 if (REG_P (stack_limit_rtx)
9804 && REGNO (stack_limit_rtx) > 1
9805 && REGNO (stack_limit_rtx) <= 31)
9807 emit_insn (Pmode == SImode
9808 ? gen_addsi3 (tmp_reg,
9811 : gen_adddi3 (tmp_reg,
9815 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9818 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9820 && DEFAULT_ABI == ABI_V4)
9822 rtx toload = gen_rtx_CONST (VOIDmode,
9823 gen_rtx_PLUS (Pmode,
9827 emit_insn (gen_elf_high (tmp_reg, toload));
9828 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
9829 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9833 warning ("stack limit expression is not supported");
9836 if (copy_r12 || ! TARGET_UPDATE)
9837 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
9843 /* Need a note here so that try_split doesn't get confused. */
9844 if (get_last_insn() == NULL_RTX)
9845 emit_note (0, NOTE_INSN_DELETED);
9846 insn = emit_move_insn (tmp_reg, todec);
9847 try_split (PATTERN (insn), insn, 0);
9851 if (Pmode == SImode)
9852 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
9855 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
9860 if (Pmode == SImode)
9861 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
9863 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
9864 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
9865 gen_rtx_REG (Pmode, 12));
9868 RTX_FRAME_RELATED_P (insn) = 1;
9870 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9871 gen_rtx_SET (VOIDmode, stack_reg,
9872 gen_rtx_PLUS (Pmode, stack_reg,
9877 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9880 (mem (plus (blah) (regXX)))
9884 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9887 altivec_frame_fixup (insn, reg, val)
9893 real = copy_rtx (PATTERN (insn));
9895 real = replace_rtx (real, reg, GEN_INT (val));
9897 RTX_FRAME_RELATED_P (insn) = 1;
9898 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9903 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9904 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9905 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9906 deduce these equivalences by itself so it wasn't necessary to hold
9907 its hand so much. */
9910 rs6000_frame_related (insn, reg, val, reg2, rreg)
9919 /* copy_rtx will not make unique copies of registers, so we need to
9920 ensure we don't have unwanted sharing here. */
9922 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9925 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9927 real = copy_rtx (PATTERN (insn));
9929 if (reg2 != NULL_RTX)
9930 real = replace_rtx (real, reg2, rreg);
9932 real = replace_rtx (real, reg,
9933 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
9934 STACK_POINTER_REGNUM),
9937 /* We expect that 'real' is either a SET or a PARALLEL containing
9938 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9939 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9941 if (GET_CODE (real) == SET)
9945 temp = simplify_rtx (SET_SRC (set));
9947 SET_SRC (set) = temp;
9948 temp = simplify_rtx (SET_DEST (set));
9950 SET_DEST (set) = temp;
9951 if (GET_CODE (SET_DEST (set)) == MEM)
9953 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9955 XEXP (SET_DEST (set), 0) = temp;
9958 else if (GET_CODE (real) == PARALLEL)
9961 for (i = 0; i < XVECLEN (real, 0); i++)
9962 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
9964 rtx set = XVECEXP (real, 0, i);
9966 temp = simplify_rtx (SET_SRC (set));
9968 SET_SRC (set) = temp;
9969 temp = simplify_rtx (SET_DEST (set));
9971 SET_DEST (set) = temp;
9972 if (GET_CODE (SET_DEST (set)) == MEM)
9974 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9976 XEXP (SET_DEST (set), 0) = temp;
9978 RTX_FRAME_RELATED_P (set) = 1;
9984 RTX_FRAME_RELATED_P (insn) = 1;
9985 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9990 /* Returns an insn that has a vrsave set operation with the
9991 appropriate CLOBBERs. */
9994 generate_set_vrsave (reg, info, epiloguep)
9996 rs6000_stack_t *info;
10000 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10001 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10004 = gen_rtx_SET (VOIDmode,
10006 gen_rtx_UNSPEC_VOLATILE (SImode,
10007 gen_rtvec (2, reg, vrsave),
10012 /* We need to clobber the registers in the mask so the scheduler
10013 does not move sets to VRSAVE before sets of AltiVec registers.
10015 However, if the function receives nonlocal gotos, reload will set
10016 all call saved registers live. We will end up with:
10018 (set (reg 999) (mem))
10019 (parallel [ (set (reg vrsave) (unspec blah))
10020 (clobber (reg 999))])
10022 The clobber will cause the store into reg 999 to be dead, and
10023 flow will attempt to delete an epilogue insn. In this case, we
10024 need an unspec use/set of the register. */
10026 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10027 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10029 if (!epiloguep || call_used_regs [i])
10030 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10031 gen_rtx_REG (V4SImode, i));
10034 rtx reg = gen_rtx_REG (V4SImode, i);
10037 = gen_rtx_SET (VOIDmode,
10039 gen_rtx_UNSPEC (V4SImode,
10040 gen_rtvec (1, reg), 27));
10044 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10046 for (i = 0; i < nclobs; ++i)
10047 XVECEXP (insn, 0, i) = clobs[i];
10052 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10053 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10056 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
10059 enum machine_mode mode;
10060 unsigned int regno;
10064 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10065 rtx replacea, replaceb;
10067 int_rtx = GEN_INT (offset);
10069 /* Some cases that need register indexed addressing. */
10070 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10072 && SPE_VECTOR_MODE (mode)
10073 && !SPE_CONST_OFFSET_OK (offset)))
10075 /* Whomever calls us must make sure r11 is available in the
10076 flow path of instructions in the prologue. */
10077 offset_rtx = gen_rtx_REG (Pmode, 11);
10078 emit_move_insn (offset_rtx, int_rtx);
10080 replacea = offset_rtx;
10081 replaceb = int_rtx;
10085 offset_rtx = int_rtx;
10086 replacea = NULL_RTX;
10087 replaceb = NULL_RTX;
10090 reg = gen_rtx_REG (mode, regno);
10091 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10092 mem = gen_rtx_MEM (mode, addr);
10093 set_mem_alias_set (mem, rs6000_sr_alias_set);
10095 insn = emit_move_insn (mem, reg);
10097 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10100 /* Emit an offset memory reference suitable for a frame store, while
10101 converting to a valid addressing mode. */
10104 gen_frame_mem_offset (mode, reg, offset)
10105 enum machine_mode mode;
10109 rtx int_rtx, offset_rtx;
10111 int_rtx = GEN_INT (offset);
10113 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10115 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10116 emit_move_insn (offset_rtx, int_rtx);
10119 offset_rtx = int_rtx;
10121 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10124 /* Emit function prologue as insns. */
10127 rs6000_emit_prologue ()
10129 rs6000_stack_t *info = rs6000_stack_info ();
10130 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10131 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10132 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10133 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10134 rtx frame_reg_rtx = sp_reg_rtx;
10135 rtx cr_save_rtx = NULL;
10137 int saving_FPRs_inline;
10138 int using_store_multiple;
10139 HOST_WIDE_INT sp_offset = 0;
10141 if (TARGET_SPE_ABI)
10143 reg_mode = V2SImode;
10147 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10149 && info->first_gp_reg_save < 31);
10150 saving_FPRs_inline = (info->first_fp_reg_save == 64
10151 || FP_SAVE_INLINE (info->first_fp_reg_save));
10153 /* For V.4, update stack before we do any saving and set back pointer. */
10154 if (info->push_p && DEFAULT_ABI == ABI_V4)
10156 if (info->total_size < 32767)
10157 sp_offset = info->total_size;
10159 frame_reg_rtx = frame_ptr_rtx;
10160 rs6000_emit_allocate_stack (info->total_size,
10161 (frame_reg_rtx != sp_reg_rtx
10162 && (info->cr_save_p
10164 || info->first_fp_reg_save < 64
10165 || info->first_gp_reg_save < 32
10167 if (frame_reg_rtx != sp_reg_rtx)
10168 rs6000_emit_stack_tie ();
10171 /* Save AltiVec registers if needed. */
10172 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10176 /* There should be a non inline version of this, for when we
10177 are saving lots of vector registers. */
10178 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10179 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10181 rtx areg, savereg, mem;
10184 offset = info->altivec_save_offset + sp_offset
10185 + 16 * (i - info->first_altivec_reg_save);
10187 savereg = gen_rtx_REG (V4SImode, i);
10189 areg = gen_rtx_REG (Pmode, 0);
10190 emit_move_insn (areg, GEN_INT (offset));
10192 /* AltiVec addressing mode is [reg+reg]. */
10193 mem = gen_rtx_MEM (V4SImode,
10194 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10196 set_mem_alias_set (mem, rs6000_sr_alias_set);
10198 insn = emit_move_insn (mem, savereg);
10200 altivec_frame_fixup (insn, areg, offset);
10204 /* VRSAVE is a bit vector representing which AltiVec registers
10205 are used. The OS uses this to determine which vector
10206 registers to save on a context switch. We need to save
10207 VRSAVE on the stack frame, add whatever AltiVec registers we
10208 used in this function, and do the corresponding magic in the
10211 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10213 rtx reg, mem, vrsave;
10216 /* Get VRSAVE onto a GPR. */
10217 reg = gen_rtx_REG (SImode, 12);
10218 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10220 emit_insn (gen_get_vrsave_internal (reg));
10222 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10225 offset = info->vrsave_save_offset + sp_offset;
10227 = gen_rtx_MEM (SImode,
10228 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10229 set_mem_alias_set (mem, rs6000_sr_alias_set);
10230 insn = emit_move_insn (mem, reg);
10232 /* Include the registers in the mask. */
10233 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10235 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10238 /* If we use the link register, get it into r0. */
10239 if (info->lr_save_p)
10240 emit_move_insn (gen_rtx_REG (Pmode, 0),
10241 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10243 /* If we need to save CR, put it into r12. */
10244 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10246 cr_save_rtx = gen_rtx_REG (SImode, 12);
10247 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10250 /* Do any required saving of fpr's. If only one or two to save, do
10251 it ourselves. Otherwise, call function. */
10252 if (saving_FPRs_inline)
10255 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10256 if ((regs_ever_live[info->first_fp_reg_save+i]
10257 && ! call_used_regs[info->first_fp_reg_save+i]))
10258 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10259 info->first_fp_reg_save + i,
10260 info->fp_save_offset + sp_offset + 8 * i,
10263 else if (info->first_fp_reg_save != 64)
10267 const char *alloc_rname;
10269 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10271 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10272 gen_rtx_REG (Pmode,
10273 LINK_REGISTER_REGNUM));
10274 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10275 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10276 alloc_rname = ggc_strdup (rname);
10277 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10278 gen_rtx_SYMBOL_REF (Pmode,
10280 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10282 rtx addr, reg, mem;
10283 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10284 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10285 GEN_INT (info->fp_save_offset
10286 + sp_offset + 8*i));
10287 mem = gen_rtx_MEM (DFmode, addr);
10288 set_mem_alias_set (mem, rs6000_sr_alias_set);
10290 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10292 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10293 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10294 NULL_RTX, NULL_RTX);
10297 /* Save GPRs. This is done as a PARALLEL if we are using
10298 the store-multiple instructions. */
10299 if (using_store_multiple)
10303 p = rtvec_alloc (32 - info->first_gp_reg_save);
10304 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10306 rtx addr, reg, mem;
10307 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10308 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10309 GEN_INT (info->gp_save_offset
10312 mem = gen_rtx_MEM (reg_mode, addr);
10313 set_mem_alias_set (mem, rs6000_sr_alias_set);
10315 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10317 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10318 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10319 NULL_RTX, NULL_RTX);
10324 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10325 if ((regs_ever_live[info->first_gp_reg_save+i]
10326 && ! call_used_regs[info->first_gp_reg_save+i])
10327 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10328 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10329 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10331 rtx addr, reg, mem;
10332 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10334 if (TARGET_SPE_ABI)
10336 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10339 if (!SPE_CONST_OFFSET_OK (offset))
10341 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10342 emit_move_insn (b, GEN_INT (offset));
10345 b = GEN_INT (offset);
10347 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10348 mem = gen_rtx_MEM (V2SImode, addr);
10349 set_mem_alias_set (mem, rs6000_sr_alias_set);
10350 insn = emit_move_insn (mem, reg);
10352 if (GET_CODE (b) == CONST_INT)
10353 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10354 NULL_RTX, NULL_RTX);
10356 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10357 b, GEN_INT (offset));
10361 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10362 GEN_INT (info->gp_save_offset
10365 mem = gen_rtx_MEM (reg_mode, addr);
10366 set_mem_alias_set (mem, rs6000_sr_alias_set);
10368 insn = emit_move_insn (mem, reg);
10369 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10370 NULL_RTX, NULL_RTX);
10375 /* ??? There's no need to emit actual instructions here, but it's the
10376 easiest way to get the frame unwind information emitted. */
10377 if (current_function_calls_eh_return)
10379 unsigned int i, regno;
10383 regno = EH_RETURN_DATA_REGNO (i);
10384 if (regno == INVALID_REGNUM)
10387 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10388 info->ehrd_offset + sp_offset
10389 + reg_size * (int) i,
10394 /* Save lr if we used it. */
10395 if (info->lr_save_p)
10397 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10398 GEN_INT (info->lr_save_offset + sp_offset));
10399 rtx reg = gen_rtx_REG (Pmode, 0);
10400 rtx mem = gen_rtx_MEM (Pmode, addr);
10401 /* This should not be of rs6000_sr_alias_set, because of
10402 __builtin_return_address. */
10404 insn = emit_move_insn (mem, reg);
10405 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10406 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10409 /* Save CR if we use any that must be preserved. */
10410 if (info->cr_save_p)
10412 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10413 GEN_INT (info->cr_save_offset + sp_offset));
10414 rtx mem = gen_rtx_MEM (SImode, addr);
10416 set_mem_alias_set (mem, rs6000_sr_alias_set);
10418 /* If r12 was used to hold the original sp, copy cr into r0 now
10420 if (REGNO (frame_reg_rtx) == 12)
10422 cr_save_rtx = gen_rtx_REG (SImode, 0);
10423 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10425 insn = emit_move_insn (mem, cr_save_rtx);
10427 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10428 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10429 OK. All we have to do is specify that _one_ condition code
10430 register is saved in this stack slot. The thrower's epilogue
10431 will then restore all the call-saved registers.
10432 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10433 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10434 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10437 /* Update stack and set back pointer unless this is V.4,
10438 for which it was done previously. */
10439 if (info->push_p && DEFAULT_ABI != ABI_V4)
10440 rs6000_emit_allocate_stack (info->total_size, FALSE);
10442 /* Set frame pointer, if needed. */
10443 if (frame_pointer_needed)
10445 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10447 RTX_FRAME_RELATED_P (insn) = 1;
10450 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10451 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10452 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10453 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10455 /* If emit_load_toc_table will use the link register, we need to save
10456 it. We use R11 for this purpose because emit_load_toc_table
10457 can use register 0. This allows us to use a plain 'blr' to return
10458 from the procedure more often. */
10459 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10460 && ! info->lr_save_p
10461 && EXIT_BLOCK_PTR->pred != NULL);
10462 if (save_LR_around_toc_setup)
10463 emit_move_insn (gen_rtx_REG (Pmode, 11),
10464 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10466 rs6000_emit_load_toc_table (TRUE);
10468 if (save_LR_around_toc_setup)
10469 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10470 gen_rtx_REG (Pmode, 11));
10473 if (DEFAULT_ABI == ABI_DARWIN
10474 && flag_pic && current_function_uses_pic_offset_table)
10476 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10478 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
10480 rs6000_maybe_dead (
10481 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10482 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10486 /* Write function prologue. */
10489 rs6000_output_function_prologue (file, size)
10491 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10493 rs6000_stack_t *info = rs6000_stack_info ();
10495 if (TARGET_DEBUG_STACK)
10496 debug_stack_info (info);
10498 /* Write .extern for any function we will call to save and restore
10500 if (info->first_fp_reg_save < 64
10501 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10502 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10503 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10504 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10505 RESTORE_FP_SUFFIX);
10507 /* Write .extern for AIX common mode routines, if needed. */
10508 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10510 fputs ("\t.extern __mulh\n", file);
10511 fputs ("\t.extern __mull\n", file);
10512 fputs ("\t.extern __divss\n", file);
10513 fputs ("\t.extern __divus\n", file);
10514 fputs ("\t.extern __quoss\n", file);
10515 fputs ("\t.extern __quous\n", file);
10516 common_mode_defined = 1;
10519 if (! HAVE_prologue)
10523 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10524 the "toplevel" insn chain. */
10525 emit_note (0, NOTE_INSN_DELETED);
10526 rs6000_emit_prologue ();
10527 emit_note (0, NOTE_INSN_DELETED);
10529 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10533 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10535 INSN_ADDRESSES_NEW (insn, addr);
10540 if (TARGET_DEBUG_STACK)
10541 debug_rtx_list (get_insns (), 100);
10542 final (get_insns (), file, FALSE, FALSE);
10546 rs6000_pic_labelno++;
10549 /* Emit function epilogue as insns.
10551 At present, dwarf2out_frame_debug_expr doesn't understand
10552 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10553 anywhere in the epilogue. Most of the insns below would in any case
10554 need special notes to explain where r11 is in relation to the stack. */
10557 rs6000_emit_epilogue (sibcall)
10560 rs6000_stack_t *info;
10561 int restoring_FPRs_inline;
10562 int using_load_multiple;
10563 int using_mfcr_multiple;
10564 int use_backchain_to_restore_sp;
10566 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10567 rtx frame_reg_rtx = sp_reg_rtx;
10568 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10569 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10572 if (TARGET_SPE_ABI)
10574 reg_mode = V2SImode;
10578 info = rs6000_stack_info ();
10579 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10581 && info->first_gp_reg_save < 31);
10582 restoring_FPRs_inline = (sibcall
10583 || current_function_calls_eh_return
10584 || info->first_fp_reg_save == 64
10585 || FP_SAVE_INLINE (info->first_fp_reg_save));
10586 use_backchain_to_restore_sp = (frame_pointer_needed
10587 || current_function_calls_alloca
10588 || info->total_size > 32767);
10589 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10590 || rs6000_cpu == PROCESSOR_PPC603
10591 || rs6000_cpu == PROCESSOR_PPC750
10594 /* If we have a frame pointer, a call to alloca, or a large stack
10595 frame, restore the old stack pointer using the backchain. Otherwise,
10596 we know what size to update it with. */
10597 if (use_backchain_to_restore_sp)
10599 /* Under V.4, don't reset the stack pointer until after we're done
10600 loading the saved registers. */
10601 if (DEFAULT_ABI == ABI_V4)
10602 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10604 emit_move_insn (frame_reg_rtx,
10605 gen_rtx_MEM (Pmode, sp_reg_rtx));
10608 else if (info->push_p)
10610 if (DEFAULT_ABI == ABI_V4)
10611 sp_offset = info->total_size;
10614 emit_insn (TARGET_32BIT
10615 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10616 GEN_INT (info->total_size))
10617 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10618 GEN_INT (info->total_size)));
10622 /* Restore AltiVec registers if needed. */
10623 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10627 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10628 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10630 rtx addr, areg, mem;
10632 areg = gen_rtx_REG (Pmode, 0);
10634 (areg, GEN_INT (info->altivec_save_offset
10636 + 16 * (i - info->first_altivec_reg_save)));
10638 /* AltiVec addressing mode is [reg+reg]. */
10639 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10640 mem = gen_rtx_MEM (V4SImode, addr);
10641 set_mem_alias_set (mem, rs6000_sr_alias_set);
10643 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10647 /* Restore VRSAVE if needed. */
10648 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10650 rtx addr, mem, reg;
10652 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10653 GEN_INT (info->vrsave_save_offset + sp_offset));
10654 mem = gen_rtx_MEM (SImode, addr);
10655 set_mem_alias_set (mem, rs6000_sr_alias_set);
10656 reg = gen_rtx_REG (SImode, 12);
10657 emit_move_insn (reg, mem);
10659 emit_insn (generate_set_vrsave (reg, info, 1));
10662 /* Get the old lr if we saved it. */
10663 if (info->lr_save_p)
10665 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10666 info->lr_save_offset + sp_offset);
10668 set_mem_alias_set (mem, rs6000_sr_alias_set);
10670 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10673 /* Get the old cr if we saved it. */
10674 if (info->cr_save_p)
10676 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10677 GEN_INT (info->cr_save_offset + sp_offset));
10678 rtx mem = gen_rtx_MEM (SImode, addr);
10680 set_mem_alias_set (mem, rs6000_sr_alias_set);
10682 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10685 /* Set LR here to try to overlap restores below. */
10686 if (info->lr_save_p)
10687 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10688 gen_rtx_REG (Pmode, 0));
10690 /* Load exception handler data registers, if needed. */
10691 if (current_function_calls_eh_return)
10693 unsigned int i, regno;
10699 regno = EH_RETURN_DATA_REGNO (i);
10700 if (regno == INVALID_REGNUM)
10703 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
10704 info->ehrd_offset + sp_offset
10705 + reg_size * (int) i);
10706 set_mem_alias_set (mem, rs6000_sr_alias_set);
10708 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
10712 /* Restore GPRs. This is done as a PARALLEL if we are using
10713 the load-multiple instructions. */
10714 if (using_load_multiple)
10717 p = rtvec_alloc (32 - info->first_gp_reg_save);
10718 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10720 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10721 GEN_INT (info->gp_save_offset
10724 rtx mem = gen_rtx_MEM (reg_mode, addr);
10726 set_mem_alias_set (mem, rs6000_sr_alias_set);
10729 gen_rtx_SET (VOIDmode,
10730 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
10733 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10736 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10737 if ((regs_ever_live[info->first_gp_reg_save+i]
10738 && ! call_used_regs[info->first_gp_reg_save+i])
10739 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10740 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10741 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10743 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10744 GEN_INT (info->gp_save_offset
10747 rtx mem = gen_rtx_MEM (reg_mode, addr);
10749 /* Restore 64-bit quantities for SPE. */
10750 if (TARGET_SPE_ABI)
10752 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10755 if (!SPE_CONST_OFFSET_OK (offset))
10757 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10758 emit_move_insn (b, GEN_INT (offset));
10761 b = GEN_INT (offset);
10763 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10764 mem = gen_rtx_MEM (V2SImode, addr);
10767 set_mem_alias_set (mem, rs6000_sr_alias_set);
10769 emit_move_insn (gen_rtx_REG (reg_mode,
10770 info->first_gp_reg_save + i), mem);
10773 /* Restore fpr's if we need to do it without calling a function. */
10774 if (restoring_FPRs_inline)
10775 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10776 if ((regs_ever_live[info->first_fp_reg_save+i]
10777 && ! call_used_regs[info->first_fp_reg_save+i]))
10780 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10781 GEN_INT (info->fp_save_offset
10784 mem = gen_rtx_MEM (DFmode, addr);
10785 set_mem_alias_set (mem, rs6000_sr_alias_set);
10787 emit_move_insn (gen_rtx_REG (DFmode,
10788 info->first_fp_reg_save + i),
10792 /* If we saved cr, restore it here. Just those that were used. */
10793 if (info->cr_save_p)
10795 rtx r12_rtx = gen_rtx_REG (SImode, 12);
10798 if (using_mfcr_multiple)
10800 for (i = 0; i < 8; i++)
10801 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10807 if (using_mfcr_multiple && count > 1)
10812 p = rtvec_alloc (count);
10815 for (i = 0; i < 8; i++)
10816 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10818 rtvec r = rtvec_alloc (2);
10819 RTVEC_ELT (r, 0) = r12_rtx;
10820 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
10821 RTVEC_ELT (p, ndx) =
10822 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
10823 gen_rtx_UNSPEC (CCmode, r, 20));
10826 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10831 for (i = 0; i < 8; i++)
10832 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10834 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
10840 /* If this is V.4, unwind the stack pointer after all of the loads
10841 have been done. We need to emit a block here so that sched
10842 doesn't decide to move the sp change before the register restores
10843 (which may not have any obvious dependency on the stack). This
10844 doesn't hurt performance, because there is no scheduling that can
10845 be done after this point. */
10846 if (DEFAULT_ABI == ABI_V4)
10848 if (frame_reg_rtx != sp_reg_rtx)
10849 rs6000_emit_stack_tie ();
10851 if (use_backchain_to_restore_sp)
10853 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
10855 else if (sp_offset != 0)
10857 emit_insn (Pmode == SImode
10858 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10859 GEN_INT (sp_offset))
10860 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10861 GEN_INT (sp_offset)));
10865 if (current_function_calls_eh_return)
10867 rtx sa = EH_RETURN_STACKADJ_RTX;
10868 emit_insn (Pmode == SImode
10869 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
10870 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
10876 if (! restoring_FPRs_inline)
10877 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
10879 p = rtvec_alloc (2);
10881 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
10882 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10883 gen_rtx_REG (Pmode,
10884 LINK_REGISTER_REGNUM));
10886 /* If we have to restore more than two FP registers, branch to the
10887 restore function. It will return to our caller. */
10888 if (! restoring_FPRs_inline)
10892 const char *alloc_rname;
10894 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
10895 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
10896 alloc_rname = ggc_strdup (rname);
10897 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
10898 gen_rtx_SYMBOL_REF (Pmode,
10901 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10904 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
10905 GEN_INT (info->fp_save_offset + 8*i));
10906 mem = gen_rtx_MEM (DFmode, addr);
10907 set_mem_alias_set (mem, rs6000_sr_alias_set);
10909 RTVEC_ELT (p, i+3) =
10910 gen_rtx_SET (VOIDmode,
10911 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
10916 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
10920 /* Write function epilogue. */
10923 rs6000_output_function_epilogue (file, size)
10925 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10927 rs6000_stack_t *info = rs6000_stack_info ();
10929 if (! HAVE_epilogue)
10931 rtx insn = get_last_insn ();
10932 /* If the last insn was a BARRIER, we don't have to write anything except
10933 the trace table. */
10934 if (GET_CODE (insn) == NOTE)
10935 insn = prev_nonnote_insn (insn);
10936 if (insn == 0 || GET_CODE (insn) != BARRIER)
10938 /* This is slightly ugly, but at least we don't have two
10939 copies of the epilogue-emitting code. */
10942 /* A NOTE_INSN_DELETED is supposed to be at the start
10943 and end of the "toplevel" insn chain. */
10944 emit_note (0, NOTE_INSN_DELETED);
10945 rs6000_emit_epilogue (FALSE);
10946 emit_note (0, NOTE_INSN_DELETED);
10948 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10952 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10954 INSN_ADDRESSES_NEW (insn, addr);
10959 if (TARGET_DEBUG_STACK)
10960 debug_rtx_list (get_insns (), 100);
10961 final (get_insns (), file, FALSE, FALSE);
10966 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10969 We don't output a traceback table if -finhibit-size-directive was
10970 used. The documentation for -finhibit-size-directive reads
10971 ``don't output a @code{.size} assembler directive, or anything
10972 else that would cause trouble if the function is split in the
10973 middle, and the two halves are placed at locations far apart in
10974 memory.'' The traceback table has this property, since it
10975 includes the offset from the start of the function to the
10976 traceback table itself.
10978 System V.4 Powerpc's (and the embedded ABI derived from it) use a
10979 different traceback table. */
10980 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
10981 && rs6000_traceback != traceback_none)
10983 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10984 const char *language_string = lang_hooks.name;
10985 int fixed_parms = 0, float_parms = 0, parm_info = 0;
10987 int optional_tbtab;
10989 if (rs6000_traceback == traceback_full)
10990 optional_tbtab = 1;
10991 else if (rs6000_traceback == traceback_part)
10992 optional_tbtab = 0;
10994 optional_tbtab = !optimize_size && !TARGET_ELF;
10996 while (*fname == '.') /* V.4 encodes . in the name */
10999 /* Need label immediately before tbtab, so we can compute its offset
11000 from the function start. */
11003 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11004 ASM_OUTPUT_LABEL (file, fname);
11006 /* The .tbtab pseudo-op can only be used for the first eight
11007 expressions, since it can't handle the possibly variable
11008 length fields that follow. However, if you omit the optional
11009 fields, the assembler outputs zeros for all optional fields
11010 anyways, giving each variable length field is minimum length
11011 (as defined in sys/debug.h). Thus we can not use the .tbtab
11012 pseudo-op at all. */
11014 /* An all-zero word flags the start of the tbtab, for debuggers
11015 that have to find it by searching forward from the entry
11016 point or from the current pc. */
11017 fputs ("\t.long 0\n", file);
11019 /* Tbtab format type. Use format type 0. */
11020 fputs ("\t.byte 0,", file);
11022 /* Language type. Unfortunately, there doesn't seem to be any
11023 official way to get this info, so we use language_string. C
11024 is 0. C++ is 9. No number defined for Obj-C, so use the
11025 value for C for now. There is no official value for Java,
11026 although IBM appears to be using 13. There is no official value
11027 for Chill, so we've chosen 44 pseudo-randomly. */
11028 if (! strcmp (language_string, "GNU C")
11029 || ! strcmp (language_string, "GNU Objective-C"))
11031 else if (! strcmp (language_string, "GNU F77"))
11033 else if (! strcmp (language_string, "GNU Ada"))
11035 else if (! strcmp (language_string, "GNU Pascal"))
11037 else if (! strcmp (language_string, "GNU C++"))
11039 else if (! strcmp (language_string, "GNU Java"))
11041 else if (! strcmp (language_string, "GNU CHILL"))
11045 fprintf (file, "%d,", i);
11047 /* 8 single bit fields: global linkage (not set for C extern linkage,
11048 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11049 from start of procedure stored in tbtab, internal function, function
11050 has controlled storage, function has no toc, function uses fp,
11051 function logs/aborts fp operations. */
11052 /* Assume that fp operations are used if any fp reg must be saved. */
11053 fprintf (file, "%d,",
11054 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11056 /* 6 bitfields: function is interrupt handler, name present in
11057 proc table, function calls alloca, on condition directives
11058 (controls stack walks, 3 bits), saves condition reg, saves
11060 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11061 set up as a frame pointer, even when there is no alloca call. */
11062 fprintf (file, "%d,",
11063 ((optional_tbtab << 6)
11064 | ((optional_tbtab & frame_pointer_needed) << 5)
11065 | (info->cr_save_p << 1)
11066 | (info->lr_save_p)));
11068 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11070 fprintf (file, "%d,",
11071 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11073 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11074 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11076 if (optional_tbtab)
11078 /* Compute the parameter info from the function decl argument
11081 int next_parm_info_bit = 31;
11083 for (decl = DECL_ARGUMENTS (current_function_decl);
11084 decl; decl = TREE_CHAIN (decl))
11086 rtx parameter = DECL_INCOMING_RTL (decl);
11087 enum machine_mode mode = GET_MODE (parameter);
11089 if (GET_CODE (parameter) == REG)
11091 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11097 if (mode == SFmode)
11099 else if (mode == DFmode)
11104 /* If only one bit will fit, don't or in this entry. */
11105 if (next_parm_info_bit > 0)
11106 parm_info |= (bits << (next_parm_info_bit - 1));
11107 next_parm_info_bit -= 2;
11111 fixed_parms += ((GET_MODE_SIZE (mode)
11112 + (UNITS_PER_WORD - 1))
11114 next_parm_info_bit -= 1;
11120 /* Number of fixed point parameters. */
11121 /* This is actually the number of words of fixed point parameters; thus
11122 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11123 fprintf (file, "%d,", fixed_parms);
11125 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11127 /* This is actually the number of fp registers that hold parameters;
11128 and thus the maximum value is 13. */
11129 /* Set parameters on stack bit if parameters are not in their original
11130 registers, regardless of whether they are on the stack? Xlc
11131 seems to set the bit when not optimizing. */
11132 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11134 if (! optional_tbtab)
11137 /* Optional fields follow. Some are variable length. */
11139 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11140 11 double float. */
11141 /* There is an entry for each parameter in a register, in the order that
11142 they occur in the parameter list. Any intervening arguments on the
11143 stack are ignored. If the list overflows a long (max possible length
11144 34 bits) then completely leave off all elements that don't fit. */
11145 /* Only emit this long if there was at least one parameter. */
11146 if (fixed_parms || float_parms)
11147 fprintf (file, "\t.long %d\n", parm_info);
11149 /* Offset from start of code to tb table. */
11150 fputs ("\t.long ", file);
11151 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11153 RS6000_OUTPUT_BASENAME (file, fname);
11155 assemble_name (file, fname);
11157 fputs ("-.", file);
11159 RS6000_OUTPUT_BASENAME (file, fname);
11161 assemble_name (file, fname);
11165 /* Interrupt handler mask. */
11166 /* Omit this long, since we never set the interrupt handler bit
11169 /* Number of CTL (controlled storage) anchors. */
11170 /* Omit this long, since the has_ctl bit is never set above. */
11172 /* Displacement into stack of each CTL anchor. */
11173 /* Omit this list of longs, because there are no CTL anchors. */
11175 /* Length of function name. */
11176 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11178 /* Function name. */
11179 assemble_string (fname, strlen (fname));
11181 /* Register for alloca automatic storage; this is always reg 31.
11182 Only emit this if the alloca bit was set above. */
11183 if (frame_pointer_needed)
11184 fputs ("\t.byte 31\n", file);
11186 fputs ("\t.align 2\n", file);
11190 /* A C compound statement that outputs the assembler code for a thunk
11191 function, used to implement C++ virtual function calls with
11192 multiple inheritance. The thunk acts as a wrapper around a virtual
11193 function, adjusting the implicit object parameter before handing
11194 control off to the real function.
11196 First, emit code to add the integer DELTA to the location that
11197 contains the incoming first argument. Assume that this argument
11198 contains a pointer, and is the one used to pass the `this' pointer
11199 in C++. This is the incoming argument *before* the function
11200 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11201 values of all other incoming arguments.
11203 After the addition, emit code to jump to FUNCTION, which is a
11204 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11205 not touch the return address. Hence returning from FUNCTION will
11206 return to whoever called the current `thunk'.
11208 The effect must be as if FUNCTION had been called directly with the
11209 adjusted first argument. This macro is responsible for emitting
11210 all of the code for a thunk function; output_function_prologue()
11211 and output_function_epilogue() are not invoked.
11213 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11214 been extracted from it.) It might possibly be useful on some
11215 targets, but probably not.
11217 If you do not define this macro, the target-independent code in the
11218 C++ frontend will generate a less efficient heavyweight thunk that
11219 calls FUNCTION instead of jumping to it. The generic approach does
11220 not support varargs. */
11223 output_mi_thunk (file, thunk_fndecl, delta, function)
11225 tree thunk_fndecl ATTRIBUTE_UNUSED;
11229 const char *this_reg =
11230 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
11231 const char *prefix;
11233 const char *r0 = reg_names[0];
11234 const char *toc = reg_names[2];
11235 const char *schain = reg_names[11];
11236 const char *r12 = reg_names[12];
11238 static int labelno = 0;
11240 /* Small constants that can be done by one add instruction. */
11241 if (delta >= -32768 && delta <= 32767)
11243 if (! TARGET_NEW_MNEMONICS)
11244 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
11246 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
11249 /* Large constants that can be done by one addis instruction. */
11250 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
11251 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
11254 /* 32-bit constants that can be done by an add and addis instruction. */
11255 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
11257 /* Break into two pieces, propagating the sign bit from the low
11258 word to the upper word. */
11259 int delta_high = delta >> 16;
11260 int delta_low = delta & 0xffff;
11261 if ((delta_low & 0x8000) != 0)
11264 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
11267 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
11270 if (! TARGET_NEW_MNEMONICS)
11271 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
11273 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
11276 /* 64-bit constants, fixme */
11280 /* Get the prefix in front of the names. */
11281 switch (DEFAULT_ABI)
11291 case ABI_AIX_NODESC:
11296 /* If the function is compiled in this module, jump to it directly.
11297 Otherwise, load up its address and jump to it. */
11299 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
11301 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
11302 && (! lookup_attribute ("longcall",
11303 TYPE_ATTRIBUTES (TREE_TYPE (function)))
11304 || lookup_attribute ("shortcall",
11305 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
11308 fprintf (file, "\tb %s", prefix);
11309 assemble_name (file, fname);
11310 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
11316 switch (DEFAULT_ABI)
11322 /* Set up a TOC entry for the function. */
11323 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
11325 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
11328 if (TARGET_MINIMAL_TOC)
11329 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
11332 fputs ("\t.tc ", file);
11333 assemble_name (file, fname);
11334 fputs ("[TC],", file);
11336 assemble_name (file, fname);
11339 function_section (current_function_decl);
11342 if (TARGET_MINIMAL_TOC)
11343 asm_fprintf (file, (TARGET_32BIT)
11344 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
11345 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
11346 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
11347 assemble_name (file, buf);
11348 if (TARGET_ELF && TARGET_MINIMAL_TOC)
11349 fputs ("-(.LCTOC1)", file);
11350 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
11352 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
11356 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
11359 asm_fprintf (file, "\tmtctr %s\n", r0);
11361 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
11364 asm_fprintf (file, "\tbctr\n");
11367 case ABI_AIX_NODESC:
11369 fprintf (file, "\tb %s", prefix);
11370 assemble_name (file, fname);
11371 if (flag_pic) fputs ("@plt", file);
11377 fprintf (file, "\tb %s", prefix);
11378 if (flag_pic && !machopic_name_defined_p (fname))
11379 assemble_name (file, machopic_stub_name (fname));
11381 assemble_name (file, fname);
11390 /* A quick summary of the various types of 'constant-pool tables'
11393 Target Flags Name One table per
11394 AIX (none) AIX TOC object file
11395 AIX -mfull-toc AIX TOC object file
11396 AIX -mminimal-toc AIX minimal TOC translation unit
11397 SVR4/EABI (none) SVR4 SDATA object file
11398 SVR4/EABI -fpic SVR4 pic object file
11399 SVR4/EABI -fPIC SVR4 PIC translation unit
11400 SVR4/EABI -mrelocatable EABI TOC function
11401 SVR4/EABI -maix AIX TOC object file
11402 SVR4/EABI -maix -mminimal-toc
11403 AIX minimal TOC translation unit
11405 Name Reg. Set by entries contains:
11406 made by addrs? fp? sum?
11408 AIX TOC 2 crt0 as Y option option
11409 AIX minimal TOC 30 prolog gcc Y Y option
11410 SVR4 SDATA 13 crt0 gcc N Y N
11411 SVR4 pic 30 prolog ld Y not yet N
11412 SVR4 PIC 30 prolog gcc Y option option
11413 EABI TOC 30 prolog gcc Y option option
11417 /* Hash table stuff for keeping track of TOC entries. */
11419 struct toc_hash_struct
11421 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
11422 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
11424 enum machine_mode key_mode;
11428 static htab_t toc_hash_table;
11430 /* Hash functions for the hash table. */
11433 rs6000_hash_constant (k)
11436 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
11437 const char *format = GET_RTX_FORMAT (GET_CODE (k));
11438 int flen = strlen (format);
11441 if (GET_CODE (k) == LABEL_REF)
11442 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
11444 if (GET_CODE (k) == CODE_LABEL)
11449 for (; fidx < flen; fidx++)
11450 switch (format[fidx])
11455 const char *str = XSTR (k, fidx);
11456 len = strlen (str);
11457 result = result * 613 + len;
11458 for (i = 0; i < len; i++)
11459 result = result * 613 + (unsigned) str[i];
11464 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
11468 result = result * 613 + (unsigned) XINT (k, fidx);
11471 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
11472 result = result * 613 + (unsigned) XWINT (k, fidx);
11476 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
11477 result = result * 613 + (unsigned) (XWINT (k, fidx)
11488 toc_hash_function (hash_entry)
11489 const void * hash_entry;
11491 const struct toc_hash_struct *thc =
11492 (const struct toc_hash_struct *) hash_entry;
11493 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11496 /* Compare H1 and H2 for equivalence. */
11499 toc_hash_eq (h1, h2)
11503 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11504 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11506 if (((const struct toc_hash_struct *) h1)->key_mode
11507 != ((const struct toc_hash_struct *) h2)->key_mode)
11510 return rtx_equal_p (r1, r2);
11513 /* Mark the hash table-entry HASH_ENTRY. */
11516 toc_hash_mark_entry (hash_slot, unused)
11518 void * unused ATTRIBUTE_UNUSED;
11520 const struct toc_hash_struct * hash_entry =
11521 *(const struct toc_hash_struct **) hash_slot;
11522 rtx r = hash_entry->key;
11523 ggc_set_mark (hash_entry);
11524 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11525 if (GET_CODE (r) == LABEL_REF)
11528 ggc_set_mark (XEXP (r, 0));
11535 /* Mark all the elements of the TOC hash-table *HT. */
11538 toc_hash_mark_table (vht)
11543 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
11546 /* These are the names given by the C++ front-end to vtables, and
11547 vtable-like objects. Ideally, this logic should not be here;
11548 instead, there should be some programmatic way of inquiring as
11549 to whether or not an object is a vtable. */
11551 #define VTABLE_NAME_P(NAME) \
11552 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
11553 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
11554 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
11555 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
11558 rs6000_output_symbol_ref (file, x)
11562 /* Currently C++ toc references to vtables can be emitted before it
11563 is decided whether the vtable is public or private. If this is
11564 the case, then the linker will eventually complain that there is
11565 a reference to an unknown section. Thus, for vtables only,
11566 we emit the TOC reference to reference the symbol and not the
11568 const char *name = XSTR (x, 0);
11570 if (VTABLE_NAME_P (name))
11572 RS6000_OUTPUT_BASENAME (file, name);
11575 assemble_name (file, name);
11578 /* Output a TOC entry. We derive the entry name from what is being
11582 output_toc (file, x, labelno, mode)
11586 enum machine_mode mode;
11589 const char *name = buf;
11590 const char *real_name;
11597 /* When the linker won't eliminate them, don't output duplicate
11598 TOC entries (this happens on AIX if there is any kind of TOC,
11599 and on SVR4 under -fPIC or -mrelocatable). */
11602 struct toc_hash_struct *h;
11605 h = ggc_alloc (sizeof (*h));
11607 h->key_mode = mode;
11608 h->labelno = labelno;
11610 found = htab_find_slot (toc_hash_table, h, 1);
11611 if (*found == NULL)
11613 else /* This is indeed a duplicate.
11614 Set this label equal to that label. */
11616 fputs ("\t.set ", file);
11617 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11618 fprintf (file, "%d,", labelno);
11619 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
11620 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
11626 /* If we're going to put a double constant in the TOC, make sure it's
11627 aligned properly when strict alignment is on. */
11628 if (GET_CODE (x) == CONST_DOUBLE
11629 && STRICT_ALIGNMENT
11630 && GET_MODE_BITSIZE (mode) >= 64
11631 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
11632 ASM_OUTPUT_ALIGN (file, 3);
11635 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
11637 /* Handle FP constants specially. Note that if we have a minimal
11638 TOC, things we put here aren't actually in the TOC, so we can allow
11640 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
11642 REAL_VALUE_TYPE rv;
11645 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11646 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
11650 if (TARGET_MINIMAL_TOC)
11651 fputs (DOUBLE_INT_ASM_OP, file);
11653 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11654 k[0] & 0xffffffff, k[1] & 0xffffffff);
11655 fprintf (file, "0x%lx%08lx\n",
11656 k[0] & 0xffffffff, k[1] & 0xffffffff);
11661 if (TARGET_MINIMAL_TOC)
11662 fputs ("\t.long ", file);
11664 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
11665 k[0] & 0xffffffff, k[1] & 0xffffffff);
11666 fprintf (file, "0x%lx,0x%lx\n",
11667 k[0] & 0xffffffff, k[1] & 0xffffffff);
11671 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
11673 REAL_VALUE_TYPE rv;
11676 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
11677 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
11681 if (TARGET_MINIMAL_TOC)
11682 fputs (DOUBLE_INT_ASM_OP, file);
11684 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11685 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
11690 if (TARGET_MINIMAL_TOC)
11691 fputs ("\t.long ", file);
11693 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
11694 fprintf (file, "0x%lx\n", l & 0xffffffff);
11698 else if (GET_MODE (x) == VOIDmode
11699 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
11701 unsigned HOST_WIDE_INT low;
11702 HOST_WIDE_INT high;
11704 if (GET_CODE (x) == CONST_DOUBLE)
11706 low = CONST_DOUBLE_LOW (x);
11707 high = CONST_DOUBLE_HIGH (x);
11710 #if HOST_BITS_PER_WIDE_INT == 32
11713 high = (low & 0x80000000) ? ~0 : 0;
11717 low = INTVAL (x) & 0xffffffff;
11718 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
11722 /* TOC entries are always Pmode-sized, but since this
11723 is a bigendian machine then if we're putting smaller
11724 integer constants in the TOC we have to pad them.
11725 (This is still a win over putting the constants in
11726 a separate constant pool, because then we'd have
11727 to have both a TOC entry _and_ the actual constant.)
11729 For a 32-bit target, CONST_INT values are loaded and shifted
11730 entirely within `low' and can be stored in one TOC entry. */
11732 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
11733 abort ();/* It would be easy to make this work, but it doesn't now. */
11735 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
11737 #if HOST_BITS_PER_WIDE_INT == 32
11738 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
11739 POINTER_SIZE, &low, &high, 0);
11742 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
11743 high = (HOST_WIDE_INT) low >> 32;
11750 if (TARGET_MINIMAL_TOC)
11751 fputs (DOUBLE_INT_ASM_OP, file);
11753 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11754 (long) high & 0xffffffff, (long) low & 0xffffffff);
11755 fprintf (file, "0x%lx%08lx\n",
11756 (long) high & 0xffffffff, (long) low & 0xffffffff);
11761 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
11763 if (TARGET_MINIMAL_TOC)
11764 fputs ("\t.long ", file);
11766 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
11767 (long) high & 0xffffffff, (long) low & 0xffffffff);
11768 fprintf (file, "0x%lx,0x%lx\n",
11769 (long) high & 0xffffffff, (long) low & 0xffffffff);
11773 if (TARGET_MINIMAL_TOC)
11774 fputs ("\t.long ", file);
11776 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
11777 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
11783 if (GET_CODE (x) == CONST)
11785 if (GET_CODE (XEXP (x, 0)) != PLUS)
11788 base = XEXP (XEXP (x, 0), 0);
11789 offset = INTVAL (XEXP (XEXP (x, 0), 1));
11792 if (GET_CODE (base) == SYMBOL_REF)
11793 name = XSTR (base, 0);
11794 else if (GET_CODE (base) == LABEL_REF)
11795 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
11796 else if (GET_CODE (base) == CODE_LABEL)
11797 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
11801 real_name = (*targetm.strip_name_encoding) (name);
11802 if (TARGET_MINIMAL_TOC)
11803 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
11806 fprintf (file, "\t.tc %s", real_name);
11809 fprintf (file, ".N%d", - offset);
11811 fprintf (file, ".P%d", offset);
11813 fputs ("[TC],", file);
11816 /* Currently C++ toc references to vtables can be emitted before it
11817 is decided whether the vtable is public or private. If this is
11818 the case, then the linker will eventually complain that there is
11819 a TOC reference to an unknown section. Thus, for vtables only,
11820 we emit the TOC reference to reference the symbol and not the
11822 if (VTABLE_NAME_P (name))
11824 RS6000_OUTPUT_BASENAME (file, name);
11826 fprintf (file, "%d", offset);
11827 else if (offset > 0)
11828 fprintf (file, "+%d", offset);
11831 output_addr_const (file, x);
11835 /* Output an assembler pseudo-op to write an ASCII string of N characters
11836 starting at P to FILE.
11838 On the RS/6000, we have to do this using the .byte operation and
11839 write out special characters outside the quoted string.
11840 Also, the assembler is broken; very long strings are truncated,
11841 so we must artificially break them up early. */
11844 output_ascii (file, p, n)
11850 int i, count_string;
11851 const char *for_string = "\t.byte \"";
11852 const char *for_decimal = "\t.byte ";
11853 const char *to_close = NULL;
11856 for (i = 0; i < n; i++)
11859 if (c >= ' ' && c < 0177)
11862 fputs (for_string, file);
11865 /* Write two quotes to get one. */
11873 for_decimal = "\"\n\t.byte ";
11877 if (count_string >= 512)
11879 fputs (to_close, file);
11881 for_string = "\t.byte \"";
11882 for_decimal = "\t.byte ";
11890 fputs (for_decimal, file);
11891 fprintf (file, "%d", c);
11893 for_string = "\n\t.byte \"";
11894 for_decimal = ", ";
11900 /* Now close the string if we have written one. Then end the line. */
11902 fputs (to_close, file);
11905 /* Generate a unique section name for FILENAME for a section type
11906 represented by SECTION_DESC. Output goes into BUF.
11908 SECTION_DESC can be any string, as long as it is different for each
11909 possible section type.
11911 We name the section in the same manner as xlc. The name begins with an
11912 underscore followed by the filename (after stripping any leading directory
11913 names) with the last period replaced by the string SECTION_DESC. If
11914 FILENAME does not contain a period, SECTION_DESC is appended to the end of
11918 rs6000_gen_section_name (buf, filename, section_desc)
11920 const char *filename;
11921 const char *section_desc;
11923 const char *q, *after_last_slash, *last_period = 0;
11927 after_last_slash = filename;
11928 for (q = filename; *q; q++)
11931 after_last_slash = q + 1;
11932 else if (*q == '.')
11936 len = strlen (after_last_slash) + strlen (section_desc) + 2;
11937 *buf = (char *) xmalloc (len);
11942 for (q = after_last_slash; *q; q++)
11944 if (q == last_period)
11946 strcpy (p, section_desc);
11947 p += strlen (section_desc);
11950 else if (ISALNUM (*q))
11954 if (last_period == 0)
11955 strcpy (p, section_desc);
11960 /* Emit profile function. */
11963 output_profile_hook (labelno)
11964 int labelno ATTRIBUTE_UNUSED;
11966 if (DEFAULT_ABI == ABI_AIX)
11968 #ifdef NO_PROFILE_COUNTERS
11969 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
11972 const char *label_name;
11975 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
11976 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
11977 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
11979 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
11983 else if (DEFAULT_ABI == ABI_DARWIN)
11985 const char *mcount_name = RS6000_MCOUNT;
11986 int caller_addr_regno = LINK_REGISTER_REGNUM;
11988 /* Be conservative and always set this, at least for now. */
11989 current_function_uses_pic_offset_table = 1;
11992 /* For PIC code, set up a stub and collect the caller's address
11993 from r0, which is where the prologue puts it. */
11996 mcount_name = machopic_stub_name (mcount_name);
11997 if (current_function_uses_pic_offset_table)
11998 caller_addr_regno = 0;
12001 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12003 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12007 /* Write function profiler code. */
12010 output_function_profiler (file, labelno)
12017 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12018 switch (DEFAULT_ABI)
12025 /* Fall through. */
12027 case ABI_AIX_NODESC:
12030 warning ("no profiling of 64-bit code for this ABI");
12033 fprintf (file, "\tmflr %s\n", reg_names[0]);
12036 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12037 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12038 reg_names[0], save_lr, reg_names[1]);
12039 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12040 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12041 assemble_name (file, buf);
12042 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12044 else if (flag_pic > 1)
12046 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12047 reg_names[0], save_lr, reg_names[1]);
12048 /* Now, we need to get the address of the label. */
12049 fputs ("\tbl 1f\n\t.long ", file);
12050 assemble_name (file, buf);
12051 fputs ("-.\n1:", file);
12052 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12053 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12054 reg_names[0], reg_names[11]);
12055 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12056 reg_names[0], reg_names[0], reg_names[11]);
12060 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12061 assemble_name (file, buf);
12062 fputs ("@ha\n", file);
12063 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12064 reg_names[0], save_lr, reg_names[1]);
12065 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12066 assemble_name (file, buf);
12067 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12070 if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
12072 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12073 reg_names[STATIC_CHAIN_REGNUM],
12075 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12076 asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
12077 reg_names[STATIC_CHAIN_REGNUM],
12081 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12082 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12087 /* Don't do anything, done in output_profile_hook (). */
12092 /* Adjust the cost of a scheduling dependency. Return the new cost of
12093 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12096 rs6000_adjust_cost (insn, link, dep_insn, cost)
12099 rtx dep_insn ATTRIBUTE_UNUSED;
12102 if (! recog_memoized (insn))
12105 if (REG_NOTE_KIND (link) != 0)
12108 if (REG_NOTE_KIND (link) == 0)
12110 /* Data dependency; DEP_INSN writes a register that INSN reads
12111 some cycles later. */
12112 switch (get_attr_type (insn))
12115 /* Tell the first scheduling pass about the latency between
12116 a mtctr and bctr (and mtlr and br/blr). The first
12117 scheduling pass will not know about this latency since
12118 the mtctr instruction, which has the latency associated
12119 to it, will be generated by reload. */
12120 return TARGET_POWER ? 5 : 4;
12122 /* Leave some extra cycles between a compare and its
12123 dependent branch, to inhibit expensive mispredicts. */
12124 if ((rs6000_cpu_attr == CPU_PPC603
12125 || rs6000_cpu_attr == CPU_PPC604
12126 || rs6000_cpu_attr == CPU_PPC604E
12127 || rs6000_cpu_attr == CPU_PPC620
12128 || rs6000_cpu_attr == CPU_PPC630
12129 || rs6000_cpu_attr == CPU_PPC750
12130 || rs6000_cpu_attr == CPU_PPC7400
12131 || rs6000_cpu_attr == CPU_PPC7450
12132 || rs6000_cpu_attr == CPU_POWER4)
12133 && recog_memoized (dep_insn)
12134 && (INSN_CODE (dep_insn) >= 0)
12135 && (get_attr_type (dep_insn) == TYPE_COMPARE
12136 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12137 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12138 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12143 /* Fall out to return default cost. */
12149 /* A C statement (sans semicolon) to update the integer scheduling
12150 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12151 INSN earlier, increase the priority to execute INSN later. Do not
12152 define this macro if you do not need to adjust the scheduling
12153 priorities of insns. */
12156 rs6000_adjust_priority (insn, priority)
12157 rtx insn ATTRIBUTE_UNUSED;
12160 /* On machines (like the 750) which have asymmetric integer units,
12161 where one integer unit can do multiply and divides and the other
12162 can't, reduce the priority of multiply/divide so it is scheduled
12163 before other integer operations. */
12166 if (! INSN_P (insn))
12169 if (GET_CODE (PATTERN (insn)) == USE)
12172 switch (rs6000_cpu_attr) {
12174 switch (get_attr_type (insn))
12181 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
12182 priority, priority);
12183 if (priority >= 0 && priority < 0x01000000)
12193 /* Return how many instructions the machine can issue per cycle. */
12196 rs6000_issue_rate ()
12198 switch (rs6000_cpu_attr) {
12199 case CPU_RIOS1: /* ? */
12201 case CPU_PPC601: /* ? */
12221 /* Length in units of the trampoline for entering a nested function. */
12224 rs6000_trampoline_size ()
12228 switch (DEFAULT_ABI)
12234 ret = (TARGET_32BIT) ? 12 : 24;
12239 case ABI_AIX_NODESC:
12240 ret = (TARGET_32BIT) ? 40 : 48;
12247 /* Emit RTL insns to initialize the variable parts of a trampoline.
12248 FNADDR is an RTX for the address of the function's pure code.
12249 CXT is an RTX for the static chain value for the function. */
12252 rs6000_initialize_trampoline (addr, fnaddr, cxt)
12257 enum machine_mode pmode = Pmode;
12258 int regsize = (TARGET_32BIT) ? 4 : 8;
12259 rtx ctx_reg = force_reg (pmode, cxt);
12261 switch (DEFAULT_ABI)
12266 /* Macros to shorten the code expansions below. */
12267 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12268 #define MEM_PLUS(addr,offset) \
12269 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12271 /* Under AIX, just build the 3 word function descriptor */
12274 rtx fn_reg = gen_reg_rtx (pmode);
12275 rtx toc_reg = gen_reg_rtx (pmode);
12276 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
12277 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
12278 emit_move_insn (MEM_DEREF (addr), fn_reg);
12279 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
12280 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
12284 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12287 case ABI_AIX_NODESC:
12288 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
12289 FALSE, VOIDmode, 4,
12291 GEN_INT (rs6000_trampoline_size ()), SImode,
12301 /* Table of valid machine attributes. */
12303 const struct attribute_spec rs6000_attribute_table[] =
12305 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12306 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12307 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
12308 { NULL, 0, 0, false, false, false, NULL }
12311 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12312 struct attribute_spec.handler. */
12315 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12318 tree args ATTRIBUTE_UNUSED;
12319 int flags ATTRIBUTE_UNUSED;
12320 bool *no_add_attrs;
12322 if (TREE_CODE (*node) != FUNCTION_TYPE
12323 && TREE_CODE (*node) != FIELD_DECL
12324 && TREE_CODE (*node) != TYPE_DECL)
12326 warning ("`%s' attribute only applies to functions",
12327 IDENTIFIER_POINTER (name));
12328 *no_add_attrs = true;
12334 /* Set longcall attributes on all functions declared when
12335 rs6000_default_long_calls is true. */
12337 rs6000_set_default_type_attributes (type)
12340 if (rs6000_default_long_calls
12341 && (TREE_CODE (type) == FUNCTION_TYPE
12342 || TREE_CODE (type) == METHOD_TYPE))
12343 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12345 TYPE_ATTRIBUTES (type));
12348 /* Return a reference suitable for calling a function with the
12349 longcall attribute. */
12352 rs6000_longcall_ref (call_ref)
12355 const char *call_name;
12358 if (GET_CODE (call_ref) != SYMBOL_REF)
12361 /* System V adds '.' to the internal name, so skip them. */
12362 call_name = XSTR (call_ref, 0);
12363 if (*call_name == '.')
12365 while (*call_name == '.')
12368 node = get_identifier (call_name);
12369 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12372 return force_reg (Pmode, call_ref);
12376 #ifdef USING_ELFOS_H
12378 /* A C statement or statements to switch to the appropriate section
12379 for output of RTX in mode MODE. You can assume that RTX is some
12380 kind of constant in RTL. The argument MODE is redundant except in
12381 the case of a `const_int' rtx. Select the section by calling
12382 `text_section' or one of the alternatives for other sections.
12384 Do not define this macro if you put all constants in the read-only
12388 rs6000_elf_select_rtx_section (mode, x, align)
12389 enum machine_mode mode;
12391 unsigned HOST_WIDE_INT align;
12393 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12396 default_elf_select_rtx_section (mode, x, align);
12399 /* A C statement or statements to switch to the appropriate
12400 section for output of DECL. DECL is either a `VAR_DECL' node
12401 or a constant of some sort. RELOC indicates whether forming
12402 the initial value of DECL requires link-time relocations. */
12405 rs6000_elf_select_section (decl, reloc, align)
12408 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
12410 int size = int_size_in_bytes (TREE_TYPE (decl));
12413 static void (* const sec_funcs[4]) PARAMS ((void)) = {
12414 &readonly_data_section,
12420 needs_sdata = (size > 0
12421 && size <= g_switch_value
12422 && rs6000_sdata != SDATA_NONE
12423 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
12425 if (TREE_CODE (decl) == STRING_CST)
12426 readonly = ! flag_writable_strings;
12427 else if (TREE_CODE (decl) == VAR_DECL)
12428 readonly = (! (flag_pic && reloc)
12429 && TREE_READONLY (decl)
12430 && ! TREE_SIDE_EFFECTS (decl)
12431 && DECL_INITIAL (decl)
12432 && DECL_INITIAL (decl) != error_mark_node
12433 && TREE_CONSTANT (DECL_INITIAL (decl)));
12434 else if (TREE_CODE (decl) == CONSTRUCTOR)
12435 readonly = (! (flag_pic && reloc)
12436 && ! TREE_SIDE_EFFECTS (decl)
12437 && TREE_CONSTANT (decl));
12440 if (needs_sdata && rs6000_sdata != SDATA_EABI)
12443 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
12446 /* A C statement to build up a unique section name, expressed as a
12447 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12448 RELOC indicates whether the initial value of EXP requires
12449 link-time relocations. If you do not define this macro, GCC will use
12450 the symbol name prefixed by `.' as the section name. Note - this
12451 macro can now be called for uninitialized data items as well as
12452 initialised data and functions. */
12455 rs6000_elf_unique_section (decl, reloc)
12463 const char *prefix;
12465 static const char *const prefixes[7][2] =
12467 { ".rodata.", ".gnu.linkonce.r." },
12468 { ".sdata2.", ".gnu.linkonce.s2." },
12469 { ".data.", ".gnu.linkonce.d." },
12470 { ".sdata.", ".gnu.linkonce.s." },
12471 { ".bss.", ".gnu.linkonce.b." },
12472 { ".sbss.", ".gnu.linkonce.sb." },
12473 { ".text.", ".gnu.linkonce.t." }
12476 if (TREE_CODE (decl) == FUNCTION_DECL)
12485 if (TREE_CODE (decl) == STRING_CST)
12486 readonly = ! flag_writable_strings;
12487 else if (TREE_CODE (decl) == VAR_DECL)
12488 readonly = (! (flag_pic && reloc)
12489 && TREE_READONLY (decl)
12490 && ! TREE_SIDE_EFFECTS (decl)
12491 && TREE_CONSTANT (DECL_INITIAL (decl)));
12493 size = int_size_in_bytes (TREE_TYPE (decl));
12494 needs_sdata = (size > 0
12495 && size <= g_switch_value
12496 && rs6000_sdata != SDATA_NONE
12497 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
12499 if (DECL_INITIAL (decl) == 0
12500 || DECL_INITIAL (decl) == error_mark_node)
12502 else if (! readonly)
12509 /* .sdata2 is only for EABI. */
12510 if (sec == 0 && rs6000_sdata != SDATA_EABI)
12516 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
12517 name = (*targetm.strip_name_encoding) (name);
12518 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
12519 len = strlen (name) + strlen (prefix);
12520 string = alloca (len + 1);
12522 sprintf (string, "%s%s", prefix, name);
12524 DECL_SECTION_NAME (decl) = build_string (len, string);
12528 /* If we are referencing a function that is static or is known to be
12529 in this file, make the SYMBOL_REF special. We can use this to indicate
12530 that we can branch to this function without emitting a no-op after the
12531 call. For real AIX calling sequences, we also replace the
12532 function name with the real name (1 or 2 leading .'s), rather than
12533 the function descriptor name. This saves a lot of overriding code
12534 to read the prefixes. */
12537 rs6000_elf_encode_section_info (decl, first)
12544 if (TREE_CODE (decl) == FUNCTION_DECL)
12546 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12547 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
12548 && ! DECL_WEAK (decl))
12549 SYMBOL_REF_FLAG (sym_ref) = 1;
12551 if (DEFAULT_ABI == ABI_AIX)
12553 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
12554 size_t len2 = strlen (XSTR (sym_ref, 0));
12555 char *str = alloca (len1 + len2 + 1);
12558 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
12560 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
12563 else if (rs6000_sdata != SDATA_NONE
12564 && DEFAULT_ABI == ABI_V4
12565 && TREE_CODE (decl) == VAR_DECL)
12567 int size = int_size_in_bytes (TREE_TYPE (decl));
12568 tree section_name = DECL_SECTION_NAME (decl);
12569 const char *name = (char *)0;
12574 if (TREE_CODE (section_name) == STRING_CST)
12576 name = TREE_STRING_POINTER (section_name);
12577 len = TREE_STRING_LENGTH (section_name);
12583 if ((size > 0 && size <= g_switch_value)
12585 && ((len == sizeof (".sdata") - 1
12586 && strcmp (name, ".sdata") == 0)
12587 || (len == sizeof (".sdata2") - 1
12588 && strcmp (name, ".sdata2") == 0)
12589 || (len == sizeof (".sbss") - 1
12590 && strcmp (name, ".sbss") == 0)
12591 || (len == sizeof (".sbss2") - 1
12592 && strcmp (name, ".sbss2") == 0)
12593 || (len == sizeof (".PPC.EMB.sdata0") - 1
12594 && strcmp (name, ".PPC.EMB.sdata0") == 0)
12595 || (len == sizeof (".PPC.EMB.sbss0") - 1
12596 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
12598 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
12599 size_t len = strlen (XSTR (sym_ref, 0));
12600 char *str = alloca (len + 2);
12603 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
12604 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
12609 static const char *
12610 rs6000_elf_strip_name_encoding (str)
12613 while (*str == '*' || *str == '@')
12618 #endif /* USING_ELFOS_H */
12621 /* Return a REG that occurs in ADDR with coefficient 1.
12622 ADDR can be effectively incremented by incrementing REG.
12624 r0 is special and we must not select it as an address
12625 register by this routine since our caller will try to
12626 increment the returned register via an "la" instruction. */
12629 find_addr_reg (addr)
12632 while (GET_CODE (addr) == PLUS)
12634 if (GET_CODE (XEXP (addr, 0)) == REG
12635 && REGNO (XEXP (addr, 0)) != 0)
12636 addr = XEXP (addr, 0);
12637 else if (GET_CODE (XEXP (addr, 1)) == REG
12638 && REGNO (XEXP (addr, 1)) != 0)
12639 addr = XEXP (addr, 1);
12640 else if (CONSTANT_P (XEXP (addr, 0)))
12641 addr = XEXP (addr, 1);
12642 else if (CONSTANT_P (XEXP (addr, 1)))
12643 addr = XEXP (addr, 0);
12647 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
12653 rs6000_fatal_bad_address (op)
12656 fatal_insn ("bad address", op);
12659 /* Called to register all of our global variables with the garbage
12663 rs6000_add_gc_roots ()
12665 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
12666 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
12667 toc_hash_mark_table);
12673 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12674 reference and a constant. */
12677 symbolic_operand (op)
12680 switch (GET_CODE (op))
12687 return (GET_CODE (op) == SYMBOL_REF ||
12688 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
12689 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
12690 && GET_CODE (XEXP (op, 1)) == CONST_INT);
12697 #ifdef RS6000_LONG_BRANCH
12699 static tree stub_list = 0;
12701 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12702 procedure calls to the linked list. */
12705 add_compiler_stub (label_name, function_name, line_number)
12707 tree function_name;
12710 tree stub = build_tree_list (function_name, label_name);
12711 TREE_TYPE (stub) = build_int_2 (line_number, 0);
12712 TREE_CHAIN (stub) = stub_list;
12716 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12717 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12718 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12720 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12721 handling procedure calls from the linked list and initializes the
12725 output_compiler_stub ()
12728 char label_buf[256];
12732 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12734 fprintf (asm_out_file,
12735 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
12737 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12738 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12739 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
12740 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12742 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
12744 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
12747 label_buf[0] = '_';
12748 strcpy (label_buf+1,
12749 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
12752 strcpy (tmp_buf, "lis r12,hi16(");
12753 strcat (tmp_buf, label_buf);
12754 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
12755 strcat (tmp_buf, label_buf);
12756 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
12757 output_asm_insn (tmp_buf, 0);
12759 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12760 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
12761 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
12762 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12768 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12769 already there or not. */
12772 no_previous_def (function_name)
12773 tree function_name;
12776 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12777 if (function_name == STUB_FUNCTION_NAME (stub))
12782 /* GET_PREV_LABEL gets the label name from the previous definition of
12786 get_prev_label (function_name)
12787 tree function_name;
12790 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
12791 if (function_name == STUB_FUNCTION_NAME (stub))
12792 return STUB_LABEL_NAME (stub);
12796 /* INSN is either a function call or a millicode call. It may have an
12797 unconditional jump in its delay slot.
12799 CALL_DEST is the routine we are calling. */
12802 output_call (insn, call_dest, operand_number)
12805 int operand_number;
12807 static char buf[256];
12808 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
12811 tree funname = get_identifier (XSTR (call_dest, 0));
12813 if (no_previous_def (funname))
12815 int line_number = 0;
12816 rtx label_rtx = gen_label_rtx ();
12817 char *label_buf, temp_buf[256];
12818 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
12819 CODE_LABEL_NUMBER (label_rtx));
12820 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
12821 labelname = get_identifier (label_buf);
12822 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
12824 line_number = NOTE_LINE_NUMBER (insn);
12825 add_compiler_stub (labelname, funname, line_number);
12828 labelname = get_prev_label (funname);
12830 sprintf (buf, "jbsr %%z%d,%.246s",
12831 operand_number, IDENTIFIER_POINTER (labelname));
12836 sprintf (buf, "bl %%z%d", operand_number);
12841 #endif /* RS6000_LONG_BRANCH */
12843 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12845 const char *const symbol_ = (SYMBOL); \
12846 char *buffer_ = (BUF); \
12847 if (symbol_[0] == '"') \
12849 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12851 else if (name_needs_quotes(symbol_)) \
12853 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12857 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12862 /* Generate PIC and indirect symbol stubs. */
12865 machopic_output_stub (file, symb, stub)
12867 const char *symb, *stub;
12869 unsigned int length;
12870 char *symbol_name, *lazy_ptr_name;
12871 char *local_label_0;
12872 static int label = 0;
12874 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12875 symb = (*targetm.strip_name_encoding) (symb);
12879 length = strlen (symb);
12880 symbol_name = alloca (length + 32);
12881 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
12883 lazy_ptr_name = alloca (length + 32);
12884 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
12886 local_label_0 = alloca (length + 32);
12887 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
12890 machopic_picsymbol_stub_section ();
12892 machopic_symbol_stub_section ();
12894 fprintf (file, "%s:\n", stub);
12895 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
12899 fprintf (file, "\tmflr r0\n");
12900 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
12901 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
12902 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
12903 lazy_ptr_name, local_label_0);
12904 fprintf (file, "\tmtlr r0\n");
12905 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
12906 lazy_ptr_name, local_label_0);
12907 fprintf (file, "\tmtctr r12\n");
12908 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
12909 lazy_ptr_name, local_label_0);
12910 fprintf (file, "\tbctr\n");
12913 fprintf (file, "non-pure not supported\n");
12915 machopic_lazy_symbol_ptr_section ();
12916 fprintf (file, "%s:\n", lazy_ptr_name);
12917 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
12918 fprintf (file, "\t.long dyld_stub_binding_helper\n");
12921 /* Legitimize PIC addresses. If the address is already
12922 position-independent, we return ORIG. Newly generated
12923 position-independent addresses go into a reg. This is REG if non
12924 zero, otherwise we allocate register(s) as necessary. */
12926 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
12929 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
12931 enum machine_mode mode;
12936 if (reg == NULL && ! reload_in_progress && ! reload_completed)
12937 reg = gen_reg_rtx (Pmode);
12939 if (GET_CODE (orig) == CONST)
12941 if (GET_CODE (XEXP (orig, 0)) == PLUS
12942 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
12945 if (GET_CODE (XEXP (orig, 0)) == PLUS)
12948 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
12951 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
12957 if (GET_CODE (offset) == CONST_INT)
12959 if (SMALL_INT (offset))
12960 return plus_constant (base, INTVAL (offset));
12961 else if (! reload_in_progress && ! reload_completed)
12962 offset = force_reg (Pmode, offset);
12965 rtx mem = force_const_mem (Pmode, orig);
12966 return machopic_legitimize_pic_address (mem, Pmode, reg);
12969 return gen_rtx (PLUS, Pmode, base, offset);
12972 /* Fall back on generic machopic code. */
12973 return machopic_legitimize_pic_address (orig, mode, reg);
12976 /* This is just a placeholder to make linking work without having to
12977 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
12978 ever needed for Darwin (not too likely!) this would have to get a
12979 real definition. */
12986 #endif /* TARGET_MACHO */
12989 static unsigned int
12990 rs6000_elf_section_type_flags (decl, name, reloc)
12995 unsigned int flags = default_section_type_flags (decl, name, reloc);
12997 if (TARGET_RELOCATABLE)
12998 flags |= SECTION_WRITE;
13003 /* Record an element in the table of global constructors. SYMBOL is
13004 a SYMBOL_REF of the function to be called; PRIORITY is a number
13005 between 0 and MAX_INIT_PRIORITY.
13007 This differs from default_named_section_asm_out_constructor in
13008 that we have special handling for -mrelocatable. */
13011 rs6000_elf_asm_out_constructor (symbol, priority)
13015 const char *section = ".ctors";
13018 if (priority != DEFAULT_INIT_PRIORITY)
13020 sprintf (buf, ".ctors.%.5u",
13021 /* Invert the numbering so the linker puts us in the proper
13022 order; constructors are run from right to left, and the
13023 linker sorts in increasing order. */
13024 MAX_INIT_PRIORITY - priority);
13028 named_section_flags (section, SECTION_WRITE);
13029 assemble_align (POINTER_SIZE);
13031 if (TARGET_RELOCATABLE)
13033 fputs ("\t.long (", asm_out_file);
13034 output_addr_const (asm_out_file, symbol);
13035 fputs (")@fixup\n", asm_out_file);
13038 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13042 rs6000_elf_asm_out_destructor (symbol, priority)
13046 const char *section = ".dtors";
13049 if (priority != DEFAULT_INIT_PRIORITY)
13051 sprintf (buf, ".dtors.%.5u",
13052 /* Invert the numbering so the linker puts us in the proper
13053 order; constructors are run from right to left, and the
13054 linker sorts in increasing order. */
13055 MAX_INIT_PRIORITY - priority);
13059 named_section_flags (section, SECTION_WRITE);
13060 assemble_align (POINTER_SIZE);
13062 if (TARGET_RELOCATABLE)
13064 fputs ("\t.long (", asm_out_file);
13065 output_addr_const (asm_out_file, symbol);
13066 fputs (")@fixup\n", asm_out_file);
13069 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13075 xcoff_asm_named_section (name, flags)
13077 unsigned int flags ATTRIBUTE_UNUSED;
13079 fprintf (asm_out_file, "\t.csect %s\n", name);
13083 rs6000_xcoff_select_section (exp, reloc, align)
13086 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13088 if ((TREE_CODE (exp) == STRING_CST
13089 && ! flag_writable_strings)
13090 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
13091 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
13092 && DECL_INITIAL (exp)
13093 && (DECL_INITIAL (exp) == error_mark_node
13094 || TREE_CONSTANT (DECL_INITIAL (exp)))
13097 if (TREE_PUBLIC (exp))
13098 read_only_data_section ();
13100 read_only_private_data_section ();
13104 if (TREE_PUBLIC (exp))
13107 private_data_section ();
13112 rs6000_xcoff_unique_section (decl, reloc)
13114 int reloc ATTRIBUTE_UNUSED;
13120 if (TREE_CODE (decl) == FUNCTION_DECL)
13122 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
13123 len = strlen (name) + 5;
13124 string = alloca (len + 1);
13125 sprintf (string, ".%s[PR]", name);
13126 DECL_SECTION_NAME (decl) = build_string (len, string);
13130 /* Select section for constant in constant pool.
13132 On RS/6000, all constants are in the private read-only data area.
13133 However, if this is being placed in the TOC it must be output as a
13137 rs6000_xcoff_select_rtx_section (mode, x, align)
13138 enum machine_mode mode;
13140 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13142 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13145 read_only_private_data_section ();
13148 /* Remove any trailing [DS] or the like from the symbol name. */
13150 static const char *
13151 rs6000_xcoff_strip_name_encoding (name)
13157 len = strlen (name);
13158 if (name[len - 1] == ']')
13159 return ggc_alloc_string (name, len - 4);
13164 #endif /* TARGET_XCOFF */
13166 /* Note that this is also used for ELF64. */
13169 rs6000_xcoff_encode_section_info (decl, first)
13171 int first ATTRIBUTE_UNUSED;
13173 if (TREE_CODE (decl) == FUNCTION_DECL
13174 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
13175 && ! DECL_WEAK (decl))
13176 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;