1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
/* Default TARGET_NO_PROTOTYPE to 0 unless a subtarget header has
   already defined it.  */
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
/* Local min/max helpers.  NOTE: each argument is evaluated twice, so
   do not pass expressions with side effects.  */
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
/* File-scope state for command-line option processing and code
   generation.  Most of these are set once by rs6000_override_options
   and read elsewhere in the back end.  */
61 enum processor_type rs6000_cpu;
/* The three sources of a CPU selection, in increasing priority:
   configure-time --with-cpu, -mcpu=, and -mtune= (tune-only).
   NOTE(review): the initializer braces around the three entries are
   elided in this extract.  */
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
/* Size of long double: raw -mlong-double- string and the parsed
   bit size (64 or 128, validated in rs6000_override_options).  */
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup.  */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use.  */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option.  */
107 const char *rs6000_abi_string;
/* -mdebug= handling: raw string plus the two flags it can enable
   ("all", "stack", "arg" — see rs6000_override_options).  */
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
/* Buffer for the internal TOC label name, filled in by
   ASM_GENERATE_INTERNAL_LABEL ("LCTOC") in rs6000_override_options.  */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack.  */
119 static int rs6000_sr_alias_set;
/* Forward declarations for file-local helpers.  PARAMS wraps the
   prototype argument list for pre-ISO compilers.  Note that
   rs6000_attribute_table (below) is deliberately non-static: it is
   exported via TARGET_ATTRIBUTE_TABLE.  */
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
/* TOC constant-pool hashing support.  */
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
/* ELF/COFF object-format hooks.  */
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
/* Scheduler hooks.  */
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
/* Builtin (mostly AltiVec) expansion helpers.  */
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
/* AltiVec prologue/epilogue (VRSAVE) support.  */
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 static int easy_vector_constant PARAMS ((rtx));
/* Default register names, indexed by hard register number.  Layout:
   32 GPRs, 32 FPRs, then mq/lr/ctr/ap, 8 CR fields, then the 32
   AltiVec registers.  May be overwritten with alt_reg_names below
   when TARGET_REGNAMES is in effect.  */
179 /* Default register names.  */
180 char rs6000_reg_names[][8] =
182 "0", "1", "2", "3", "4", "5", "6", "7",
183 "8", "9", "10", "11", "12", "13", "14", "15",
184 "16", "17", "18", "19", "20", "21", "22", "23",
185 "24", "25", "26", "27", "28", "29", "30", "31",
186 "0", "1", "2", "3", "4", "5", "6", "7",
187 "8", "9", "10", "11", "12", "13", "14", "15",
188 "16", "17", "18", "19", "20", "21", "22", "23",
189 "24", "25", "26", "27", "28", "29", "30", "31",
190 "mq", "lr", "ctr","ap",
/* CR fields 0-7.  */
191 "0", "1", "2", "3", "4", "5", "6", "7",
193 /* AltiVec registers.  */
194 "0", "1", "2", "3", "4", "5", "6", "7",
195 "8", "9", "10", "11", "12", "13", "14", "15",
196 "16", "17", "18", "19", "20", "21", "22", "23",
197 "24", "25", "26", "27", "28", "29", "30", "31",
/* Alternate, prefixed register names (%rN/%fN/%crN/%vN) copied over
   rs6000_reg_names in rs6000_override_options when the user asks for
   them via TARGET_REGNAMES.  Same layout as rs6000_reg_names.  */
201 #ifdef TARGET_REGNAMES
202 static const char alt_reg_names[][8] =
204 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
205 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
206 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
207 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
208 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
209 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
210 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
211 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
212 "mq", "lr", "ctr", "ap",
213 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
215 /* AltiVec registers.  */
216 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
217 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
218 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
219 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
/* MASK_STRICT_ALIGN is only defined by some subtargets; default it to
   0 so it can be OR'd into the processor table unconditionally.  */
224 #ifndef MASK_STRICT_ALIGN
225 #define MASK_STRICT_ALIGN 0
228 /* Initialize the GCC target structure.  */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
/* Non-COFF, non-ELF (e.g. a.out) directives.  */
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
256 /* This hook deals with fixups for relocatable code and DI-mode objects
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
/* Instruction scheduling hooks.  */
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
/* The single target-hook vtable instance for this back end.  */
287 struct gcc_target targetm = TARGET_INITIALIZER;
289 /* Override command line options. Mostly we process the processor
290 type and sometimes adjust other TARGET_ options. */
/* DEFAULT_CPU is the configure-time processor name (may be NULL or
   empty); it seeds rs6000_select[0] and is overridden by -mcpu= and
   -mtune=.  Called once, after all switches are parsed.  */
293 rs6000_override_options (default_cpu)
294 const char *default_cpu;
297 struct rs6000_cpu_select *ptr;
299 /* Simplify the entries below by making a mask for any POWER
300 variant and any PowerPC variant. */
302 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304 | MASK_PPC_GFXOPT | MASK_POWERPC64)
305 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
/* Table mapping each accepted -mcpu=/-mtune= name to its processor
   enum plus the target_flags bits that CPU turns on and off.  */
309 const char *const name; /* Canonical processor name. */
310 const enum processor_type processor; /* Processor type enum value. */
311 const int target_enable; /* Target flags to enable. */
312 const int target_disable; /* Target flags to disable. */
313 } const processor_target_table[]
314 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
315 POWER_MASKS | POWERPC_MASKS},
316 {"power", PROCESSOR_POWER,
317 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
318 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
319 {"power2", PROCESSOR_POWER,
320 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
321 POWERPC_MASKS | MASK_NEW_MNEMONICS},
322 {"power3", PROCESSOR_PPC630,
323 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
324 POWER_MASKS | MASK_PPC_GPOPT},
325 {"powerpc", PROCESSOR_POWERPC,
326 MASK_POWERPC | MASK_NEW_MNEMONICS,
327 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
328 {"powerpc64", PROCESSOR_POWERPC64,
329 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
330 POWER_MASKS | POWERPC_OPT_MASKS},
331 {"rios", PROCESSOR_RIOS1,
332 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
333 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
334 {"rios1", PROCESSOR_RIOS1,
335 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
336 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
337 {"rsc", PROCESSOR_PPC601,
338 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
339 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
340 {"rsc1", PROCESSOR_PPC601,
341 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
342 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
343 {"rios2", PROCESSOR_RIOS2,
344 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
345 POWERPC_MASKS | MASK_NEW_MNEMONICS},
346 {"rs64a", PROCESSOR_RS64A,
347 MASK_POWERPC | MASK_NEW_MNEMONICS,
348 POWER_MASKS | POWERPC_OPT_MASKS},
349 {"401", PROCESSOR_PPC403,
350 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
351 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
352 {"403", PROCESSOR_PPC403,
353 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
354 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
355 {"405", PROCESSOR_PPC405,
356 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
357 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
358 {"505", PROCESSOR_MPCCORE,
359 MASK_POWERPC | MASK_NEW_MNEMONICS,
360 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
361 {"601", PROCESSOR_PPC601,
362 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
363 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
364 {"602", PROCESSOR_PPC603,
365 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
366 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
367 {"603", PROCESSOR_PPC603,
368 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
369 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
370 {"603e", PROCESSOR_PPC603,
371 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
372 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
373 {"ec603e", PROCESSOR_PPC603,
374 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
376 {"604", PROCESSOR_PPC604,
377 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
378 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
379 {"604e", PROCESSOR_PPC604e,
380 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
381 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
382 {"620", PROCESSOR_PPC620,
383 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
384 POWER_MASKS | MASK_PPC_GPOPT},
385 {"630", PROCESSOR_PPC630,
386 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
387 POWER_MASKS | MASK_PPC_GPOPT},
388 {"740", PROCESSOR_PPC750,
389 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
390 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
391 {"750", PROCESSOR_PPC750,
392 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
393 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
394 {"7400", PROCESSOR_PPC7400,
395 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
396 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
397 {"7450", PROCESSOR_PPC7450,
398 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
399 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
400 {"801", PROCESSOR_MPCCORE,
401 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
402 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
403 {"821", PROCESSOR_MPCCORE,
404 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
405 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
406 {"823", PROCESSOR_MPCCORE,
407 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
408 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
409 {"860", PROCESSOR_MPCCORE,
410 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
411 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
413 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
415 /* Save current -mmultiple/-mno-multiple status. */
416 int multiple = TARGET_MULTIPLE;
417 /* Save current -mstring/-mno-string status. */
418 int string = TARGET_STRING;
420 /* Identify the processor type. */
421 rs6000_select[0].string = default_cpu;
422 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
/* Later rs6000_select entries (-mcpu=, -mtune=) override earlier
   ones; an unrecognized name is reported via error().  */
424 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
426 ptr = &rs6000_select[i];
427 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
429 for (j = 0; j < ptt_size; j++)
430 if (! strcmp (ptr->string, processor_target_table[j].name))
433 rs6000_cpu = processor_target_table[j].processor;
/* Flag adjustment only happens for entries that set the arch
   (i.e. not for -mtune=, whose set_arch field is 0).  */
437 target_flags |= processor_target_table[j].target_enable;
438 target_flags &= ~processor_target_table[j].target_disable;
444 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
448 /* If we are optimizing big endian systems for space, use the store
449 multiple instructions. */
450 if (BYTES_BIG_ENDIAN && optimize_size)
451 target_flags |= MASK_MULTIPLE;
453 /* If -mmultiple or -mno-multiple was explicitly used, don't
454 override with the processor default */
455 if (TARGET_MULTIPLE_SET)
456 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
458 /* If -mstring or -mno-string was explicitly used, don't override
459 with the processor default. */
460 if (TARGET_STRING_SET)
461 target_flags = (target_flags & ~MASK_STRING) | string;
463 /* Don't allow -mmultiple or -mstring on little endian systems
464 unless the cpu is a 750, because the hardware doesn't support the
465 instructions used in little endian mode, and causes an alignment
466 trap. The 750 does not cause an alignment trap (except when the
467 target is unaligned). */
469 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
473 target_flags &= ~MASK_MULTIPLE;
474 if (TARGET_MULTIPLE_SET)
475 warning ("-mmultiple is not supported on little endian systems");
480 target_flags &= ~MASK_STRING;
481 if (TARGET_STRING_SET)
482 warning ("-mstring is not supported on little endian systems");
/* On AIX all code is PIC already, so -fpic/-fPIC is a no-op worth
   mentioning under -Wextra.  */
486 if (flag_pic && DEFAULT_ABI == ABI_AIX && extra_warnings)
488 warning ("-f%s ignored (all code is position independent)",
489 (flag_pic > 1) ? "PIC" : "pic");
493 #ifdef XCOFF_DEBUGGING_INFO
494 if (flag_function_sections && (write_symbols != NO_DEBUG)
495 && DEFAULT_ABI == ABI_AIX)
497 warning ("-ffunction-sections disabled on AIX when debugging");
498 flag_function_sections = 0;
501 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
503 warning ("-fdata-sections not supported on AIX");
504 flag_data_sections = 0;
508 /* Set debug flags */
509 if (rs6000_debug_name)
511 if (! strcmp (rs6000_debug_name, "all"))
512 rs6000_debug_stack = rs6000_debug_arg = 1;
513 else if (! strcmp (rs6000_debug_name, "stack"))
514 rs6000_debug_stack = 1;
515 else if (! strcmp (rs6000_debug_name, "arg"))
516 rs6000_debug_arg = 1;
518 error ("unknown -mdebug-%s switch", rs6000_debug_name);
521 /* Set size of long double */
522 rs6000_long_double_type_size = 64;
523 if (rs6000_long_double_size_string)
/* Only 64 and 128 are valid; anything else (or trailing junk after
   the number) is a hard error.  */
526 int size = strtol (rs6000_long_double_size_string, &tail, 10);
527 if (*tail != '\0' || (size != 64 && size != 128))
528 error ("Unknown switch -mlong-double-%s",
529 rs6000_long_double_size_string);
531 rs6000_long_double_type_size = size;
534 /* Handle -mabi= options. */
535 rs6000_parse_abi_options ();
537 #ifdef TARGET_REGNAMES
538 /* If the user desires alternate register names, copy in the
539 alternate names now. */
541 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
544 #ifdef SUBTARGET_OVERRIDE_OPTIONS
545 SUBTARGET_OVERRIDE_OPTIONS;
547 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
548 SUBSUBTARGET_OVERRIDE_OPTIONS;
551 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
552 If -maix-struct-return or -msvr4-struct-return was explicitly
553 used, don't override with the ABI default. */
554 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
556 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
557 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET)
559 target_flags |= MASK_AIX_STRUCT_RET;
562 /* Register global variables with the garbage collector. */
563 rs6000_add_gc_roots ();
565 /* Allocate an alias set for register saves & restores from stack. */
566 rs6000_sr_alias_set = new_alias_set ();
/* Pre-compute the internal label used to address the TOC.  */
569 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
571 /* We can only guarantee the availability of DI pseudo-ops when
572 assembling for 64-bit targets. */
575 targetm.asm_out.aligned_op.di = NULL;
576 targetm.asm_out.unaligned_op.di = NULL;
579 /* Arrange to save and restore machine status around nested functions. */
580 init_machine_status = rs6000_init_machine_status;
581 free_machine_status = rs6000_free_machine_status;
584 /* Handle -mabi= options.  */
/* Reads the global rs6000_abi_string (set from -mabi=).  A null
   string means no -mabi= was given and nothing changes; "altivec" /
   "no-altivec" toggle rs6000_altivec_abi; anything else is an error.  */
586 rs6000_parse_abi_options ()
588 if (rs6000_abi_string == 0)
590 else if (! strcmp (rs6000_abi_string, "altivec"))
591 rs6000_altivec_abi = 1;
592 else if (! strcmp (rs6000_abi_string, "no-altivec"))
593 rs6000_altivec_abi = 0;
595 error ("unknown ABI specified: '%s'", rs6000_abi_string);
/* Hook called for -O level processing.  Both arguments are currently
   unused; the rs6000 port makes no per-level adjustments here.  */
599 optimization_options (level, size)
600 int level ATTRIBUTE_UNUSED;
601 int size ATTRIBUTE_UNUSED;
605 /* Do anything needed at the start of the asm file. */
/* Under -fverbose-asm, emits an assembler comment to FILE listing the
   effective cpu/tune selection, the -msdata model, and the -G
   threshold.  DEFAULT_CPU is the configure-time processor name.  */
608 rs6000_file_start (file, default_cpu)
610 const char *default_cpu;
/* START holds the comment-leader prefix; it is reset to "" after the
   first item so later items continue the same comment line.  */
614 const char *start = buffer;
615 struct rs6000_cpu_select *ptr;
617 if (flag_verbose_asm)
619 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
620 rs6000_select[0].string = default_cpu;
622 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
624 ptr = &rs6000_select[i];
625 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
627 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
633 switch (rs6000_sdata)
635 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
636 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
637 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
638 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
641 if (rs6000_sdata && g_switch_value)
643 fprintf (file, "%s -G %d", start, g_switch_value);
654 /* Create a CONST_DOUBLE from a string.  */
/* Parses STRING with REAL_VALUE_ATOF in MODE's format and wraps the
   result as an immediate real constant rtx.  */
657 rs6000_float_const (string, mode)
659 enum machine_mode mode;
661 REAL_VALUE_TYPE value;
662 value = REAL_VALUE_ATOF (string, mode);
663 return immed_real_const_1 (value, mode);
666 /* Return non-zero if this function is known to have a null epilogue.  */
/* NOTE(review): the function header is elided in this extract; this
   predicate appears to be direct_return — confirm against the full
   source.  Only meaningful after reload: checks the frame info for
   "nothing saved, nothing to restore".  */
671 if (reload_completed)
673 rs6000_stack_t *info = rs6000_stack_info ();
/* 32/64 mean "no GPR/FPR needs saving" (one past the last hard
   register of each class).  */
675 if (info->first_gp_reg_save == 32
676 && info->first_fp_reg_save == 64
677 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680 && info->vrsave_mask == 0
688 /* Returns 1 always.  */
/* Trivial predicate: accepts every operand in every mode.  */
691 any_operand (op, mode)
692 rtx op ATTRIBUTE_UNUSED;
693 enum machine_mode mode ATTRIBUTE_UNUSED;
698 /* Returns 1 if op is the count register.  */
/* Accepts the hard CTR register, or a pseudo register (which might
   still be allocated to CTR).  Anything that is not a REG fails.  */
700 count_register_operand (op, mode)
702 enum machine_mode mode ATTRIBUTE_UNUSED;
704 if (GET_CODE (op) != REG)
707 if (REGNO (op) == COUNT_REGISTER_REGNUM)
/* NOTE(review): '>' rather than '>=' here — pseudos start at
   FIRST_PSEUDO_REGISTER, so this looks off-by-one; confirm against
   the full source before changing.  */
710 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
716 /* Returns 1 if op is an altivec register.  */
/* Accepts a pseudo, or a hard register in the AltiVec register class;
   the mode check is delegated to register_operand.  */
718 altivec_register_operand (op, mode)
720 enum machine_mode mode ATTRIBUTE_UNUSED;
723 return (register_operand (op, mode)
724 && (GET_CODE (op) != REG
725 || REGNO (op) > FIRST_PSEUDO_REGISTER
726 || ALTIVEC_REGNO_P (REGNO (op))));
/* Returns 1 if OP is the XER (fixed-point exception) register.  */
730 xer_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 if (GET_CODE (op) != REG)
737 if (XER_REGNO_P (REGNO (op)))
743 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
744 by such constants completes more quickly. */
/* True for CONST_INTs in [-128, 127]; MODE is ignored.  */
747 s8bit_cint_operand (op, mode)
749 enum machine_mode mode ATTRIBUTE_UNUSED;
751 return ( GET_CODE (op) == CONST_INT
752 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755 /* Return 1 if OP is a constant that can fit in a D field.  */
/* I.e. a signed 16-bit CONST_INT (constraint letter 'I').  */
758 short_cint_operand (op, mode)
760 enum machine_mode mode ATTRIBUTE_UNUSED;
762 return (GET_CODE (op) == CONST_INT
763 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766 /* Similar for an unsigned D field.  */
/* Masks the value to MODE's width before the 'K' (unsigned 16-bit)
   constraint test, so e.g. sign-extended HImode values still match.  */
769 u_short_cint_operand (op, mode)
771 enum machine_mode mode ATTRIBUTE_UNUSED;
773 return (GET_CODE (op) == CONST_INT
774 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field.  */
/* The +0x8000 bias maps the signed 16-bit range onto [0, 0x10000), so
   anything >= 0x10000 after biasing is out of D-field range.  */
780 non_short_cint_operand (op, mode)
782 enum machine_mode mode ATTRIBUTE_UNUSED;
784 return (GET_CODE (op) == CONST_INT
785 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788 /* Returns 1 if OP is a CONST_INT that is a positive value
789 and an exact power of 2.  */
/* exact_log2 returns -1 for zero, negatives, and non-powers-of-2.  */
792 exact_log2_cint_operand (op, mode)
794 enum machine_mode mode ATTRIBUTE_UNUSED;
796 return (GET_CODE (op) == CONST_INT
798 && exact_log2 (INTVAL (op)) >= 0);
801 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
/* ... CTR, LR, or a CR/XER register): general-purpose, floating,
   or pseudo registers only.  */
805 gpc_reg_operand (op, mode)
807 enum machine_mode mode;
809 return (register_operand (op, mode)
810 && (GET_CODE (op) != REG
811 || (REGNO (op) >= ARG_POINTER_REGNUM
812 && !XER_REGNO_P (REGNO (op)))
813 || REGNO (op) < MQ_REGNO));
816 /* Returns 1 if OP is either a pseudo-register or a register denoting a
/* ... CR (condition register) field.  */
820 cc_reg_operand (op, mode)
822 enum machine_mode mode;
824 return (register_operand (op, mode)
825 && (GET_CODE (op) != REG
826 || REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || CR_REGNO_P (REGNO (op))));
830 /* Returns 1 if OP is either a pseudo-register or a register denoting a
831 CR field that isn't CR0.  */
/* CR0 is excluded because record-form ("dot") instructions clobber it.  */
834 cc_reg_not_cr0_operand (op, mode)
836 enum machine_mode mode;
838 return (register_operand (op, mode)
839 && (GET_CODE (op) != REG
840 || REGNO (op) >= FIRST_PSEUDO_REGISTER
841 || CR_REGNO_NOT_CR0_P (REGNO (op))));
844 /* Returns 1 if OP is either a constant integer valid for a D-field or
845 a non-special register. If a register, it must be in the proper
846 mode unless MODE is VOIDmode. */
849 reg_or_short_operand (op, mode)
851 enum machine_mode mode;
853 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856 /* Similar, except check if the negation of the constant would be
857 valid for a D-field.  */
/* Constraint 'P': -INTVAL fits in a signed 16-bit immediate.  */
860 reg_or_neg_short_operand (op, mode)
862 enum machine_mode mode;
864 if (GET_CODE (op) == CONST_INT)
865 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867 return gpc_reg_operand (op, mode);
870 /* Returns 1 if OP is either a constant integer valid for a DS-field or
871 a non-special register. If a register, it must be in the proper
872 mode unless MODE is VOIDmode. */
/* DS-fields (ld/std) require the low two bits of the offset to be 0,
   hence the extra "& 3" check on the constant.  */
875 reg_or_aligned_short_operand (op, mode)
877 enum machine_mode mode;
879 if (gpc_reg_operand (op, mode))
881 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
888 /* Return 1 if the operand is either a register or an integer whose
889 high-order 16 bits are zero.  */
892 reg_or_u_short_operand (op, mode)
894 enum machine_mode mode;
896 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899 /* Return 1 is the operand is either a non-special register or ANY
/* ... constant integer (no range restriction at all).  */
903 reg_or_cint_operand (op, mode)
905 enum machine_mode mode;
907 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910 /* Return 1 is the operand is either a non-special register or ANY
911 32-bit signed constant integer.  */
/* On hosts where HOST_WIDE_INT is wider than 32 bits, the bias-and-
   compare confines the value to [-2^31, 2^31).  On 32-bit hosts every
   CONST_INT already fits, so no range check is compiled in.  */
914 reg_or_arith_cint_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 #if HOST_BITS_PER_WIDE_INT != 32
921 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
922 < (unsigned HOST_WIDE_INT) 0x100000000ll)
927 /* Return 1 is the operand is either a non-special register or a 32-bit
928 signed constant integer valid for 64-bit addition.  */
/* The range is [-0x80000000, 0x7fff8000): the upper bound is shaved
   so the addis+addi (high/low) decomposition cannot overflow.  */
931 reg_or_add_cint64_operand (op, mode)
933 enum machine_mode mode;
935 return (gpc_reg_operand (op, mode)
936 || (GET_CODE (op) == CONST_INT
937 #if HOST_BITS_PER_WIDE_INT == 32
938 && INTVAL (op) < 0x7fff8000
940 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
946 /* Return 1 is the operand is either a non-special register or a 32-bit
947 signed constant integer valid for 64-bit subtraction.  */
/* Same range logic as reg_or_add_cint64_operand, applied to the
   negated value since subtraction is emitted as addition of -OP.  */
950 reg_or_sub_cint64_operand (op, mode)
952 enum machine_mode mode;
954 return (gpc_reg_operand (op, mode)
955 || (GET_CODE (op) == CONST_INT
956 #if HOST_BITS_PER_WIDE_INT == 32
957 && (- INTVAL (op)) < 0x7fff8000
959 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
965 /* Return 1 is the operand is either a non-special register or ANY
966 32-bit unsigned constant integer.  */
/* Handles both CONST_INT (with care for modes wider than the host
   word) and CONST_DOUBLE used as a wide integer carrier.  */
969 reg_or_logical_cint_operand (op, mode)
971 enum machine_mode mode;
973 if (GET_CODE (op) == CONST_INT)
975 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
977 if (GET_MODE_BITSIZE (mode) <= 32)
/* Reject values with bits set above the low 32.  */
984 return ((INTVAL (op) & GET_MODE_MASK (mode)
985 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
987 else if (GET_CODE (op) == CONST_DOUBLE)
989 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
/* High word must be zero for an unsigned 32-bit value.  */
993 return CONST_DOUBLE_HIGH (op) == 0;
996 return gpc_reg_operand (op, mode);
999 /* Return 1 if the operand is an operand that can be loaded via the GOT.  */
/* Symbols, labels, and CONST expressions (symbol plus offset).  */
1002 got_operand (op, mode)
1004 enum machine_mode mode ATTRIBUTE_UNUSED;
1006 return (GET_CODE (op) == SYMBOL_REF
1007 || GET_CODE (op) == CONST
1008 || GET_CODE (op) == LABEL_REF);
1011 /* Return 1 if the operand is a simple references that can be loaded via
1012 the GOT (labels involving addition aren't allowed).  */
/* Like got_operand but without CONST, i.e. no symbol+offset forms.  */
1015 got_no_const_operand (op, mode)
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1022 /* Return the number of instructions it takes to form a constant in an
1023 integer register. */
/* VALUE is a host-wide integer.  Returns 1 for values reachable with
   a single addi ('I') or addis ('L'); on 64-bit-capable builds it
   recurses on the sign-extended low 32 bits and the high part.  */
1026 num_insns_constant_wide (value)
1027 HOST_WIDE_INT value;
1029 /* signed constant loadable with {cal|addi} */
1030 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1033 /* constant loadable with {cau|addis} */
1034 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1037 #if HOST_BITS_PER_WIDE_INT == 64
1038 else if (TARGET_POWERPC64)
/* Split into sign-extended low word and the remaining high bits.
   NOTE(review): the shift count is 31, not 32 — confirm against the
   full source before touching this.  */
1040 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1041 HOST_WIDE_INT high = value >> 31;
1043 if (high == 0 || high == -1)
/* +1 for the shift (sldi) that joins the two halves.  */
1049 return num_insns_constant_wide (high) + 1;
1051 return (num_insns_constant_wide (high)
1052 + num_insns_constant_wide (low) + 1);
/* Return the number of instructions needed to load constant rtx OP of
   mode MODE into an integer register.  Handles CONST_INT directly and
   CONST_DOUBLE both as a float image (SFmode/DFmode) and as a 64-bit
   integer carrier (VOIDmode/DImode).  */
1061 num_insns_constant (op, mode)
1063 enum machine_mode mode;
1065 if (GET_CODE (op) == CONST_INT)
1067 #if HOST_BITS_PER_WIDE_INT == 64
/* A value outside the sign-extended 32-bit range that matches the
   rldic-style mask pattern loads in fewer insns.  */
1068 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1069 && mask64_operand (op, mode))
1073 return num_insns_constant_wide (INTVAL (op));
1076 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
/* Cost the 32-bit target image of the float, not the host value.  */
1081 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1082 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1083 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1086 else if (GET_CODE (op) == CONST_DOUBLE)
1092 int endian = (WORDS_BIG_ENDIAN == 0);
1094 if (mode == VOIDmode || mode == DImode)
/* Integer-valued CONST_DOUBLE: words are directly available.  */
1096 high = CONST_DOUBLE_HIGH (op);
1097 low = CONST_DOUBLE_LOW (op);
/* Otherwise convert the float to its two target words first.  */
1101 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1102 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1104 low = l[1 - endian];
/* 32-bit target: each word is loaded independently.  */
1108 return (num_insns_constant_wide (low)
1109 + num_insns_constant_wide (high));
/* 64-bit target: values that sign-extend from the low word need
   only the low-word sequence.  */
1113 if (high == 0 && low >= 0)
1114 return num_insns_constant_wide (low);
1116 else if (high == -1 && low < 0)
1117 return num_insns_constant_wide (low);
1119 else if (mask64_operand (op, mode))
/* +1 for the shift combining high and low parts.  */
1123 return num_insns_constant_wide (high) + 1;
1126 return (num_insns_constant_wide (high)
1127 + num_insns_constant_wide (low) + 1);
1135 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1136 register with one instruction per word. We only do this if we can
1137 safely read CONST_DOUBLE_{LOW,HIGH}. */
1140 easy_fp_constant (op, mode)
1142 enum machine_mode mode;
/* Only CONST_DOUBLEs whose own mode matches MODE, and only float
   modes or DImode, are candidates.  */
1144 if (GET_CODE (op) != CONST_DOUBLE
1145 || GET_MODE (op) != mode
1146 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1149 /* Consider all constants with -msoft-float to be easy. */
1150 if (TARGET_SOFT_FLOAT && mode != DImode)
1153 /* If we are using V.4 style PIC, consider all constants to be hard. */
1154 if (flag_pic && DEFAULT_ABI == ABI_V4)
1157 #ifdef TARGET_RELOCATABLE
1158 /* Similarly if we are using -mrelocatable, consider all constants
1160 if (TARGET_RELOCATABLE)
/* DFmode: easy iff each of the two target words loads in one insn.  */
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1170 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1172 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1173 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1176 else if (mode == SFmode)
1181 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1182 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1184 return num_insns_constant_wide (l) == 1;
/* DImode: zero low word on 64-bit targets, or anything loadable in
   at most two insns.  */
1187 else if (mode == DImode)
1188 return ((TARGET_POWERPC64
1189 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1190 || (num_insns_constant (op, DImode) <= 2));
1192 else if (mode == SImode)
1198 /* Return 1 if the operand is a CONST_VECTOR whose elements are all
1199 zero, i.e. a vector constant we can generate trivially. */
1202 easy_vector_constant (op)
1208 if (GET_CODE (op) != CONST_VECTOR)
1211 units = CONST_VECTOR_NUNITS (op);
1213 /* We can generate 0 easily. Look for that. */
1214 for (i = 0; i < units; ++i)
1216 elt = CONST_VECTOR_ELT (op, i);
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
/* Elements may be CONST_INT or CONST_DOUBLE; both must be zero. */
1222 switch (GET_CODE (elt))
1225 if (INTVAL (elt) != 0)
1229 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1237 /* We could probably generate a few other constants trivially, but
1238 gcc doesn't generate them yet. FIXME later. */
1242 /* Return 1 if the operand is the constant 0. This works for scalars
1243 as well as vectors. */
1245 zero_constant (op, mode)
1247 enum machine_mode mode;
/* CONST0_RTX is per-mode, so a single pointer compare suffices. */
1249 return op == CONST0_RTX (mode);
1252 /* Return 1 if the operand is 0.0. */
1254 zero_fp_constant (op, mode)
1256 enum machine_mode mode;
/* Like zero_constant, but additionally requires a floating mode. */
1258 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1261 /* Return 1 if the operand is in volatile memory. Note that during
1262 the RTL generation phase, memory_operand does not return TRUE for
1263 volatile memory references. So this function allows us to
1264 recognize volatile references where it's safe. */
1267 volatile_mem_operand (op, mode)
1269 enum machine_mode mode;
1271 if (GET_CODE (op) != MEM)
1274 if (!MEM_VOLATILE_P (op))
1277 if (mode != GET_MODE (op))
/* The address-validity check depends on the reload phase: after
   reload use memory_operand; during reload use the strict check;
   before reload the non-strict check is sufficient. */
1280 if (reload_completed)
1281 return memory_operand (op, mode);
1283 if (reload_in_progress)
1284 return strict_memory_address_p (mode, XEXP (op, 0));
1286 return memory_address_p (mode, XEXP (op, 0));
1289 /* Return 1 if the operand is an offsettable memory operand. */
1292 offsettable_mem_operand (op, mode)
1294 enum machine_mode mode;
/* Strict address checking is requested once reload has started. */
1296 return ((GET_CODE (op) == MEM)
1297 && offsettable_address_p (reload_completed || reload_in_progress,
1298 mode, XEXP (op, 0)));
1301 /* Return 1 if the operand is either an easy FP constant (see above) or
a memory operand.  */
1305 mem_or_easy_const_operand (op, mode)
1307 enum machine_mode mode;
1309 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1312 /* Return 1 if the operand is either a non-special register or an item
1313 that can be used as the operand of a `mode' add insn. */
1316 add_operand (op, mode)
1318 enum machine_mode mode;
/* 'I' and 'L' are target constraint letters; presumably the signed
   16-bit and shifted-16-bit immediates of addi/addis -- confirm
   against CONST_OK_FOR_LETTER_P in rs6000.h. */
1320 if (GET_CODE (op) == CONST_INT)
1321 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1322 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1324 return gpc_reg_operand (op, mode);
1327 /* Return 1 if OP is a constant but not a valid add_operand. */
1330 non_add_cint_operand (op, mode)
1332 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Exact complement of the CONST_INT case of add_operand above. */
1334 return (GET_CODE (op) == CONST_INT
1335 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1336 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1339 /* Return 1 if the operand is a non-special register or a constant that
1340 can be used as the operand of an OR or XOR insn on the RS/6000. */
1343 logical_operand (op, mode)
1345 enum machine_mode mode;
1347 HOST_WIDE_INT opl, oph;
1349 if (gpc_reg_operand (op, mode))
1352 if (GET_CODE (op) == CONST_INT)
1354 opl = INTVAL (op) & GET_MODE_MASK (mode);
1356 #if HOST_BITS_PER_WIDE_INT <= 32
1357 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1361 else if (GET_CODE (op) == CONST_DOUBLE)
1363 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1366 opl = CONST_DOUBLE_LOW (op);
1367 oph = CONST_DOUBLE_HIGH (op);
/* Accept constants that fit entirely in either the low or the high
   16 bits -- the immediate fields of ori/oris (xori/xoris). */
1374 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1375 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1378 /* Return 1 if C is a constant that is not a logical operand (as
1379 above), but could be split into one. */
1382 non_logical_cint_operand (op, mode)
1384 enum machine_mode mode;
1386 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1387 && ! logical_operand (op, mode)
1388 && reg_or_logical_cint_operand (op, mode));
1391 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1392 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1393 Reject all ones and all zeros, since these should have been optimized
1394 away and confuse the making of MB and ME. */
1397 mask_operand (op, mode)
1399 enum machine_mode mode ATTRIBUTE_UNUSED;
1401 HOST_WIDE_INT c, lsb;
1403 if (GET_CODE (op) != CONST_INT)
1408 /* Fail in 64-bit mode if the mask wraps around because the upper
1409 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1410 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1413 /* We don't change the number of transitions by inverting,
1414 so make sure we start with the LS bit zero. */
1418 /* Reject all zeros or all ones. */
1422 /* Find the first transition. */
1425 /* Invert to look for a second transition. */
1428 /* Erase first transition. */
1431 /* Find the second transition (if any). */
1434 /* Match if all the bits above are 1's (or c is zero). */
1438 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1439 It is if there are no more than one 1->0 or 0->1 transitions.
1440 Reject all ones and all zeros, since these should have been optimized
1441 away and confuse the making of MB and ME. */
1444 mask64_operand (op, mode)
1446 enum machine_mode mode;
1448 if (GET_CODE (op) == CONST_INT)
1450 HOST_WIDE_INT c, lsb;
1452 /* We don't change the number of transitions by inverting,
1453 so make sure we start with the LS bit zero. */
1458 /* Reject all zeros or all ones. */
1462 /* Find the transition, and check that all bits above are 1's. */
/* A 64-bit constant may arrive as a CONST_DOUBLE on 32-bit hosts. */
1466 else if (GET_CODE (op) == CONST_DOUBLE
1467 && (mode == VOIDmode || mode == DImode))
1469 HOST_WIDE_INT low, high, lsb;
1471 if (HOST_BITS_PER_WIDE_INT < 64)
1472 high = CONST_DOUBLE_HIGH (op);
1474 low = CONST_DOUBLE_LOW (op);
1477 if (HOST_BITS_PER_WIDE_INT < 64)
/* -lsb is the mask of all bits at and above the lowest set bit, so
   comparing against it checks "all bits above the transition are 1". */
1484 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1488 return high == -lsb;
1492 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1498 /* Return 1 if the operand is either a non-special register or a constant
1499 that can be used as the operand of a PowerPC64 logical AND insn. */
1502 and64_operand (op, mode)
1504 enum machine_mode mode;
/* andi./andis. clobber CR0, so when CR0 is fixed we only allow
   register operands or rotate-and-mask (mask64) constants. */
1506 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1507 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1509 return (logical_operand (op, mode) || mask64_operand (op, mode));
1512 /* Return 1 if the operand is either a non-special register or a
1513 constant that can be used as the operand of an RS/6000 logical AND insn. */
1516 and_operand (op, mode)
1518 enum machine_mode mode;
/* 32-bit analogue of and64_operand above, using 32-bit masks. */
1520 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1521 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1523 return (logical_operand (op, mode) || mask_operand (op, mode));
1526 /* Return 1 if the operand is a general register or memory operand. */
1529 reg_or_mem_operand (op, mode)
1531 enum machine_mode mode;
/* volatile_mem_operand is needed because memory_operand rejects
   volatile references during RTL generation (see above). */
1533 return (gpc_reg_operand (op, mode)
1534 || memory_operand (op, mode)
1535 || volatile_mem_operand (op, mode));
1538 /* Return 1 if the operand is a general register or memory operand without
1539 pre_inc or pre_dec which produces invalid form of PowerPC lwa
instruction.  */
1543 lwa_operand (op, mode)
1545 enum machine_mode mode;
1549 if (reload_completed && GET_CODE (inner) == SUBREG)
1550 inner = SUBREG_REG (inner);
/* lwa requires a DS-form address: no pre-modify, and any constant
   displacement must be a multiple of 4. */
1552 return gpc_reg_operand (inner, mode)
1553 || (memory_operand (inner, mode)
1554 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1555 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1556 && (GET_CODE (XEXP (inner, 0)) != PLUS
1557 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1558 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1561 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1564 symbol_ref_operand (op, mode)
1566 enum machine_mode mode;
1568 if (mode != VOIDmode && GET_MODE (op) != mode)
1571 return (GET_CODE (op) == SYMBOL_REF);
1574 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1575 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1578 call_operand (op, mode)
1580 enum machine_mode mode;
1582 if (mode != VOIDmode && GET_MODE (op) != mode)
/* LR and CTR are the only hard registers usable as indirect call
   targets; any pseudo is allowed before register allocation. */
1585 return (GET_CODE (op) == SYMBOL_REF
1586 || (GET_CODE (op) == REG
1587 && (REGNO (op) == LINK_REGISTER_REGNUM
1588 || REGNO (op) == COUNT_REGISTER_REGNUM
1589 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1592 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1593 this file and the function is not weakly defined. */
1596 current_file_function_operand (op, mode)
1598 enum machine_mode mode ATTRIBUTE_UNUSED;
/* SYMBOL_REF_FLAG marks symbols already known to be local; otherwise
   accept a non-weak reference to the current function itself. */
1600 return (GET_CODE (op) == SYMBOL_REF
1601 && (SYMBOL_REF_FLAG (op)
1602 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1603 && ! DECL_WEAK (current_function_decl))));
1606 /* Return 1 if this operand is a valid input for a move insn. */
1609 input_operand (op, mode)
1611 enum machine_mode mode;
1613 /* Memory is always valid. */
1614 if (memory_operand (op, mode))
1617 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1618 if (GET_CODE (op) == CONSTANT_P_RTX)
1621 /* For floating-point, easy constants are valid. */
1622 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1624 && easy_fp_constant (op, mode))
1627 /* Allow any integer constant. */
1628 if (GET_MODE_CLASS (mode) == MODE_INT
1629 && (GET_CODE (op) == CONST_INT
1630 || GET_CODE (op) == CONST_DOUBLE))
1633 /* For floating-point or multi-word mode, the only remaining valid type
1635 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1636 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1637 return register_operand (op, mode);
1639 /* The only cases left are integral modes one word or smaller (we
1640 do not get called for MODE_CC values). These can be in any
1642 if (register_operand (op, mode))
1645 /* A SYMBOL_REF referring to the TOC is valid. */
1646 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1649 /* A constant pool expression (relative to the TOC) is valid */
1650 if (TOC_RELATIVE_EXPR_P (op))
1653 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1655 if (DEFAULT_ABI == ABI_V4
1656 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1657 && small_data_operand (op, Pmode))
1663 /* Return 1 for an operand in small memory on V.4/eabi. */
1666 small_data_operand (op, mode)
1667 rtx op ATTRIBUTE_UNUSED;
1668 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Only meaningful for -msdata styles other than none/data, and only
   under the System V.4 ABI. */
1673 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1676 if (DEFAULT_ABI != ABI_V4)
1679 if (GET_CODE (op) == SYMBOL_REF
1682 else if (GET_CODE (op) != CONST
1683 || GET_CODE (XEXP (op, 0)) != PLUS
1684 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1685 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1690 rtx sum = XEXP (op, 0);
1691 HOST_WIDE_INT summand;
1693 /* We have to be careful here, because it is the referenced address
1694 that must be 32k from _SDA_BASE_, not just the symbol. */
1695 summand = INTVAL (XEXP (sum, 1));
1696 if (summand < 0 || summand > g_switch_value)
1699 sym_ref = XEXP (sum, 0);
/* Small-data symbols are tagged with a leading '@' by the back end. */
1702 if (*XSTR (sym_ref, 0) != '@')
/* Recursive worker for constant_pool_expr_p / toc_relative_expr_p:
   walk OP setting *HAVE_SYM when a constant-pool SYMBOL_REF is seen
   and *HAVE_TOC when the TOC label is seen.  */
1713 constant_pool_expr_1 (op, have_sym, have_toc)
1718 switch (GET_CODE(op))
1721 if (CONSTANT_POOL_ADDRESS_P (op))
1723 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1731 else if (! strcmp (XSTR (op, 0), toc_label_name))
/* Binary rtx: both operands must qualify.  */
1740 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1741 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1743 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
/* Return 1 if OP is a constant-pool expression containing at least
   one constant-pool SYMBOL_REF.  */
1752 constant_pool_expr_p (op)
1757 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
/* Return 1 if OP is a constant-pool expression that references the
   TOC label.  */
1761 toc_relative_expr_p (op)
1766 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1769 /* Try machine-dependent ways of modifying an illegitimate address
1770 to be legitimate. If we find one, return the new, valid address.
1771 This is used from only one place: `memory_address' in explow.c.
1773 OLDX is the address as it was before break_out_memory_refs was
1774 called. In some cases it is useful to look at this to decide what
1777 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1779 It is always safe for this function to do nothing. It exists to
1780 recognize opportunities to optimize the output.
1782 On RS/6000, first check for the sum of a register with a constant
1783 integer that is out of range. If so, generate code to add the
1784 constant with the low-order 16 bits masked to the register and force
1785 this result into another register (this can be done with `cau').
1786 Then generate an address of REG+(CONST&0xffff), allowing for the
1787 possibility of bit 16 being a one.
1789 Then check for the sum of a register and something not constant, try to
1790 load the other things into a register and return the sum. */
1792 rs6000_legitimize_address (x, oldx, mode)
1794 rtx oldx ATTRIBUTE_UNUSED;
1795 enum machine_mode mode;
/* Case 1: reg + out-of-range constant.  Split the constant into a
   sign-adjusted high part (added separately) and a 16-bit low part. */
1797 if (GET_CODE (x) == PLUS
1798 && GET_CODE (XEXP (x, 0)) == REG
1799 && GET_CODE (XEXP (x, 1)) == CONST_INT
1800 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1802 HOST_WIDE_INT high_int, low_int;
1804 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1805 high_int = INTVAL (XEXP (x, 1)) - low_int;
1806 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1807 GEN_INT (high_int)), 0);
1808 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
/* Case 2: reg + non-constant.  Force the second term into a register
   so we get a reg+reg (indexed) address where the mode allows it. */
1810 else if (GET_CODE (x) == PLUS
1811 && GET_CODE (XEXP (x, 0)) == REG
1812 && GET_CODE (XEXP (x, 1)) != CONST_INT
1813 && GET_MODE_NUNITS (mode) == 1
1814 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1815 && (TARGET_POWERPC64 || mode != DImode)
1818 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1819 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1821 else if (ALTIVEC_VECTOR_MODE (mode))
1825 /* Make sure both operands are registers. */
1826 if (GET_CODE (x) == PLUS)
1827 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1828 force_reg (Pmode, XEXP (x, 1)));
1830 reg = force_reg (Pmode, x);
/* Case 3: ELF -mno-toc, non-PIC: build a HIGH/LO_SUM pair. */
1833 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1834 && GET_CODE (x) != CONST_INT
1835 && GET_CODE (x) != CONST_DOUBLE
1837 && GET_MODE_NUNITS (mode) == 1
1838 && (GET_MODE_BITSIZE (mode) <= 32
1839 || (TARGET_HARD_FLOAT && mode == DFmode)))
1841 rtx reg = gen_reg_rtx (Pmode);
1842 emit_insn (gen_elf_high (reg, (x)));
1843 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 4: Darwin analogue of the ELF HIGH/LO_SUM sequence. */
1845 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1847 && GET_CODE (x) != CONST_INT
1848 && GET_CODE (x) != CONST_DOUBLE
1850 && (TARGET_HARD_FLOAT || mode != DFmode)
1854 rtx reg = gen_reg_rtx (Pmode);
1855 emit_insn (gen_macho_high (reg, (x)));
1856 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 5: constant-pool expression already in the TOC. */
1859 && CONSTANT_POOL_EXPR_P (x)
1860 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1862 return create_TOC_reference (x);
1868 /* The convention appears to be to define this wherever it is used.
1869 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1870 is now used here. */
1871 #ifndef REG_MODE_OK_FOR_BASE_P
/* Fall back to the mode-independent check when the target headers do
   not supply a mode-aware version. */
1872 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1875 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1876 replace the input X, or the original X if no replacement is called for.
1877 The output parameter *WIN is 1 if the calling macro should goto WIN,
1880 For RS/6000, we wish to handle large displacements off a base
1881 register by splitting the addend across an addiu/addis and the mem insn.
1882 This cuts number of extra insns needed from 3 to 1.
1884 On Darwin, we use this to generate code for floating point constants.
1885 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1886 The Darwin code is inside #if TARGET_MACHO because only then is
1887 machopic_function_base_name() defined. */
1889 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1891 enum machine_mode mode;
1894 int ind_levels ATTRIBUTE_UNUSED;
1897 /* We must recognize output that we have already generated ourselves. */
1898 if (GET_CODE (x) == PLUS
1899 && GET_CODE (XEXP (x, 0)) == PLUS
1900 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1901 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1902 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1904 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1905 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1906 opnum, (enum reload_type)type);
/* Recognize the LO_SUM form produced below for Darwin FP constants. */
1912 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1913 && GET_CODE (x) == LO_SUM
1914 && GET_CODE (XEXP (x, 0)) == PLUS
1915 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1916 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1917 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1918 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1919 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1920 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1921 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1923 /* Result of previous invocation of this function on Darwin
1924 floating point constant. */
1925 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1926 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1927 opnum, (enum reload_type)type);
/* Split a large displacement off a hard base register: reload the
   high part into the base register, keep the low part in the mem. */
1932 if (GET_CODE (x) == PLUS
1933 && GET_CODE (XEXP (x, 0)) == REG
1934 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1935 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1936 && GET_CODE (XEXP (x, 1)) == CONST_INT
1937 && !ALTIVEC_VECTOR_MODE (mode))
1939 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1940 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1942 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1944 /* Check for 32-bit overflow. */
1945 if (high + low != val)
1951 /* Reload the high part into a base reg; leave the low part
1952 in the mem directly. */
1954 x = gen_rtx_PLUS (GET_MODE (x),
1955 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1959 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1960 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1961 opnum, (enum reload_type)type);
1966 if (GET_CODE (x) == SYMBOL_REF
1967 && DEFAULT_ABI == ABI_DARWIN
1968 && !ALTIVEC_VECTOR_MODE (mode)
1971 /* Darwin load of floating point constant. */
1972 rtx offset = gen_rtx (CONST, Pmode,
1973 gen_rtx (MINUS, Pmode, x,
1974 gen_rtx (SYMBOL_REF, Pmode,
1975 machopic_function_base_name ())));
1976 x = gen_rtx (LO_SUM, GET_MODE (x),
1977 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1978 gen_rtx (HIGH, Pmode, offset)), offset);
1979 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1980 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1981 opnum, (enum reload_type)type);
/* Constant-pool entry already placed in the TOC: address it there. */
1987 && CONSTANT_POOL_EXPR_P (x)
1988 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1990 (x) = create_TOC_reference (x);
1998 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1999 that is a valid memory address for an instruction.
2000 The MODE argument is the machine mode for the MEM expression
2001 that wants to use this address.
2003 On the RS/6000, there are four valid address: a SYMBOL_REF that
2004 refers to a constant pool entry of an address (or the sum of it
2005 plus a constant), a short (16-bit signed) constant plus a register,
2006 the sum of two registers, or a register indirect, possibly with an
2007 auto-increment. For DFmode and DImode with a constant plus register,
2008 we must ensure that both words are addressable or PowerPC64 with offset
2011 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2012 32-bit DImode, TImode), indexed addressing cannot be used because
2013 adjacent memory cells are accessed by adding word-sized offsets
2014 during assembly output. */
2016 rs6000_legitimate_address (mode, x, reg_ok_strict)
2017 enum machine_mode mode;
/* Each test below accepts one legal address form; falling through
   all of them means the address is not legitimate. */
2021 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2023 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2025 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2027 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2029 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2031 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2033 && GET_CODE (x) == PLUS
2034 && GET_CODE (XEXP (x, 0)) == REG
2035 && XEXP (x, 0) == virtual_stack_vars_rtx
2036 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2038 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
/* Indexed (reg+reg) addressing is excluded for multi-register modes;
   see the head comment. */
2041 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2042 && (TARGET_POWERPC64 || mode != DImode)
2043 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2045 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2050 /* Try to output insns to set TARGET equal to the constant C if it can
2051 be done in less than N insns. Do all computations in MODE.
2052 Returns the place where the output has been placed if it can be
2053 done and the insns have been emitted. If it would take more than N
2054 insns, zero is returned and no insns are emitted. */
2057 rs6000_emit_set_const (dest, mode, source, n)
2059 enum machine_mode mode;
2060 int n ATTRIBUTE_UNUSED;
2062 HOST_WIDE_INT c0, c1;
/* Narrow integer modes are handled by a plain SET. */
2064 if (mode == QImode || mode == HImode || mode == SImode)
2067 dest = gen_reg_rtx (mode);
2068 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
/* Extract the low/high host words of the constant; a CONST_DOUBLE
   carries the high word explicitly on narrow hosts. */
2072 if (GET_CODE (source) == CONST_INT)
2074 c0 = INTVAL (source);
2077 else if (GET_CODE (source) == CONST_DOUBLE)
2079 #if HOST_BITS_PER_WIDE_INT >= 64
2080 c0 = CONST_DOUBLE_LOW (source);
2083 c0 = CONST_DOUBLE_LOW (source);
2084 c1 = CONST_DOUBLE_HIGH (source);
2090 return rs6000_emit_set_long_const (dest, c0, c1);
2093 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2094 fall back to a straightforward decomposition. We do this to avoid
2095 exponential run times encountered when looking for longer sequences
2096 with rs6000_emit_set_const. */
2098 rs6000_emit_set_long_const (dest, c1, c2)
2100 HOST_WIDE_INT c1, c2;
/* 32-bit target: just move each 32-bit half into its subword. */
2102 if (!TARGET_POWERPC64)
2104 rtx operand1, operand2;
2106 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2108 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2110 emit_move_insn (operand1, GEN_INT (c1));
2111 emit_move_insn (operand2, GEN_INT (c2));
/* 64-bit target: build the constant from its four 16-bit chunks
   (ud1 = lowest ... ud4 = highest), using as few insns as the
   sign-extension patterns allow. */
2115 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2118 ud2 = (c1 & 0xffff0000) >> 16;
2119 #if HOST_BITS_PER_WIDE_INT >= 64
2123 ud4 = (c2 & 0xffff0000) >> 16;
2125 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2126 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2129 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2131 emit_move_insn (dest, GEN_INT (ud1));
2134 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2135 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2138 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2141 emit_move_insn (dest, GEN_INT (ud2 << 16));
2143 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2145 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2146 || (ud4 == 0 && ! (ud3 & 0x8000)))
2149 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2152 emit_move_insn (dest, GEN_INT (ud3 << 16));
2155 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2156 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2158 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* General case: materialize the high 32 bits, shift left 32, then
   OR in the two low chunks. */
2163 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2166 emit_move_insn (dest, GEN_INT (ud4 << 16));
2169 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2171 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2173 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2174 GEN_INT (ud2 << 16)));
2176 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2182 /* Emit a move from SOURCE to DEST in mode MODE. */
2184 rs6000_emit_move (dest, source, mode)
2187 enum machine_mode mode;
2191 operands[1] = source;
2193 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2194 if (GET_CODE (operands[1]) == CONST_DOUBLE
2195 && ! FLOAT_MODE_P (mode)
2196 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2198 /* FIXME. This should never happen. */
2199 /* Since it seems that it does, do the safe thing and convert
2201 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2203 if (GET_CODE (operands[1]) == CONST_DOUBLE
2204 && ! FLOAT_MODE_P (mode)
2205 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2206 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2207 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2208 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2211 /* Check if GCC is setting up a block move that will end up using FP
2212 registers as temporaries. We must make sure this is acceptable. */
2213 if (GET_CODE (operands[0]) == MEM
2214 && GET_CODE (operands[1]) == MEM
2216 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2217 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2218 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2219 ? 32 : MEM_ALIGN (operands[0])))
2220 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2222 : MEM_ALIGN (operands[1]))))
2223 && ! MEM_VOLATILE_P (operands [0])
2224 && ! MEM_VOLATILE_P (operands [1]))
/* Split the DImode mem-to-mem copy into two SImode moves. */
2226 emit_move_insn (adjust_address (operands[0], SImode, 0),
2227 adjust_address (operands[1], SImode, 0));
2228 emit_move_insn (adjust_address (operands[0], SImode, 4),
2229 adjust_address (operands[1], SImode, 4));
2233 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2234 operands[1] = force_reg (mode, operands[1]);
/* POWER (not PowerPC) stores of SFmode may need an explicit
   double->single truncation first. */
2236 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2237 && GET_CODE (operands[0]) == MEM)
2241 if (reload_in_progress || reload_completed)
2242 regnum = true_regnum (operands[1]);
2243 else if (GET_CODE (operands[1]) == REG)
2244 regnum = REGNO (operands[1]);
2248 /* If operands[1] is a register, on POWER it may have
2249 double-precision data in it, so truncate it to single
2251 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2254 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2255 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2256 operands[1] = newreg;
2260 /* Handle the case where reload calls us with an invalid address;
2261 and the case of CONSTANT_P_RTX. */
2262 if (!VECTOR_MODE_P (mode)
2263 && (! general_operand (operands[1], mode)
2264 || ! nonimmediate_operand (operands[0], mode)
2265 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2267 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2271 /* FIXME: In the long term, this switch statement should go away
2272 and be replaced by a sequence of tests based on things like
/* Non-CONST_INT constants go to the constant pool. */
2278 if (CONSTANT_P (operands[1])
2279 && GET_CODE (operands[1]) != CONST_INT)
2280 operands[1] = force_const_mem (mode, operands[1]);
/* FP constants that are not "easy" also go to the pool. */
2286 if (CONSTANT_P (operands[1])
2287 && ! easy_fp_constant (operands[1], mode))
2288 operands[1] = force_const_mem (mode, operands[1]);
/* Likewise for vector constants other than easy (all-zero) ones. */
2295 if (CONSTANT_P (operands[1])
2296 && !easy_vector_constant (operands[1]))
2297 operands[1] = force_const_mem (mode, operands[1]);
2302 /* Use default pattern for address of ELF small data */
2305 && DEFAULT_ABI == ABI_V4
2306 && (GET_CODE (operands[1]) == SYMBOL_REF
2307 || GET_CODE (operands[1]) == CONST)
2308 && small_data_operand (operands[1], mode))
2310 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
/* V.4 small-model PIC: load through the GOT. */
2314 if (DEFAULT_ABI == ABI_V4
2315 && mode == Pmode && mode == SImode
2316 && flag_pic == 1 && got_operand (operands[1], mode))
2318 emit_insn (gen_movsi_got (operands[0], operands[1]));
/* No TOC, non-PIC: build the address with a HIGH/LO pair. */
2322 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2323 && TARGET_NO_TOC && ! flag_pic
2325 && CONSTANT_P (operands[1])
2326 && GET_CODE (operands[1]) != HIGH
2327 && GET_CODE (operands[1]) != CONST_INT)
2329 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2331 /* If this is a function address on -mcall-aixdesc,
2332 convert it to the address of the descriptor. */
2333 if (DEFAULT_ABI == ABI_AIX
2334 && GET_CODE (operands[1]) == SYMBOL_REF
2335 && XSTR (operands[1], 0)[0] == '.')
2337 const char *name = XSTR (operands[1], 0);
2339 while (*name == '.')
2341 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2342 CONSTANT_POOL_ADDRESS_P (new_ref)
2343 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2344 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2345 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2346 operands[1] = new_ref;
2349 if (DEFAULT_ABI == ABI_DARWIN)
2351 emit_insn (gen_macho_high (target, operands[1]));
2352 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2356 emit_insn (gen_elf_high (target, operands[1]));
2357 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2361 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2362 and we have put it in the TOC, we just need to make a TOC-relative
2365 && GET_CODE (operands[1]) == SYMBOL_REF
2366 && CONSTANT_POOL_EXPR_P (operands[1])
2367 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2368 get_pool_mode (operands[1])))
2370 operands[1] = create_TOC_reference (operands[1]);
2372 else if (mode == Pmode
2373 && CONSTANT_P (operands[1])
2374 && ((GET_CODE (operands[1]) != CONST_INT
2375 && ! easy_fp_constant (operands[1], mode))
2376 || (GET_CODE (operands[1]) == CONST_INT
2377 && num_insns_constant (operands[1], mode) > 2)
2378 || (GET_CODE (operands[0]) == REG
2379 && FP_REGNO_P (REGNO (operands[0]))))
2380 && GET_CODE (operands[1]) != HIGH
2381 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2382 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2384 /* Emit a USE operation so that the constant isn't deleted if
2385 expensive optimizations are turned on because nobody
2386 references it. This should only be done for operands that
2387 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2388 This should not be done for operands that contain LABEL_REFs.
2389 For now, we just handle the obvious case. */
2390 if (GET_CODE (operands[1]) != LABEL_REF)
2391 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2394 /* Darwin uses a special PIC legitimizer. */
2395 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2398 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2400 if (operands[0] != operands[1])
2401 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2406 /* If we are to limit the number of things we put in the TOC and
2407 this is a symbol plus a constant we can add in one insn,
2408 just put the symbol in the TOC and add the constant. Don't do
2409 this if reload is in progress. */
2410 if (GET_CODE (operands[1]) == CONST
2411 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2412 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2413 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2414 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2415 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2416 && ! side_effects_p (operands[0]))
2419 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2420 rtx other = XEXP (XEXP (operands[1], 0), 1);
2422 sym = force_reg (mode, sym);
2424 emit_insn (gen_addsi3 (operands[0], sym, other));
2426 emit_insn (gen_adddi3 (operands[0], sym, other));
2430 operands[1] = force_const_mem (mode, operands[1]);
/* Pool reference that landed in the TOC: rewrite as a TOC load and
   mark it unchanging for alias analysis. */
2433 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2434 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2435 get_pool_constant (XEXP (operands[1], 0)),
2436 get_pool_mode (XEXP (operands[1], 0))))
2439 = gen_rtx_MEM (mode,
2440 create_TOC_reference (XEXP (operands[1], 0)));
2441 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2442 RTX_UNCHANGING_P (operands[1]) = 1;
/* For multi-word moves, force both addresses into base registers. */
2448 if (GET_CODE (operands[0]) == MEM
2449 && GET_CODE (XEXP (operands[0], 0)) != REG
2450 && ! reload_in_progress)
2452 = replace_equiv_address (operands[0],
2453 copy_addr_to_reg (XEXP (operands[0], 0)));
2455 if (GET_CODE (operands[1]) == MEM
2456 && GET_CODE (XEXP (operands[1], 0)) != REG
2457 && ! reload_in_progress)
2459 = replace_equiv_address (operands[1],
2460 copy_addr_to_reg (XEXP (operands[1], 0)));
2467 /* Above, we may have called force_const_mem which may have returned
2468 an invalid address. If we can, fix this up; otherwise, reload will
2469 have to deal with it. */
2470 if (GET_CODE (operands[1]) == MEM
2471 && ! memory_address_p (mode, XEXP (operands[1], 0))
2472 && ! reload_in_progress)
2473 operands[1] = adjust_address (operands[1], mode, 0);
2475 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2479 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2480 for a call to a function whose data type is FNTYPE.
2481 For a library call, FNTYPE is 0.
2483 For incoming args we set the number of arguments in the prototype large
2484 so we never return a PARALLEL. */
2487 init_cumulative_args (cum, fntype, libname, incoming)
2488 CUMULATIVE_ARGS *cum;
2490 rtx libname ATTRIBUTE_UNUSED;
2493 static CUMULATIVE_ARGS zero_cumulative;
2495 *cum = zero_cumulative;
2497 cum->fregno = FP_ARG_MIN_REG;
2498 cum->vregno = ALTIVEC_ARG_MIN_REG;
2499 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2500 cum->call_cookie = CALL_NORMAL;
2501 cum->sysv_gregno = GP_ARG_MIN_REG;
2504 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2506 else if (cum->prototype)
2507 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2508 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2509 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2512 cum->nargs_prototype = 0;
2514 cum->orig_nargs = cum->nargs_prototype;
2516 /* Check for longcall's */
2517 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2518 cum->call_cookie = CALL_LONG;
2520 if (TARGET_DEBUG_ARG)
2522 fprintf (stderr, "\ninit_cumulative_args:");
2525 tree ret_type = TREE_TYPE (fntype);
2526 fprintf (stderr, " ret code = %s,",
2527 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2530 if (cum->call_cookie & CALL_LONG)
2531 fprintf (stderr, " longcall,");
2533 fprintf (stderr, " proto = %d, nargs = %d\n",
2534 cum->prototype, cum->nargs_prototype);
2538 /* If defined, a C expression which determines whether, and in which
2539 direction, to pad out an argument with extra space. The value
2540 should be of type `enum direction': either `upward' to pad above
2541 the argument, `downward' to pad below, or `none' to inhibit
     padding.
2544 For the AIX ABI, structs are always stored left-shifted in their
/* Return the padding direction (`upward' or `downward') for an
   argument of the given MODE and TYPE; see the comment preceding this
   function.  NOTE(review): the return for the aggregate case is on a
   line missing from this extract.  */
2548 function_arg_padding (mode, type)
2549 enum machine_mode mode;
/* Aggregates: the AIX ABI stores structs left-justified.  */
2552 if (type != 0 && AGGREGATE_TYPE_P (type))
2555 /* This is the default definition.  */
/* Otherwise, pad downward only on little-endian targets, and only
   for arguments smaller than a parameter slot.  */
2556 return (! BYTES_BIG_ENDIAN
2559 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2560 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2561 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2562 ? downward : upward));
2565 /* If defined, a C expression that gives the alignment boundary, in bits,
2566 of an argument with the specified mode and type. If it is not defined,
2567 PARM_BOUNDARY is used for all arguments.
2569 V.4 wants long longs to be double word aligned. */
/* Return the alignment boundary, in bits, of an argument with the
   given MODE.  The default is PARM_BOUNDARY; the V.4 ABI double-word
   aligns DImode/DFmode (per the comment above), and AltiVec vector
   modes get their own boundary.  NOTE(review): the return values of
   the two special cases are on lines missing from this extract.  */
2572 function_arg_boundary (mode, type)
2573 enum machine_mode mode;
2574 tree type ATTRIBUTE_UNUSED;
2576 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2578 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2581 return PARM_BOUNDARY;
2584 /* Update the data in CUM to advance over an argument
2585 of mode MODE and data type TYPE.
2586 (TYPE is null for libcalls where that information may not be available.) */
/* Advance *CUM past an argument of mode MODE and type TYPE (TYPE is
   null for libcalls).  Mirrors the register-selection logic of
   function_arg below.  NOTE(review): several braces/else arms are on
   lines missing from this extract.  */
2589 function_arg_advance (cum, mode, type, named)
2590 CUMULATIVE_ARGS *cum;
2591 enum machine_mode mode;
/* One fewer prototyped argument remains.  */
2595 cum->nargs_prototype--;
/* AltiVec vector arguments consume a vector register (or stack).  */
2597 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2599 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2602 cum->words += RS6000_ARG_SIZE (mode, type);
2604 else if (DEFAULT_ABI == ABI_V4)
/* V.4: SFmode/DFmode go in FP registers while any remain.  */
2606 if (TARGET_HARD_FLOAT
2607 && (mode == SFmode || mode == DFmode))
2609 if (cum->fregno <= FP_ARG_V4_MAX_REG)
/* FP regs exhausted: keep the stack slot double-word aligned.  */
2614 cum->words += cum->words & 1;
2615 cum->words += RS6000_ARG_SIZE (mode, type);
2621 int gregno = cum->sysv_gregno;
2623 /* Aggregates and IEEE quad get passed by reference.  */
2624 if ((type && AGGREGATE_TYPE_P (type))
2628 n_words = RS6000_ARG_SIZE (mode, type);
2630 /* Long long is put in odd registers.  */
2631 if (n_words == 2 && (gregno & 1) == 0)
2634 /* Long long is not split between registers and stack.  */
2635 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2637 /* Long long is aligned on the stack.  */
2639 cum->words += cum->words & 1;
2640 cum->words += n_words;
2643 /* Note: gregno keeps accumulating even after we have started
2644 spilling to the stack; that overshoot is how
2645 expand_builtin_saveregs learns that spilling has begun.  */
2646 cum->sysv_gregno = gregno + n_words;
2649 if (TARGET_DEBUG_ARG)
2651 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2652 cum->words, cum->fregno);
2653 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2654 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2655 fprintf (stderr, "mode = %4s, named = %d\n",
2656 GET_MODE_NAME (mode), named);
/* AIX/Darwin path: 32-bit targets insert one word of padding when a
   64-bit-aligned arg lands on an odd word offset.  */
2661 int align = (TARGET_32BIT && (cum->words & 1) != 0
2662 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2664 cum->words += align + RS6000_ARG_SIZE (mode, type);
2666 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2669 if (TARGET_DEBUG_ARG)
2671 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2672 cum->words, cum->fregno);
2673 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2674 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2675 fprintf (stderr, "named = %d, align = %d\n", named, align);
2680 /* Determine where to put an argument to a function.
2681 Value is zero to push the argument on the stack,
2682 or a hard register in which to store the argument.
2684 MODE is the argument's machine mode.
2685 TYPE is the data type of the argument (as a tree).
2686 This is null for libcalls where that information may
2688 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2689 the preceding args and about the function being called.
2690 NAMED is nonzero if this argument is a named parameter
2691 (otherwise it is an extra parameter matching an ellipsis).
2693 On RS/6000 the first eight words of non-FP are normally in registers
2694 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2695 Under V.4, the first 8 FP args are in registers.
2697 If this is floating-point and no prototype is specified, we use
2698 both an FP and integer register (or possibly FP reg and stack). Library
2699 functions (when TYPE is zero) always have the proper types for args,
2700 so we can pass the FP value just in one register. emit_library_function
2701 doesn't support PARALLEL anyway. */
/* Decide where an argument of mode MODE and type TYPE goes: returns 0
   (push on stack), a hard REG, or — for an unprototyped FP arg — a
   PARALLEL naming both the GP and FP registers.  See the long comment
   preceding this function.  NOTE(review): some braces/else arms and
   intermediate expressions are on lines missing from this extract.  */
2704 function_arg (cum, mode, type, named)
2705 CUMULATIVE_ARGS *cum;
2706 enum machine_mode mode;
2710 enum rs6000_abi abi = DEFAULT_ABI;
2712 /* Return a marker to indicate whether CR1 needs to set or clear the
2713 bit that V.4 uses to say fp args were passed in registers.
2714 Assume that we don't need the marker for software floating point,
2715 or compiler generated library calls.  */
2716 if (mode == VOIDmode)
2719 && TARGET_HARD_FLOAT
2720 && cum->nargs_prototype < 0
2721 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
/* Encode set/clear of the V.4 fp-args-in-regs bit in the cookie.  */
2723 return GEN_INT (cum->call_cookie
2724 | ((cum->fregno == FP_ARG_MIN_REG)
2725 ? CALL_V4_SET_FP_ARGS
2726 : CALL_V4_CLEAR_FP_ARGS));
2729 return GEN_INT (cum->call_cookie);
/* Named AltiVec vector args go in vector registers while any remain.  */
2732 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2734 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2735 return gen_rtx_REG (mode, cum->vregno);
2739 else if (abi == ABI_V4)
2741 if (TARGET_HARD_FLOAT
2742 && (mode == SFmode || mode == DFmode))
2744 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2745 return gen_rtx_REG (mode, cum->fregno);
2752 int gregno = cum->sysv_gregno;
2754 /* Aggregates and IEEE quad get passed by reference.  */
2755 if ((type && AGGREGATE_TYPE_P (type))
2759 n_words = RS6000_ARG_SIZE (mode, type);
2761 /* Long long is put in odd registers.  */
2762 if (n_words == 2 && (gregno & 1) == 0)
2765 /* Long long is not split between registers and stack.  */
2766 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2767 return gen_rtx_REG (mode, gregno);
/* AIX/Darwin path: compute the word offset including any 64-bit
   alignment padding (same rule as function_arg_advance).  */
2774 int align = (TARGET_32BIT && (cum->words & 1) != 0
2775 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2776 int align_words = cum->words + align;
2778 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2781 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2784 || ((cum->nargs_prototype > 0)
2785 /* IBM AIX extended its linkage convention definition always
2786 to require FP args after register save area hole on the
2788 && (DEFAULT_ABI != ABI_AIX
2790 || (align_words < GP_ARG_NUM_REG))))
2791 return gen_rtx_REG (mode, cum->fregno);
/* Unprototyped FP arg: describe it as living in BOTH a GP and an FP
   register via a PARALLEL, so the caller sets up both.  */
2793 return gen_rtx_PARALLEL (mode,
2795 gen_rtx_EXPR_LIST (VOIDmode,
2796 ((align_words >= GP_ARG_NUM_REG)
2799 + RS6000_ARG_SIZE (mode, type)
2801 /* If this is partially on the stack, then
2802 we only include the portion actually
2803 in registers here.  */
2804 ? gen_rtx_REG (SImode,
2805 GP_ARG_MIN_REG + align_words)
2806 : gen_rtx_REG (mode,
2807 GP_ARG_MIN_REG + align_words))),
2809 gen_rtx_EXPR_LIST (VOIDmode,
2810 gen_rtx_REG (mode, cum->fregno),
2813 else if (align_words < GP_ARG_NUM_REG)
2814 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2820 /* For an arg passed partly in registers and partly in memory,
2821 this is the number of registers used.
2822 For args passed entirely in registers or entirely in memory, zero. */
/* Return the number of registers used for an argument split between
   registers and memory; 0 when it is entirely in one or the other.
   NOTE(review): the early returns for the V.4 / FP / AltiVec cases
   are on lines missing from this extract.  */
2825 function_arg_partial_nregs (cum, mode, type, named)
2826 CUMULATIVE_ARGS *cum;
2827 enum machine_mode mode;
2829 int named ATTRIBUTE_UNUSED;
/* V.4 never splits an argument (long long stays whole).  */
2831 if (DEFAULT_ABI == ABI_V4)
2834 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2835 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2837 if (cum->nargs_prototype >= 0)
/* The argument straddles the last GP argument register.  */
2841 if (cum->words < GP_ARG_NUM_REG
2842 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2844 int ret = GP_ARG_NUM_REG - cum->words;
2845 if (ret && TARGET_DEBUG_ARG)
2846 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2854 /* A C expression that indicates when an argument must be passed by
2855 reference. If nonzero for an argument, a copy of that argument is
2856 made in memory and a pointer to the argument is passed instead of
2857 the argument itself. The pointer is passed in whatever way is
2858 appropriate for passing a pointer to that type.
2860 Under V.4, structures and unions are passed by reference. */
/* Nonzero when an argument must be passed by reference (a copy is
   made and a pointer passed instead).  Under V.4 this applies to
   aggregates (and, per the elided condition, presumably IEEE quad —
   the second half of the || is on a missing line).  */
2863 function_arg_pass_by_reference (cum, mode, type, named)
2864 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2865 enum machine_mode mode ATTRIBUTE_UNUSED;
2867 int named ATTRIBUTE_UNUSED;
2869 if (DEFAULT_ABI == ABI_V4
2870 && ((type && AGGREGATE_TYPE_P (type))
2873 if (TARGET_DEBUG_ARG)
2874 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2882 /* Perform any needed actions needed for a function that is receiving a
2883 variable number of arguments.
2887 MODE and TYPE are the mode and type of the current parameter.
2889 PRETEND_SIZE is a variable that should be set to the amount of stack
2890 that must be pushed by the prolog to pretend that our caller pushed
2893 Normally, this macro will push all remaining incoming registers on the
2894 stack and set PRETEND_SIZE to the length of the registers pushed. */
/* Set up the varargs/stdarg save area: spill the remaining incoming
   GP (and, for V.4 hard-float, FP) argument registers to the stack so
   va_arg can find them, and set *PRETEND_SIZE accordingly.  When
   NO_RTL is set, only compute sizes — emit no insns.
   NOTE(review): several braces and a few statements (e.g. the
   move_block_from_reg call head at 2952) are on missing lines.  */
2897 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2898 CUMULATIVE_ARGS *cum;
2899 enum machine_mode mode;
2905 CUMULATIVE_ARGS next_cum;
2906 int reg_size = TARGET_32BIT ? 4 : 8;
2907 rtx save_area = NULL_RTX, mem;
2908 int first_reg_offset, set;
/* Distinguish stdarg (prototype ends without void) from varargs.  */
2912 fntype = TREE_TYPE (current_function_decl);
2913 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2914 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2915 != void_type_node));
2917 /* For varargs, we do not want to skip the dummy va_dcl argument.
2918 For stdargs, we do want to skip the last named argument.  */
2921 function_arg_advance (&next_cum, mode, type, 1);
2923 if (DEFAULT_ABI == ABI_V4)
2925 /* Indicate to allocate space on the stack for varargs save area.  */
2926 cfun->machine->sysv_varargs_p = 1;
/* V.4 keeps its save area below the frame pointer.  */
2928 save_area = plus_constant (virtual_stack_vars_rtx,
2929 - RS6000_VARARGS_SIZE);
2931 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
/* Non-V.4: save into the normal incoming-argument area.  */
2935 first_reg_offset = next_cum.words;
2936 save_area = virtual_incoming_args_rtx;
2937 cfun->machine->sysv_varargs_p = 0;
2939 if (MUST_PASS_IN_STACK (mode, type))
2940 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
/* Spill the remaining GP argument registers, if any.  */
2943 set = get_varargs_alias_set ();
2944 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2946 mem = gen_rtx_MEM (BLKmode,
2947 plus_constant (save_area,
2948 first_reg_offset * reg_size)),
2949 set_mem_alias_set (mem, set);
2950 set_mem_align (mem, BITS_PER_WORD);
2953 (GP_ARG_MIN_REG + first_reg_offset, mem,
2954 GP_ARG_NUM_REG - first_reg_offset,
2955 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2957 /* ??? Does ABI_V4 need this at all?  */
2958 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2961 /* Save FP registers if needed.  */
2962 if (DEFAULT_ABI == ABI_V4
2963 && TARGET_HARD_FLOAT && ! no_rtl
2964 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2966 int fregno = next_cum.fregno;
2967 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2968 rtx lab = gen_label_rtx ();
2969 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Skip the FP stores when CR1 says no FP args were register-passed
   (the bit set via the VOIDmode cookie in function_arg).  */
2971 emit_jump_insn (gen_rtx_SET (VOIDmode,
2973 gen_rtx_IF_THEN_ELSE (VOIDmode,
2974 gen_rtx_NE (VOIDmode, cr1,
2976 gen_rtx_LABEL_REF (VOIDmode, lab),
2979 while (fregno <= FP_ARG_V4_MAX_REG)
2981 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2982 set_mem_alias_set (mem, set);
2983 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2992 /* Create the va_list data type. */
/* Build the tree type used for va_list.  AIX (and any non-V.4 ABI)
   uses a plain `char *'; the V.4 ABI uses a one-element array of a
   record { gpr, fpr, overflow_arg_area, reg_save_area }.  */
2995 rs6000_build_va_list ()
2997 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2999 /* For AIX, prefer 'char *' because that's what the system
3000 header files like.  */
3001 if (DEFAULT_ABI != ABI_V4)
3002 return build_pointer_type (char_type_node);
3004 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3005 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* gpr/fpr count argument registers already consumed; the ovf and sav
   fields' pointer types are on lines missing from this extract.  */
3007 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3008 unsigned_char_type_node);
3009 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3010 unsigned_char_type_node);
3011 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3013 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3016 DECL_FIELD_CONTEXT (f_gpr) = record;
3017 DECL_FIELD_CONTEXT (f_fpr) = record;
3018 DECL_FIELD_CONTEXT (f_ovf) = record;
3019 DECL_FIELD_CONTEXT (f_sav) = record;
/* Chain the fields into the record and lay it out.  */
3021 TREE_CHAIN (record) = type_decl;
3022 TYPE_NAME (record) = type_decl;
3023 TYPE_FIELDS (record) = f_gpr;
3024 TREE_CHAIN (f_gpr) = f_fpr;
3025 TREE_CHAIN (f_fpr) = f_ovf;
3026 TREE_CHAIN (f_ovf) = f_sav;
3028 layout_type (record);
3030 /* The correct type is an array type of one element.  */
3031 return build_array_type (record, build_index_type (size_zero_node));
3034 /* Implement va_start. */
/* Expand __builtin_va_start: fill in the four va_list fields built by
   rs6000_build_va_list.  Non-V.4 ABIs defer to the generic expander.  */
3037 rs6000_va_start (stdarg_p, valist, nextarg)
3042 HOST_WIDE_INT words, n_gpr, n_fpr;
3043 tree f_gpr, f_fpr, f_ovf, f_sav;
3044 tree gpr, fpr, ovf, sav, t;
3046 /* Only SVR4 needs something special.  */
3047 if (DEFAULT_ABI != ABI_V4)
3049 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
/* Get COMPONENT_REFs for the four record fields of *valist.  */
3053 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3054 f_fpr = TREE_CHAIN (f_gpr);
3055 f_ovf = TREE_CHAIN (f_fpr);
3056 f_sav = TREE_CHAIN (f_ovf);
3058 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3059 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3060 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3061 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3062 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3064 /* Count number of gp and fp argument registers used.  */
3065 words = current_function_args_info.words;
3066 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3067 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3069 if (TARGET_DEBUG_ARG)
3071 fputs ("va_start: words = ", stderr);
3072 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3073 fputs (", n_gpr = ", stderr);
3074 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3075 fputs (", n_fpr = ", stderr);
3076 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3077 putc ('\n', stderr);
/* gpr/fpr = registers already consumed by named arguments.  */
3080 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3081 TREE_SIDE_EFFECTS (t) = 1;
3082 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3084 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3085 TREE_SIDE_EFFECTS (t) = 1;
3086 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3088 /* Find the overflow area.  */
3089 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3091 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3092 build_int_2 (words * UNITS_PER_WORD, 0));
3093 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3094 TREE_SIDE_EFFECTS (t) = 1;
3095 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3097 /* Find the register save area (allocated below the frame pointer
   by setup_incoming_varargs).  */
3098 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3099 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3100 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3101 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3102 TREE_SIDE_EFFECTS (t) = 1;
3103 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3106 /* Implement va_arg. */
/* Expand __builtin_va_arg for TYPE: return an rtx for the address of
   the next argument, fetching it from the register save area when the
   corresponding gpr/fpr counter shows registers remain, otherwise
   from the overflow (stack) area.  Non-V.4 ABIs use the generic code.
   NOTE(review): the branches that set indirect_p, n_reg, sav_ofs,
   sav_scale and several braces are on lines missing from this
   extract.  */
3109 rs6000_va_arg (valist, type)
3112 tree f_gpr, f_fpr, f_ovf, f_sav;
3113 tree gpr, fpr, ovf, sav, reg, t, u;
3114 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3115 rtx lab_false, lab_over, addr_rtx, r;
3117 if (DEFAULT_ABI != ABI_V4)
3118 return std_expand_builtin_va_arg (valist, type);
/* COMPONENT_REFs for the four va_list fields, as in va_start.  */
3120 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3121 f_fpr = TREE_CHAIN (f_gpr);
3122 f_ovf = TREE_CHAIN (f_fpr);
3123 f_sav = TREE_CHAIN (f_ovf);
3125 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3126 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3127 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3128 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3129 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3131 size = int_size_in_bytes (type);
3132 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3134 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3136 /* Aggregates and long doubles are passed by reference.  */
3142 size = UNITS_PER_WORD;
3145 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3147 /* FP args go in FP registers, if present.  */
3156 /* Otherwise into GP registers.  */
3164 /* Pull the value out of the saved registers ...  */
3166 lab_false = gen_label_rtx ();
3167 lab_over = gen_label_rtx ();
3168 addr_rtx = gen_reg_rtx (Pmode);
3170 /* Vectors never go in registers.  */
3171 if (TREE_CODE (type) != VECTOR_TYPE)
/* Jump to lab_false when no suitable registers remain.  */
3173 TREE_THIS_VOLATILE (reg) = 1;
3174 emit_cmp_and_jump_insns
3175 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3176 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3179 /* Long long is aligned in the registers.  */
/* Round the register counter up to an n_reg boundary.  */
3182 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3183 build_int_2 (n_reg - 1, 0));
3184 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3185 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3186 TREE_SIDE_EFFECTS (u) = 1;
3187 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* addr = sav + sav_ofs + (reg++ * sav_scale).  */
3191 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3195 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3196 build_int_2 (n_reg, 0));
3197 TREE_SIDE_EFFECTS (u) = 1;
3199 u = build1 (CONVERT_EXPR, integer_type_node, u);
3200 TREE_SIDE_EFFECTS (u) = 1;
3202 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3203 TREE_SIDE_EFFECTS (u) = 1;
3205 t = build (PLUS_EXPR, ptr_type_node, t, u);
3206 TREE_SIDE_EFFECTS (t) = 1;
3208 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3210 emit_move_insn (addr_rtx, r);
3212 emit_jump_insn (gen_jump (lab_over));
3216 emit_label (lab_false);
3218 /* ... otherwise out of the overflow area.  */
3220 /* Make sure we don't find reg 7 for the next int arg.
3222 All AltiVec vectors go in the overflow area.  So in the AltiVec
3223 case we need to get the vectors from the overflow area, but
3224 remember where the GPRs and FPRs are.  */
3225 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3227 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3228 TREE_SIDE_EFFECTS (t) = 1;
3229 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3232 /* Care for on-stack alignment if needed.  */
3239 /* Vectors are 16 byte aligned.  */
3240 if (TREE_CODE (type) == VECTOR_TYPE)
/* addr = (ovf + align) & ~align; then bump ovf past the argument.  */
3245 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3246 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3250 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3252 emit_move_insn (addr_rtx, r);
3254 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3255 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3256 TREE_SIDE_EFFECTS (t) = 1;
3257 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3259 emit_label (lab_over);
/* By-reference argument: load the real address through the pointer.  */
3263 r = gen_rtx_MEM (Pmode, addr_rtx);
3264 set_mem_alias_set (r, get_varargs_alias_set ());
3265 emit_move_insn (addr_rtx, r);
/* Register builtin NAME (expanded via insn CODE, with function TYPE)
   only when the target_flags bits in MASK are enabled.  */
3273 #define def_builtin(MASK, NAME, TYPE, CODE) \
3275 if ((MASK) & target_flags) \
3276 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
/* One table entry describing a machine-specific builtin.  */
3279 struct builtin_description
/* target_flags bits that must be set for this builtin to exist.  */
3281 const unsigned int mask;
/* Insn pattern used to expand the builtin.  */
3282 const enum insn_code icode;
/* User-visible builtin function name.  */
3283 const char *const name;
/* Enumerator identifying the builtin to the expander.  */
3284 const enum rs6000_builtins code;
3287 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
/* Each entry: enabling mask, insn pattern, builtin name, builtin code
   (see struct builtin_description above).  */
3289 static const struct builtin_description bdesc_3arg[] =
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3293 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3294 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3295 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3296 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3297 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3298 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3299 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3300 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3301 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3302 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3303 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3304 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3305 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3306 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3307 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3308 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3309 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3310 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3311 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3312 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3316 /* DST operations: void foo (void *, const int, const char). */
/* AltiVec data-stream-touch builtins; same entry layout as above.  */
3318 static const struct builtin_description bdesc_dst[] =
3320 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3326 /* Simple binary operations: VECc = foo (VECa, VECb). */
3328 static const struct builtin_description bdesc_2arg[] =
3330 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3331 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3332 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3333 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3341 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
/* BUG FIX: the vmaxuh/vmaxsh/vmaxuw/vmaxsw/vmaxfp builtins were wired
   to *min* insn patterns (uminv8hi3 etc.), so "max" builtins computed
   minimums.  The correct patterns follow the table's own convention —
   compare vmaxub->umaxv16qi3 here with vminuh->uminv8hi3 below.  */
3366 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3367 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3368 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3369 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3370 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3371 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3372 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3379 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3380 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3381 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3382 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3383 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3384 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3385 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3390 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3395 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3426 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3427 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3428 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3429 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3430 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3431 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3432 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3442 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3445 /* AltiVec predicates. */
/* Table-entry type for the AltiVec predicate builtins (the vec_all_* /
   vec_any_* family).  Unlike plain builtin_description it carries an
   extra opcode string naming the record-form compare instruction.
   NOTE(review): this extract omits the struct's braces and the opcode
   field line -- see the full source.  */
3447 struct builtin_description_predicates
/* Target flag bits (e.g. MASK_ALTIVEC) that must be enabled for this
   builtin to be registered.  */
3449 const unsigned int mask;
/* Insn pattern used to expand the builtin.  */
3450 const enum insn_code icode;
/* Source-level name of the builtin function.  */
3452 const char *const name;
/* Enumerator identifying the builtin (matched against fcode).  */
3453 const enum rs6000_builtins code;
/* AltiVec predicate builtins: each entry pairs a generic predicate
   expander pattern (per vector mode) with the record-form compare
   opcode string it should emit.  Expanded by
   altivec_expand_predicate_builtin.  */
3456 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3458 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3473 /* ABS* operations. */
/* Absolute-value builtins.  Expanded by altivec_expand_abs_builtin,
   which supplies two scratch registers to each pattern.  */
3475 static const struct builtin_description bdesc_abs[] =
3477 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3478 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3479 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3480 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3486 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
/* One-operand builtins (vector result from one vector or one literal
   operand).  Expanded by altivec_expand_unop_builtin.  */
3489 static const struct builtin_description bdesc_1arg[] =
3491 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3492 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3493 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3494 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3495 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3496 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3497 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3498 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3499 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3500 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3501 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3502 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3503 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3504 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3505 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3506 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3507 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
/* Expand a one-operand AltiVec builtin: evaluate the single argument,
   coerce TARGET and the operand to the modes insn pattern ICODE
   expects, and emit the insn.  NOTE(review): this extract omits the
   return type, the error-path return, the "target == 0" test and the
   emit/return tail -- comments describe only the visible lines.  */
3511 altivec_expand_unop_builtin (icode, arglist, target)
3512 enum insn_code icode;
3517 tree arg0 = TREE_VALUE (arglist);
3518 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
/* Result and operand modes come from the insn pattern's operand table.  */
3519 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3520 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3522 /* If we got invalid arguments bail out before generating bad rtl. */
3523 if (arg0 == error_mark_node)
/* Re-use TARGET only if it has the right mode and satisfies the
   pattern's predicate; otherwise grab a fresh pseudo.  */
3527 || GET_MODE (target) != tmode
3528 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3529 target = gen_reg_rtx (tmode);
/* Force the operand into a register if the predicate rejects it.  */
3531 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3532 op0 = copy_to_mode_reg (mode0, op0);
3534 pat = GEN_FCN (icode) (target, op0);
/* Expand an ABS/ABSS builtin.  Like the unop expander, but the abs
   patterns additionally require two scratch registers in the operand
   mode.  NOTE(review): return type, error return and emit/return tail
   are missing from this extract.  */
3543 altivec_expand_abs_builtin (icode, arglist, target)
3544 enum insn_code icode;
3548 rtx pat, scratch1, scratch2;
3549 tree arg0 = TREE_VALUE (arglist);
3550 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3551 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3552 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3554 /* If we have invalid arguments, bail out before generating bad rtl. */
3555 if (arg0 == error_mark_node)
/* Re-use TARGET only if it has the right mode and passes the operand
   predicate.  */
3559 || GET_MODE (target) != tmode
3560 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3561 target = gen_reg_rtx (tmode);
3563 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3564 op0 = copy_to_mode_reg (mode0, op0);
/* The abs patterns take two scratch pseudos in the operand mode.  */
3566 scratch1 = gen_reg_rtx (mode0);
3567 scratch2 = gen_reg_rtx (mode0);
3569 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
/* Expand a two-operand AltiVec builtin: evaluate both arguments,
   coerce TARGET and operands to the modes insn pattern ICODE expects,
   and emit the insn.  NOTE(review): return type, error return and
   emit/return tail are missing from this extract.  */
3578 altivec_expand_binop_builtin (icode, arglist, target)
3579 enum insn_code icode;
3584 tree arg0 = TREE_VALUE (arglist);
3585 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3586 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3587 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3588 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3589 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3590 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3592 /* If we got invalid arguments bail out before generating bad rtl. */
3593 if (arg0 == error_mark_node || arg1 == error_mark_node)
/* Re-use TARGET only if it has the right mode and passes the
   pattern's predicate.  */
3597 || GET_MODE (target) != tmode
3598 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3599 target = gen_reg_rtx (tmode);
/* Force operands into registers where the predicates reject them.  */
3601 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3602 op0 = copy_to_mode_reg (mode0, op0);
3603 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3604 op1 = copy_to_mode_reg (mode1, op1);
3606 pat = GEN_FCN (icode) (target, op0, op1);
/* Expand a vec_all_* / vec_any_* predicate builtin.  The first
   argument selects which CR6 bit-test to apply to the record-form
   compare named by OPCODE; the remaining two are the vectors to
   compare.  The SImode result in TARGET holds the boolean outcome.
   NOTE(review): this extract omits the return type, several returns,
   the "target == 0" test and the switch's case labels.  */
3615 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3616 enum insn_code icode;
/* Argument 1: compile-time constant choosing the CR6 test form.  */
3622 tree cr6_form = TREE_VALUE (arglist);
3623 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3624 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3625 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3626 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
/* Predicates always produce an SImode truth value.  */
3627 enum machine_mode tmode = SImode;
3628 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3629 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* The test selector must be an integer literal.  */
3632 if (TREE_CODE (cr6_form) != INTEGER_CST)
3634 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3638 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3643 /* If we have invalid arguments, bail out before generating bad rtl. */
3644 if (arg0 == error_mark_node || arg1 == error_mark_node)
3648 || GET_MODE (target) != tmode
3649 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3650 target = gen_reg_rtx (tmode);
3652 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3653 op0 = copy_to_mode_reg (mode0, op0);
3654 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3655 op1 = copy_to_mode_reg (mode1, op1);
/* The compare result itself lands in a scratch vector register; only
   the CR6 side effect is read below.  */
3657 scratch = gen_reg_rtx (mode0);
/* The opcode string is smuggled into the pattern as a SYMBOL_REF.  */
3659 pat = GEN_FCN (icode) (scratch, op0, op1,
3660 gen_rtx (SYMBOL_REF, Pmode, opcode));
3665 /* The vec_any* and vec_all* predicates use the same opcodes for two
3666 different operations, but the bits in CR6 will be different
3667 depending on what information we want. So we have to play tricks
3668 with CR6 to get the right bits out.
3670 If you think this is disgusting, look at the specs for the
3671 AltiVec predicates. */
3673 switch (cr6_form_int)
3676 emit_insn (gen_cr6_test_for_zero (target));
3679 emit_insn (gen_cr6_test_for_zero_reverse (target));
3682 emit_insn (gen_cr6_test_for_lt (target));
3685 emit_insn (gen_cr6_test_for_lt_reverse (target));
3688 error ("argument 1 of __builtin_altivec_predicate is out of range");
/* Expand an AltiVec store builtin (stvx/stvebx/...).  Source argument
   order is (value-ish, address, value); note the operands are fed to
   the pattern as (op1, op2, op0), i.e. permuted relative to the
   arglist.  NOTE(review): return type, the error return and the
   emit/return tail are missing from this extract.  */
3696 altivec_expand_stv_builtin (icode, arglist)
3697 enum insn_code icode;
3700 tree arg0 = TREE_VALUE (arglist);
3701 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3702 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3703 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3704 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3705 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3707 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3708 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3709 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3711 /* Invalid arguments. Bail before doing anything stoopid! */
3712 if (arg0 == error_mark_node
3713 || arg1 == error_mark_node
3714 || arg2 == error_mark_node)
/* Each source operand is checked against the pattern operand it will
   actually occupy (op0 -> operand 2, op1 -> operand 0, op2 -> operand 1).  */
3717 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3718 op0 = copy_to_mode_reg (mode2, op0);
3719 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3720 op1 = copy_to_mode_reg (mode0, op1)
3721 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3722 op2 = copy_to_mode_reg (mode1, op2);
3724 pat = GEN_FCN (icode) (op1, op2, op0);
/* Expand a three-operand AltiVec builtin: evaluate all three
   arguments, coerce TARGET and operands to the modes insn pattern
   ICODE expects, and emit the insn.  NOTE(review): return type, error
   return and emit/return tail are missing from this extract.  */
3731 altivec_expand_ternop_builtin (icode, arglist, target)
3732 enum insn_code icode;
3737 tree arg0 = TREE_VALUE (arglist);
3738 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3739 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3740 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3741 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3742 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3743 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3744 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3745 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3746 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3748 /* If we got invalid arguments bail out before generating bad rtl. */
3749 if (arg0 == error_mark_node
3750 || arg1 == error_mark_node
3751 || arg2 == error_mark_node)
/* Re-use TARGET only if it has the right mode and passes the
   pattern's predicate.  */
3755 || GET_MODE (target) != tmode
3756 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3757 target = gen_reg_rtx (tmode);
/* Force operands into registers where the predicates reject them.  */
3759 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3760 op0 = copy_to_mode_reg (mode0, op0);
3761 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3762 op1 = copy_to_mode_reg (mode1, op1);
3763 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3764 op2 = copy_to_mode_reg (mode2, op2);
3766 pat = GEN_FCN (icode) (target, op0, op1, op2);
/* Central dispatcher for AltiVec builtin expansion.  Special-cases
   the internal load/store, VSCR, and data-stream builtins inline,
   then routes everything else through the bdesc_* description tables
   to the generic unop/binop/ternop/abs/predicate expanders above.
   NOTE(review): this extract is heavily truncated (return type,
   braces, many return/emit statements, case labels and second switch
   header omitted) -- comments describe only what is visible.  */
3774 altivec_expand_builtin (exp, target)
3778 struct builtin_description *d;
3779 struct builtin_description_predicates *dp;
3781 enum insn_code icode;
/* EXP is a CALL_EXPR: operand 0's operand 0 is the FUNCTION_DECL,
   operand 1 the argument list.  */
3782 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3783 tree arglist = TREE_OPERAND (exp, 1);
3784 tree arg0, arg1, arg2;
3785 rtx op0, op1, op2, pat;
3786 enum machine_mode tmode, mode0, mode1, mode2;
3787 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* Internal vector loads: one address argument, wrapped in a MEM when
   the pattern's operand predicate rejects the raw rtx.  */
3791 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3792 icode = CODE_FOR_altivec_lvx_16qi;
3793 arg0 = TREE_VALUE (arglist);
3794 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3795 tmode = insn_data[icode].operand[0].mode;
3796 mode0 = insn_data[icode].operand[1].mode;
3799 || GET_MODE (target) != tmode
3800 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3801 target = gen_reg_rtx (tmode);
3803 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3804 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3806 pat = GEN_FCN (icode) (target, op0);
3812 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3813 icode = CODE_FOR_altivec_lvx_8hi;
3814 arg0 = TREE_VALUE (arglist);
3815 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3816 tmode = insn_data[icode].operand[0].mode;
3817 mode0 = insn_data[icode].operand[1].mode;
3820 || GET_MODE (target) != tmode
3821 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3822 target = gen_reg_rtx (tmode);
3824 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3825 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3827 pat = GEN_FCN (icode) (target, op0);
3833 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3834 icode = CODE_FOR_altivec_lvx_4si;
3835 arg0 = TREE_VALUE (arglist);
3836 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3837 tmode = insn_data[icode].operand[0].mode;
3838 mode0 = insn_data[icode].operand[1].mode;
3841 || GET_MODE (target) != tmode
3842 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3843 target = gen_reg_rtx (tmode);
3845 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3846 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3848 pat = GEN_FCN (icode) (target, op0);
3854 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3855 icode = CODE_FOR_altivec_lvx_4sf;
3856 arg0 = TREE_VALUE (arglist);
3857 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3858 tmode = insn_data[icode].operand[0].mode;
3859 mode0 = insn_data[icode].operand[1].mode;
3862 || GET_MODE (target) != tmode
3863 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3864 target = gen_reg_rtx (tmode);
3866 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3867 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3869 pat = GEN_FCN (icode) (target, op0);
/* Internal vector stores: (address, value) argument pair; no result.  */
3875 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3876 icode = CODE_FOR_altivec_stvx_16qi;
3877 arg0 = TREE_VALUE (arglist);
3878 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3879 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3880 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3881 mode0 = insn_data[icode].operand[0].mode;
3882 mode1 = insn_data[icode].operand[1].mode;
3884 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3885 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3886 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3887 op1 = copy_to_mode_reg (mode1, op1);
3889 pat = GEN_FCN (icode) (op0, op1);
3894 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3895 icode = CODE_FOR_altivec_stvx_8hi;
3896 arg0 = TREE_VALUE (arglist);
3897 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3898 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3899 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3900 mode0 = insn_data[icode].operand[0].mode;
3901 mode1 = insn_data[icode].operand[1].mode;
3903 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3904 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3905 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3906 op1 = copy_to_mode_reg (mode1, op1);
3908 pat = GEN_FCN (icode) (op0, op1);
3913 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3914 icode = CODE_FOR_altivec_stvx_4si;
3915 arg0 = TREE_VALUE (arglist);
3916 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3917 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3918 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3919 mode0 = insn_data[icode].operand[0].mode;
3920 mode1 = insn_data[icode].operand[1].mode;
3922 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3923 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3924 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3925 op1 = copy_to_mode_reg (mode1, op1);
3927 pat = GEN_FCN (icode) (op0, op1);
3932 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3933 icode = CODE_FOR_altivec_stvx_4sf;
3934 arg0 = TREE_VALUE (arglist);
3935 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3936 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3937 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3938 mode0 = insn_data[icode].operand[0].mode;
3939 mode1 = insn_data[icode].operand[1].mode;
3941 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3942 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3943 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3944 op1 = copy_to_mode_reg (mode1, op1);
3946 pat = GEN_FCN (icode) (op0, op1);
/* User-visible store builtins share the stv expander.  */
3951 case ALTIVEC_BUILTIN_STVX:
3952 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3953 case ALTIVEC_BUILTIN_STVEBX:
3954 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3955 case ALTIVEC_BUILTIN_STVEHX:
3956 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3957 case ALTIVEC_BUILTIN_STVEWX:
3958 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3959 case ALTIVEC_BUILTIN_STVXL:
3960 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
/* Move-from-VSCR: no arguments, just emit the read.  */
3962 case ALTIVEC_BUILTIN_MFVSCR:
3963 icode = CODE_FOR_altivec_mfvscr;
3964 tmode = insn_data[icode].operand[0].mode;
3967 || GET_MODE (target) != tmode
3968 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3969 target = gen_reg_rtx (tmode);
3971 pat = GEN_FCN (icode) (target);
/* Move-to-VSCR: one vector argument, no result.  */
3977 case ALTIVEC_BUILTIN_MTVSCR:
3978 icode = CODE_FOR_altivec_mtvscr;
3979 arg0 = TREE_VALUE (arglist);
3980 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3981 mode0 = insn_data[icode].operand[0].mode;
3983 /* If we got invalid arguments bail out before generating bad rtl. */
3984 if (arg0 == error_mark_node)
3987 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3988 op0 = copy_to_mode_reg (mode0, op0);
3990 pat = GEN_FCN (icode) (op0);
3995 case ALTIVEC_BUILTIN_DSSALL:
3996 emit_insn (gen_altivec_dssall ());
3999 case ALTIVEC_BUILTIN_DSS:
4000 icode = CODE_FOR_altivec_dss;
4001 arg0 = TREE_VALUE (arglist);
4002 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4003 mode0 = insn_data[icode].operand[0].mode;
4005 /* If we got invalid arguments bail out before generating bad rtl. */
4006 if (arg0 == error_mark_node)
4009 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4010 op0 = copy_to_mode_reg (mode0, op0);
4012 emit_insn (gen_altivec_dss (op0));
4016 /* Handle DST variants. */
4017 d = (struct builtin_description *) bdesc_dst;
4018 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4019 if (d->code == fcode)
4021 arg0 = TREE_VALUE (arglist);
4022 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4023 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4024 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4025 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4026 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4027 mode0 = insn_data[d->icode].operand[0].mode;
4028 mode1 = insn_data[d->icode].operand[1].mode;
4029 mode2 = insn_data[d->icode].operand[2].mode;
4031 /* Invalid arguments, bail out before generating bad rtl. */
4032 if (arg0 == error_mark_node
4033 || arg1 == error_mark_node
4034 || arg2 == error_mark_node)
4037 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4038 op0 = copy_to_mode_reg (mode0, op0);
4039 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4040 op1 = copy_to_mode_reg (mode1, op1);
/* The stream-id argument must be a 2-bit compile-time literal.  */
4042 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4044 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4048 pat = GEN_FCN (d->icode) (op0, op1, op2);
4055 /* Expand abs* operations. */
4056 d = (struct builtin_description *) bdesc_abs;
4057 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4058 if (d->code == fcode)
4059 return altivec_expand_abs_builtin (d->icode, arglist, target);
4061 /* Handle simple unary operations. */
4062 d = (struct builtin_description *) bdesc_1arg;
4063 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4064 if (d->code == fcode)
4065 return altivec_expand_unop_builtin (d->icode, arglist, target);
4067 /* Handle simple binary operations. */
4068 d = (struct builtin_description *) bdesc_2arg;
4069 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4070 if (d->code == fcode)
4071 return altivec_expand_binop_builtin (d->icode, arglist, target);
4073 /* Expand the AltiVec predicates. */
4074 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4075 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4076 if (dp->code == fcode)
4077 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4079 /* LV* are funky. We initialized them differently. */
4082 case ALTIVEC_BUILTIN_LVSL:
4083 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4085 case ALTIVEC_BUILTIN_LVSR:
4086 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4088 case ALTIVEC_BUILTIN_LVEBX:
4089 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4091 case ALTIVEC_BUILTIN_LVEHX:
4092 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4094 case ALTIVEC_BUILTIN_LVEWX:
4095 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4097 case ALTIVEC_BUILTIN_LVXL:
4098 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4100 case ALTIVEC_BUILTIN_LVX:
4101 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4108 /* Handle simple ternary operations. */
4109 d = (struct builtin_description *) bdesc_3arg;
4110 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4111 if (d->code == fcode)
4112 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4118 /* Expand an expression EXP that calls a built-in function,
4119 with result going to TARGET if that's convenient
4120 (and in mode MODE if that's convenient).
4121 SUBTARGET may be used as the target for computing one of EXP's operands.
4122 IGNORE is nonzero if the value is to be ignored. */
/* Target hook implementation: delegates to the AltiVec expander.
   NOTE(review): the return type, the guard line(s) preceding the call
   (presumably a TARGET_ALTIVEC test -- confirm in full source) and
   the function's tail are missing from this extract.  */
4125 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4128 rtx subtarget ATTRIBUTE_UNUSED;
4129 enum machine_mode mode ATTRIBUTE_UNUSED;
4130 int ignore ATTRIBUTE_UNUSED;
4133 return altivec_expand_builtin (exp, target);
/* Target hook: register the machine-specific builtins.  Delegates to
   altivec_init_builtins; NOTE(review): the surrounding guard
   (presumably a TARGET_ALTIVEC test) and braces are missing from this
   extract.  */
4139 rs6000_init_builtins ()
4142 altivec_init_builtins ();
4146 altivec_init_builtins (void)
4148 struct builtin_description *d;
4149 struct builtin_description_predicates *dp;
4152 tree endlink = void_list_node;
4154 tree pint_type_node = build_pointer_type (integer_type_node);
4155 tree pvoid_type_node = build_pointer_type (void_type_node);
4156 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4157 tree pchar_type_node = build_pointer_type (char_type_node);
4158 tree pfloat_type_node = build_pointer_type (float_type_node);
4160 tree v4sf_ftype_v4sf_v4sf_v16qi
4161 = build_function_type (V4SF_type_node,
4162 tree_cons (NULL_TREE, V4SF_type_node,
4163 tree_cons (NULL_TREE, V4SF_type_node,
4164 tree_cons (NULL_TREE,
4167 tree v4si_ftype_v4si_v4si_v16qi
4168 = build_function_type (V4SI_type_node,
4169 tree_cons (NULL_TREE, V4SI_type_node,
4170 tree_cons (NULL_TREE, V4SI_type_node,
4171 tree_cons (NULL_TREE,
4174 tree v8hi_ftype_v8hi_v8hi_v16qi
4175 = build_function_type (V8HI_type_node,
4176 tree_cons (NULL_TREE, V8HI_type_node,
4177 tree_cons (NULL_TREE, V8HI_type_node,
4178 tree_cons (NULL_TREE,
4181 tree v16qi_ftype_v16qi_v16qi_v16qi
4182 = build_function_type (V16QI_type_node,
4183 tree_cons (NULL_TREE, V16QI_type_node,
4184 tree_cons (NULL_TREE, V16QI_type_node,
4185 tree_cons (NULL_TREE,
4189 /* V4SI foo (char). */
4190 tree v4si_ftype_char
4191 = build_function_type (V4SI_type_node,
4192 tree_cons (NULL_TREE, char_type_node, endlink));
4194 /* V8HI foo (char). */
4195 tree v8hi_ftype_char
4196 = build_function_type (V8HI_type_node,
4197 tree_cons (NULL_TREE, char_type_node, endlink));
4199 /* V16QI foo (char). */
4200 tree v16qi_ftype_char
4201 = build_function_type (V16QI_type_node,
4202 tree_cons (NULL_TREE, char_type_node, endlink));
4203 /* V4SF foo (V4SF). */
4204 tree v4sf_ftype_v4sf
4205 = build_function_type (V4SF_type_node,
4206 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4208 /* V4SI foo (int *). */
4209 tree v4si_ftype_pint
4210 = build_function_type (V4SI_type_node,
4211 tree_cons (NULL_TREE, pint_type_node, endlink));
4212 /* V8HI foo (short *). */
4213 tree v8hi_ftype_pshort
4214 = build_function_type (V8HI_type_node,
4215 tree_cons (NULL_TREE, pshort_type_node, endlink));
4216 /* V16QI foo (char *). */
4217 tree v16qi_ftype_pchar
4218 = build_function_type (V16QI_type_node,
4219 tree_cons (NULL_TREE, pchar_type_node, endlink));
4220 /* V4SF foo (float *). */
4221 tree v4sf_ftype_pfloat
4222 = build_function_type (V4SF_type_node,
4223 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4225 /* V8HI foo (V16QI). */
4226 tree v8hi_ftype_v16qi
4227 = build_function_type (V8HI_type_node,
4228 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4230 /* void foo (void *, int, char/literal). */
4231 tree void_ftype_pvoid_int_char
4232 = build_function_type (void_type_node,
4233 tree_cons (NULL_TREE, pvoid_type_node,
4234 tree_cons (NULL_TREE, integer_type_node,
4235 tree_cons (NULL_TREE,
4239 /* void foo (int *, V4SI). */
4240 tree void_ftype_pint_v4si
4241 = build_function_type (void_type_node,
4242 tree_cons (NULL_TREE, pint_type_node,
4243 tree_cons (NULL_TREE, V4SI_type_node,
4245 /* void foo (short *, V8HI). */
4246 tree void_ftype_pshort_v8hi
4247 = build_function_type (void_type_node,
4248 tree_cons (NULL_TREE, pshort_type_node,
4249 tree_cons (NULL_TREE, V8HI_type_node,
4251 /* void foo (char *, V16QI). */
4252 tree void_ftype_pchar_v16qi
4253 = build_function_type (void_type_node,
4254 tree_cons (NULL_TREE, pchar_type_node,
4255 tree_cons (NULL_TREE, V16QI_type_node,
4257 /* void foo (float *, V4SF). */
4258 tree void_ftype_pfloat_v4sf
4259 = build_function_type (void_type_node,
4260 tree_cons (NULL_TREE, pfloat_type_node,
4261 tree_cons (NULL_TREE, V4SF_type_node,
4264 /* void foo (V4SI). */
4265 tree void_ftype_v4si
4266 = build_function_type (void_type_node,
4267 tree_cons (NULL_TREE, V4SI_type_node,
4270 /* void foo (vint, int, void *). */
4271 tree void_ftype_v4si_int_pvoid
4272 = build_function_type (void_type_node,
4273 tree_cons (NULL_TREE, V4SI_type_node,
4274 tree_cons (NULL_TREE, integer_type_node,
4275 tree_cons (NULL_TREE,
4279 /* void foo (vchar, int, void *). */
4280 tree void_ftype_v16qi_int_pvoid
4281 = build_function_type (void_type_node,
4282 tree_cons (NULL_TREE, V16QI_type_node,
4283 tree_cons (NULL_TREE, integer_type_node,
4284 tree_cons (NULL_TREE,
4288 /* void foo (vshort, int, void *). */
4289 tree void_ftype_v8hi_int_pvoid
4290 = build_function_type (void_type_node,
4291 tree_cons (NULL_TREE, V8HI_type_node,
4292 tree_cons (NULL_TREE, integer_type_node,
4293 tree_cons (NULL_TREE,
4297 /* void foo (char). */
4299 = build_function_type (void_type_node,
4300 tree_cons (NULL_TREE, char_type_node,
4303 /* void foo (void). */
4304 tree void_ftype_void
4305 = build_function_type (void_type_node, void_list_node);
4307 /* vshort foo (void). */
4308 tree v8hi_ftype_void
4309 = build_function_type (V8HI_type_node, void_list_node);
4311 tree v4si_ftype_v4si_v4si
4312 = build_function_type (V4SI_type_node,
4313 tree_cons (NULL_TREE, V4SI_type_node,
4314 tree_cons (NULL_TREE, V4SI_type_node,
4317 /* These are for the unsigned 5 bit literals. */
4319 tree v4sf_ftype_v4si_char
4320 = build_function_type (V4SF_type_node,
4321 tree_cons (NULL_TREE, V4SI_type_node,
4322 tree_cons (NULL_TREE, char_type_node,
4324 tree v4si_ftype_v4sf_char
4325 = build_function_type (V4SI_type_node,
4326 tree_cons (NULL_TREE, V4SF_type_node,
4327 tree_cons (NULL_TREE, char_type_node,
4329 tree v4si_ftype_v4si_char
4330 = build_function_type (V4SI_type_node,
4331 tree_cons (NULL_TREE, V4SI_type_node,
4332 tree_cons (NULL_TREE, char_type_node,
4334 tree v8hi_ftype_v8hi_char
4335 = build_function_type (V8HI_type_node,
4336 tree_cons (NULL_TREE, V8HI_type_node,
4337 tree_cons (NULL_TREE, char_type_node,
4339 tree v16qi_ftype_v16qi_char
4340 = build_function_type (V16QI_type_node,
4341 tree_cons (NULL_TREE, V16QI_type_node,
4342 tree_cons (NULL_TREE, char_type_node,
4345 /* These are for the unsigned 4 bit literals. */
4347 tree v16qi_ftype_v16qi_v16qi_char
4348 = build_function_type (V16QI_type_node,
4349 tree_cons (NULL_TREE, V16QI_type_node,
4350 tree_cons (NULL_TREE, V16QI_type_node,
4351 tree_cons (NULL_TREE,
4355 tree v8hi_ftype_v8hi_v8hi_char
4356 = build_function_type (V8HI_type_node,
4357 tree_cons (NULL_TREE, V8HI_type_node,
4358 tree_cons (NULL_TREE, V8HI_type_node,
4359 tree_cons (NULL_TREE,
4363 tree v4si_ftype_v4si_v4si_char
4364 = build_function_type (V4SI_type_node,
4365 tree_cons (NULL_TREE, V4SI_type_node,
4366 tree_cons (NULL_TREE, V4SI_type_node,
4367 tree_cons (NULL_TREE,
4371 tree v4sf_ftype_v4sf_v4sf_char
4372 = build_function_type (V4SF_type_node,
4373 tree_cons (NULL_TREE, V4SF_type_node,
4374 tree_cons (NULL_TREE, V4SF_type_node,
4375 tree_cons (NULL_TREE,
4379 /* End of 4 bit literals. */
4381 tree v4sf_ftype_v4sf_v4sf
4382 = build_function_type (V4SF_type_node,
4383 tree_cons (NULL_TREE, V4SF_type_node,
4384 tree_cons (NULL_TREE, V4SF_type_node,
4386 tree v4sf_ftype_v4sf_v4sf_v4si
4387 = build_function_type (V4SF_type_node,
4388 tree_cons (NULL_TREE, V4SF_type_node,
4389 tree_cons (NULL_TREE, V4SF_type_node,
4390 tree_cons (NULL_TREE,
4393 tree v4sf_ftype_v4sf_v4sf_v4sf
4394 = build_function_type (V4SF_type_node,
4395 tree_cons (NULL_TREE, V4SF_type_node,
4396 tree_cons (NULL_TREE, V4SF_type_node,
4397 tree_cons (NULL_TREE,
4400 tree v4si_ftype_v4si_v4si_v4si
4401 = build_function_type (V4SI_type_node,
4402 tree_cons (NULL_TREE, V4SI_type_node,
4403 tree_cons (NULL_TREE, V4SI_type_node,
4404 tree_cons (NULL_TREE,
4408 tree v8hi_ftype_v8hi_v8hi
4409 = build_function_type (V8HI_type_node,
4410 tree_cons (NULL_TREE, V8HI_type_node,
4411 tree_cons (NULL_TREE, V8HI_type_node,
4413 tree v8hi_ftype_v8hi_v8hi_v8hi
4414 = build_function_type (V8HI_type_node,
4415 tree_cons (NULL_TREE, V8HI_type_node,
4416 tree_cons (NULL_TREE, V8HI_type_node,
4417 tree_cons (NULL_TREE,
4420 tree v4si_ftype_v8hi_v8hi_v4si
4421 = build_function_type (V4SI_type_node,
4422 tree_cons (NULL_TREE, V8HI_type_node,
4423 tree_cons (NULL_TREE, V8HI_type_node,
4424 tree_cons (NULL_TREE,
4427 tree v4si_ftype_v16qi_v16qi_v4si
4428 = build_function_type (V4SI_type_node,
4429 tree_cons (NULL_TREE, V16QI_type_node,
4430 tree_cons (NULL_TREE, V16QI_type_node,
4431 tree_cons (NULL_TREE,
4435 tree v16qi_ftype_v16qi_v16qi
4436 = build_function_type (V16QI_type_node,
4437 tree_cons (NULL_TREE, V16QI_type_node,
4438 tree_cons (NULL_TREE, V16QI_type_node,
4441 tree v4si_ftype_v4sf_v4sf
4442 = build_function_type (V4SI_type_node,
4443 tree_cons (NULL_TREE, V4SF_type_node,
4444 tree_cons (NULL_TREE, V4SF_type_node,
4447 tree v4si_ftype_v4si
4448 = build_function_type (V4SI_type_node,
4449 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4451 tree v8hi_ftype_v8hi
4452 = build_function_type (V8HI_type_node,
4453 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4455 tree v16qi_ftype_v16qi
4456 = build_function_type (V16QI_type_node,
4457 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4459 tree v8hi_ftype_v16qi_v16qi
4460 = build_function_type (V8HI_type_node,
4461 tree_cons (NULL_TREE, V16QI_type_node,
4462 tree_cons (NULL_TREE, V16QI_type_node,
4465 tree v4si_ftype_v8hi_v8hi
4466 = build_function_type (V4SI_type_node,
4467 tree_cons (NULL_TREE, V8HI_type_node,
4468 tree_cons (NULL_TREE, V8HI_type_node,
4471 tree v8hi_ftype_v4si_v4si
4472 = build_function_type (V8HI_type_node,
4473 tree_cons (NULL_TREE, V4SI_type_node,
4474 tree_cons (NULL_TREE, V4SI_type_node,
4477 tree v16qi_ftype_v8hi_v8hi
4478 = build_function_type (V16QI_type_node,
4479 tree_cons (NULL_TREE, V8HI_type_node,
4480 tree_cons (NULL_TREE, V8HI_type_node,
4483 tree v4si_ftype_v16qi_v4si
4484 = build_function_type (V4SI_type_node,
4485 tree_cons (NULL_TREE, V16QI_type_node,
4486 tree_cons (NULL_TREE, V4SI_type_node,
4489 tree v4si_ftype_v16qi_v16qi
4490 = build_function_type (V4SI_type_node,
4491 tree_cons (NULL_TREE, V16QI_type_node,
4492 tree_cons (NULL_TREE, V16QI_type_node,
4495 tree v4si_ftype_v8hi_v4si
4496 = build_function_type (V4SI_type_node,
4497 tree_cons (NULL_TREE, V8HI_type_node,
4498 tree_cons (NULL_TREE, V4SI_type_node,
4501 tree v4si_ftype_v8hi
4502 = build_function_type (V4SI_type_node,
4503 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4505 tree int_ftype_v4si_v4si
4506 = build_function_type (integer_type_node,
4507 tree_cons (NULL_TREE, V4SI_type_node,
4508 tree_cons (NULL_TREE, V4SI_type_node,
4511 tree int_ftype_v4sf_v4sf
4512 = build_function_type (integer_type_node,
4513 tree_cons (NULL_TREE, V4SF_type_node,
4514 tree_cons (NULL_TREE, V4SF_type_node,
4517 tree int_ftype_v16qi_v16qi
4518 = build_function_type (integer_type_node,
4519 tree_cons (NULL_TREE, V16QI_type_node,
4520 tree_cons (NULL_TREE, V16QI_type_node,
4523 tree int_ftype_int_v4si_v4si
4524 = build_function_type
4526 tree_cons (NULL_TREE, integer_type_node,
4527 tree_cons (NULL_TREE, V4SI_type_node,
4528 tree_cons (NULL_TREE, V4SI_type_node,
4531 tree int_ftype_int_v4sf_v4sf
4532 = build_function_type
4534 tree_cons (NULL_TREE, integer_type_node,
4535 tree_cons (NULL_TREE, V4SF_type_node,
4536 tree_cons (NULL_TREE, V4SF_type_node,
4539 tree int_ftype_int_v8hi_v8hi
4540 = build_function_type
4542 tree_cons (NULL_TREE, integer_type_node,
4543 tree_cons (NULL_TREE, V8HI_type_node,
4544 tree_cons (NULL_TREE, V8HI_type_node,
4547 tree int_ftype_int_v16qi_v16qi
4548 = build_function_type
4550 tree_cons (NULL_TREE, integer_type_node,
4551 tree_cons (NULL_TREE, V16QI_type_node,
4552 tree_cons (NULL_TREE, V16QI_type_node,
4555 tree v16qi_ftype_int_pvoid
4556 = build_function_type (V16QI_type_node,
4557 tree_cons (NULL_TREE, integer_type_node,
4558 tree_cons (NULL_TREE, pvoid_type_node,
4561 tree v4si_ftype_int_pvoid
4562 = build_function_type (V4SI_type_node,
4563 tree_cons (NULL_TREE, integer_type_node,
4564 tree_cons (NULL_TREE, pvoid_type_node,
4567 tree v8hi_ftype_int_pvoid
4568 = build_function_type (V8HI_type_node,
4569 tree_cons (NULL_TREE, integer_type_node,
4570 tree_cons (NULL_TREE, pvoid_type_node,
4573 tree int_ftype_v8hi_v8hi
4574 = build_function_type (integer_type_node,
4575 tree_cons (NULL_TREE, V8HI_type_node,
4576 tree_cons (NULL_TREE, V8HI_type_node,
4579 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4580 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4581 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4582 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4583 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4584 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4585 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4586 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4587 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4588 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4589 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4590 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4591 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4592 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4593 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4594 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4595 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4596 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4597 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4598 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4599 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4600 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4601 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4602 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4604 /* Add the simple ternary operators. */
4605 d = (struct builtin_description *) bdesc_3arg;
4606 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4609 enum machine_mode mode0, mode1, mode2, mode3;
4615 mode0 = insn_data[d->icode].operand[0].mode;
4616 mode1 = insn_data[d->icode].operand[1].mode;
4617 mode2 = insn_data[d->icode].operand[2].mode;
4618 mode3 = insn_data[d->icode].operand[3].mode;
4620 /* When all four are of the same mode. */
4621 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4626 type = v4si_ftype_v4si_v4si_v4si;
4629 type = v4sf_ftype_v4sf_v4sf_v4sf;
4632 type = v8hi_ftype_v8hi_v8hi_v8hi;
4635 type = v16qi_ftype_v16qi_v16qi_v16qi;
4641 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4646 type = v4si_ftype_v4si_v4si_v16qi;
4649 type = v4sf_ftype_v4sf_v4sf_v16qi;
4652 type = v8hi_ftype_v8hi_v8hi_v16qi;
4655 type = v16qi_ftype_v16qi_v16qi_v16qi;
4661 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4662 && mode3 == V4SImode)
4663 type = v4si_ftype_v16qi_v16qi_v4si;
4664 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4665 && mode3 == V4SImode)
4666 type = v4si_ftype_v8hi_v8hi_v4si;
4667 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4668 && mode3 == V4SImode)
4669 type = v4sf_ftype_v4sf_v4sf_v4si;
4671 /* vchar, vchar, vchar, 4 bit literal. */
4672 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4674 type = v16qi_ftype_v16qi_v16qi_char;
4676 /* vshort, vshort, vshort, 4 bit literal. */
4677 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4679 type = v8hi_ftype_v8hi_v8hi_char;
4681 /* vint, vint, vint, 4 bit literal. */
4682 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4684 type = v4si_ftype_v4si_v4si_char;
4686 /* vfloat, vfloat, vfloat, 4 bit literal. */
4687 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4689 type = v4sf_ftype_v4sf_v4sf_char;
4694 def_builtin (d->mask, d->name, type, d->code);
4697 /* Add the DST variants. */
4698 d = (struct builtin_description *) bdesc_dst;
4699 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4700 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4702 /* Initialize the predicates. */
4703 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4704 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4706 enum machine_mode mode1;
4709 mode1 = insn_data[dp->icode].operand[1].mode;
4714 type = int_ftype_int_v4si_v4si;
4717 type = int_ftype_int_v8hi_v8hi;
4720 type = int_ftype_int_v16qi_v16qi;
4723 type = int_ftype_int_v4sf_v4sf;
4729 def_builtin (dp->mask, dp->name, type, dp->code);
4732 /* Add the simple binary operators. */
4733 d = (struct builtin_description *) bdesc_2arg;
4734 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4736 enum machine_mode mode0, mode1, mode2;
4742 mode0 = insn_data[d->icode].operand[0].mode;
4743 mode1 = insn_data[d->icode].operand[1].mode;
4744 mode2 = insn_data[d->icode].operand[2].mode;
4746 /* When all three operands are of the same mode. */
4747 if (mode0 == mode1 && mode1 == mode2)
4752 type = v4sf_ftype_v4sf_v4sf;
4755 type = v4si_ftype_v4si_v4si;
4758 type = v16qi_ftype_v16qi_v16qi;
4761 type = v8hi_ftype_v8hi_v8hi;
4768 /* A few other combos we really don't want to do manually. */
4770 /* vint, vfloat, vfloat. */
4771 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4772 type = v4si_ftype_v4sf_v4sf;
4774 /* vshort, vchar, vchar. */
4775 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4776 type = v8hi_ftype_v16qi_v16qi;
4778 /* vint, vshort, vshort. */
4779 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4780 type = v4si_ftype_v8hi_v8hi;
4782 /* vshort, vint, vint. */
4783 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4784 type = v8hi_ftype_v4si_v4si;
4786 /* vchar, vshort, vshort. */
4787 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4788 type = v16qi_ftype_v8hi_v8hi;
4790 /* vint, vchar, vint. */
4791 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4792 type = v4si_ftype_v16qi_v4si;
4794 /* vint, vchar, vchar. */
4795 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4796 type = v4si_ftype_v16qi_v16qi;
4798 /* vint, vshort, vint. */
4799 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4800 type = v4si_ftype_v8hi_v4si;
4802 /* vint, vint, 5 bit literal. */
4803 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4804 type = v4si_ftype_v4si_char;
4806 /* vshort, vshort, 5 bit literal. */
4807 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4808 type = v8hi_ftype_v8hi_char;
4810 /* vchar, vchar, 5 bit literal. */
4811 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4812 type = v16qi_ftype_v16qi_char;
4814 /* vfloat, vint, 5 bit literal. */
4815 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4816 type = v4sf_ftype_v4si_char;
4818 /* vint, vfloat, 5 bit literal. */
4819 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4820 type = v4si_ftype_v4sf_char;
4823 else if (mode0 == SImode)
4828 type = int_ftype_v4si_v4si;
4831 type = int_ftype_v4sf_v4sf;
4834 type = int_ftype_v16qi_v16qi;
4837 type = int_ftype_v8hi_v8hi;
4847 def_builtin (d->mask, d->name, type, d->code);
4850 /* Initialize the abs* operators. */
4851 d = (struct builtin_description *) bdesc_abs;
4852 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4854 enum machine_mode mode0;
4857 mode0 = insn_data[d->icode].operand[0].mode;
4862 type = v4si_ftype_v4si;
4865 type = v8hi_ftype_v8hi;
4868 type = v16qi_ftype_v16qi;
4871 type = v4sf_ftype_v4sf;
4877 def_builtin (d->mask, d->name, type, d->code);
4880 /* Add the simple unary operators. */
4881 d = (struct builtin_description *) bdesc_1arg;
4882 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4884 enum machine_mode mode0, mode1;
4890 mode0 = insn_data[d->icode].operand[0].mode;
4891 mode1 = insn_data[d->icode].operand[1].mode;
4893 if (mode0 == V4SImode && mode1 == QImode)
4894 type = v4si_ftype_char;
4895 else if (mode0 == V8HImode && mode1 == QImode)
4896 type = v8hi_ftype_char;
4897 else if (mode0 == V16QImode && mode1 == QImode)
4898 type = v16qi_ftype_char;
4899 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4900 type = v4sf_ftype_v4sf;
4901 else if (mode0 == V8HImode && mode1 == V16QImode)
4902 type = v8hi_ftype_v16qi;
4903 else if (mode0 == V4SImode && mode1 == V8HImode)
4904 type = v4si_ftype_v8hi;
4908 def_builtin (d->mask, d->name, type, d->code);
4913 /* Generate a memory reference for expand_block_move, copying volatile,
4914 and other bits from an original memory reference. */
/* MODE is the machine mode of the new access, ADDR its address rtx, and
   ORIG_MEM the original memory reference whose attribute bits are
   propagated to the new MEM.  */
4917 expand_block_move_mem (mode, addr, orig_mem)
4918 enum machine_mode mode;
4922 rtx mem = gen_rtx_MEM (mode, addr);
/* Copy volatility, alias information and other MEM flags from the
   original reference onto the freshly built one.  */
4924 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4928 /* Expand a block move operation, and return 1 if successful. Return 0
4929 if we should let the compiler generate normal code.
4931 operands[0] is the destination
4932 operands[1] is the source
4933 operands[2] is the length
4934 operands[3] is the alignment */
/* Maximum number of loads queued before their matching stores are
   flushed in the non-string-instruction path below.  */
4936 #define MAX_MOVE_REG 4
4939 expand_block_move (operands)
4942 rtx orig_dest = operands[0];
4943 rtx orig_src = operands[1];
4944 rtx bytes_rtx = operands[2];
4945 rtx align_rtx = operands[3];
4946 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4957 rtx stores[MAX_MOVE_REG];
4960 /* If this is not a fixed size move, just call memcpy */
4964 /* If this is not a fixed size alignment, abort */
4965 if (GET_CODE (align_rtx) != CONST_INT)
4967 align = INTVAL (align_rtx);
4969 /* Anything to move? */
4970 bytes = INTVAL (bytes_rtx);
4974 /* store_one_arg depends on expand_block_move to handle at least the size of
4975 reg_parm_stack_space. */
4976 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4979 /* Move the address into scratch registers. */
4980 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4981 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
/* Each loop iteration below moves MOVE_BYTES bytes, choosing the widest
   transfer the remaining size, alignment and target allow.  */
4983 if (TARGET_STRING) /* string instructions are available */
4985 for ( ; bytes > 0; bytes -= move_bytes)
4987 if (bytes > 24 /* move up to 32 bytes at a time */
4995 && ! fixed_regs[12])
4997 move_bytes = (bytes > 32) ? 32 : bytes;
4998 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5001 expand_block_move_mem (BLKmode,
5004 GEN_INT ((move_bytes == 32)
5008 else if (bytes > 16 /* move up to 24 bytes at a time */
5014 && ! fixed_regs[10])
5016 move_bytes = (bytes > 24) ? 24 : bytes;
5017 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5020 expand_block_move_mem (BLKmode,
5023 GEN_INT (move_bytes),
5026 else if (bytes > 8 /* move up to 16 bytes at a time */
5032 move_bytes = (bytes > 16) ? 16 : bytes;
5033 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5036 expand_block_move_mem (BLKmode,
5039 GEN_INT (move_bytes),
5042 else if (bytes >= 8 && TARGET_POWERPC64
5043 /* 64-bit loads and stores require word-aligned
5045 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5048 tmp_reg = gen_reg_rtx (DImode);
5049 emit_move_insn (tmp_reg,
5050 expand_block_move_mem (DImode,
5051 src_reg, orig_src));
5052 emit_move_insn (expand_block_move_mem (DImode,
5053 dest_reg, orig_dest),
5056 else if (bytes > 4 && !TARGET_POWERPC64)
5057 { /* move up to 8 bytes at a time */
5058 move_bytes = (bytes > 8) ? 8 : bytes;
5059 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5062 expand_block_move_mem (BLKmode,
5065 GEN_INT (move_bytes),
5068 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5069 { /* move 4 bytes */
5071 tmp_reg = gen_reg_rtx (SImode);
5072 emit_move_insn (tmp_reg,
5073 expand_block_move_mem (SImode,
5074 src_reg, orig_src));
5075 emit_move_insn (expand_block_move_mem (SImode,
5076 dest_reg, orig_dest),
5079 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5080 { /* move 2 bytes */
5082 tmp_reg = gen_reg_rtx (HImode);
5083 emit_move_insn (tmp_reg,
5084 expand_block_move_mem (HImode,
5085 src_reg, orig_src));
5086 emit_move_insn (expand_block_move_mem (HImode,
5087 dest_reg, orig_dest),
5090 else if (bytes == 1) /* move 1 byte */
5093 tmp_reg = gen_reg_rtx (QImode);
5094 emit_move_insn (tmp_reg,
5095 expand_block_move_mem (QImode,
5096 src_reg, orig_src));
5097 emit_move_insn (expand_block_move_mem (QImode,
5098 dest_reg, orig_dest),
5102 { /* move up to 4 bytes at a time */
5103 move_bytes = (bytes > 4) ? 4 : bytes;
5104 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5107 expand_block_move_mem (BLKmode,
5110 GEN_INT (move_bytes),
/* Advance the scratch address registers past the bytes just moved,
   using the add insn that matches the pointer width.  */
5114 if (bytes > move_bytes)
5116 if (! TARGET_POWERPC64)
5118 emit_insn (gen_addsi3 (src_reg, src_reg,
5119 GEN_INT (move_bytes)));
5120 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5121 GEN_INT (move_bytes)));
5125 emit_insn (gen_adddi3 (src_reg, src_reg,
5126 GEN_INT (move_bytes)));
5127 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5128 GEN_INT (move_bytes)));
5134 else /* string instructions not available */
5136 num_reg = offset = 0;
5137 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5139 /* Calculate the correct offset for src/dest */
5143 dest_addr = dest_reg;
5147 src_addr = plus_constant (src_reg, offset);
5148 dest_addr = plus_constant (dest_reg, offset);
5151 /* Generate the appropriate load and store, saving the stores
/* Pick the widest scalar mode the remaining size and alignment allow;
   the load is emitted immediately, the matching store is queued.  */
5153 if (bytes >= 8 && TARGET_POWERPC64
5154 /* 64-bit loads and stores require word-aligned
5156 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5159 tmp_reg = gen_reg_rtx (DImode);
5160 emit_insn (gen_movdi (tmp_reg,
5161 expand_block_move_mem (DImode,
5164 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5169 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5172 tmp_reg = gen_reg_rtx (SImode);
5173 emit_insn (gen_movsi (tmp_reg,
5174 expand_block_move_mem (SImode,
5177 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5182 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5185 tmp_reg = gen_reg_rtx (HImode);
5186 emit_insn (gen_movhi (tmp_reg,
5187 expand_block_move_mem (HImode,
5190 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5198 tmp_reg = gen_reg_rtx (QImode);
5199 emit_insn (gen_movqi (tmp_reg,
5200 expand_block_move_mem (QImode,
5203 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
/* Flush the queued stores once MAX_MOVE_REG loads have been issued.  */
5209 if (num_reg >= MAX_MOVE_REG)
5211 for (i = 0; i < num_reg; i++)
5212 emit_insn (stores[i]);
/* Emit any stores still pending after the loop.  */
5217 for (i = 0; i < num_reg; i++)
5218 emit_insn (stores[i]);
5225 /* Return 1 if OP is a load multiple operation. It is known to be a
5226 PARALLEL and the first section will be tested. */
/* Predicate for the lmw-style pattern: every element must load SImode
   register DEST_REGNO + I from SRC_ADDR + 4 * I.  MODE is unused.  */
5229 load_multiple_operation (op, mode)
5231 enum machine_mode mode ATTRIBUTE_UNUSED;
5233 int count = XVECLEN (op, 0);
5234 unsigned int dest_regno;
5238 /* Perform a quick check so we don't blow up below. */
5240 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5241 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5242 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
/* Base register number and base address come from element 0.  */
5245 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5246 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
/* Element I must be an SImode load of register DEST_REGNO + I from
   the address SRC_ADDR plus the constant 4 * I.  */
5248 for (i = 1; i < count; i++)
5250 rtx elt = XVECEXP (op, 0, i);
5252 if (GET_CODE (elt) != SET
5253 || GET_CODE (SET_DEST (elt)) != REG
5254 || GET_MODE (SET_DEST (elt)) != SImode
5255 || REGNO (SET_DEST (elt)) != dest_regno + i
5256 || GET_CODE (SET_SRC (elt)) != MEM
5257 || GET_MODE (SET_SRC (elt)) != SImode
5258 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5259 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5260 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5261 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5268 /* Similar, but tests for store multiple. Here, the second vector element
5269 is a CLOBBER. It will be tested later. */
/* Predicate for the stmw-style pattern; COUNT excludes the CLOBBER, and
   the element loop skips over it with the I + 1 index.  MODE is unused.  */
5272 store_multiple_operation (op, mode)
5274 enum machine_mode mode ATTRIBUTE_UNUSED;
5276 int count = XVECLEN (op, 0) - 1;
5277 unsigned int src_regno;
5281 /* Perform a quick check so we don't blow up below. */
5283 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5284 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5285 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
/* Base register number and base address come from element 0.  */
5288 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5289 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
/* Element I + 1 must be an SImode store of register SRC_REGNO + I to
   the address DEST_ADDR plus the constant 4 * I.  */
5291 for (i = 1; i < count; i++)
5293 rtx elt = XVECEXP (op, 0, i + 1);
5295 if (GET_CODE (elt) != SET
5296 || GET_CODE (SET_SRC (elt)) != REG
5297 || GET_MODE (SET_SRC (elt)) != SImode
5298 || REGNO (SET_SRC (elt)) != src_regno + i
5299 || GET_CODE (SET_DEST (elt)) != MEM
5300 || GET_MODE (SET_DEST (elt)) != SImode
5301 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5302 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5303 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5304 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5311 /* Return 1 for a parallel vrsave operation. */
/* The first element must be a SET of a register from an UNSPEC_VOLATILE,
   and either its destination or its source must involve VRSAVE_REGNO;
   remaining elements need only be SETs or CLOBBERs.  MODE is unused.  */
5314 vrsave_operation (op, mode)
5316 enum machine_mode mode ATTRIBUTE_UNUSED;
5318 int count = XVECLEN (op, 0);
5319 unsigned int dest_regno, src_regno;
5323 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5324 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5325 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5328 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5329 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
/* At least one side of the first SET must be the VRSAVE register.  */
5331 if (dest_regno != VRSAVE_REGNO
5332 && src_regno != VRSAVE_REGNO)
5335 for (i = 1; i < count; i++)
5337 rtx elt = XVECEXP (op, 0, i);
5339 if (GET_CODE (elt) != CLOBBER
5340 && GET_CODE (elt) != SET)
5347 /* Return 1 for an PARALLEL suitable for mtcrf. */
/* Every element must set a CR field (CCmode condition register) from an
   UNSPEC over the same SImode GPR plus the mask bit selecting that
   field.  MODE is unused.  */
5350 mtcrf_operation (op, mode)
5352 enum machine_mode mode ATTRIBUTE_UNUSED;
5354 int count = XVECLEN (op, 0);
5358 /* Perform a quick check so we don't blow up below. */
5360 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5361 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5362 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5364 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
/* The common source must be an SImode general-purpose register.  */
5366 if (GET_CODE (src_reg) != REG
5367 || GET_MODE (src_reg) != SImode
5368 || ! INT_REGNO_P (REGNO (src_reg)))
5371 for (i = 0; i < count; i++)
5373 rtx exp = XVECEXP (op, 0, i);
5377 if (GET_CODE (exp) != SET
5378 || GET_CODE (SET_DEST (exp)) != REG
5379 || GET_MODE (SET_DEST (exp)) != CCmode
5380 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5382 unspec = SET_SRC (exp);
/* MASKVAL is the single mtcrf mask bit addressing this CR field.  */
5383 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
/* NOTE(review): 20 is the hard-coded UNSPEC number of the mtcrf
   pattern -- verify it matches the insn in rs6000.md.  */
5385 if (GET_CODE (unspec) != UNSPEC
5386 || XINT (unspec, 1) != 20
5387 || XVECLEN (unspec, 0) != 2
5388 || XVECEXP (unspec, 0, 0) != src_reg
5389 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5390 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5396 /* Return 1 for an PARALLEL suitable for lmw. */
/* lmw loads registers DEST_REGNO..31 from consecutive words, so COUNT
   must equal 32 - DEST_REGNO and every element must load SImode register
   DEST_REGNO + I from BASE_REGNO + OFFSET + 4 * I.  MODE is unused.  */
5399 lmw_operation (op, mode)
5401 enum machine_mode mode ATTRIBUTE_UNUSED;
5403 int count = XVECLEN (op, 0);
5404 unsigned int dest_regno;
5406 unsigned int base_regno;
5407 HOST_WIDE_INT offset;
5410 /* Perform a quick check so we don't blow up below. */
5412 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5413 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5414 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5417 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)))
5418 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
/* lmw always runs through r31, so the element count is fixed by the
   first destination register.  */
5421 || count != 32 - (int) dest_regno)
/* Decompose the base address: either a plain register (offset 0) or
   register + constant offset.  r0 cannot be a base register.  */
5424 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5427 base_regno = REGNO (src_addr);
5428 if (base_regno == 0)
5431 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5433 offset = INTVAL (XEXP (src_addr, 1));
5434 base_regno = REGNO (XEXP (src_addr, 0));
5439 for (i = 0; i < count; i++)
5441 rtx elt = XVECEXP (op, 0, i);
5444 HOST_WIDE_INT newoffset;
5446 if (GET_CODE (elt) != SET
5447 || GET_CODE (SET_DEST (elt)) != REG
5448 || GET_MODE (SET_DEST (elt)) != SImode
5449 || REGNO (SET_DEST (elt)) != dest_regno + i
5450 || GET_CODE (SET_SRC (elt)) != MEM
5451 || GET_MODE (SET_SRC (elt)) != SImode)
5453 newaddr = XEXP (SET_SRC (elt), 0);
5454 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5459 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5461 addr_reg = XEXP (newaddr, 0);
5462 newoffset = INTVAL (XEXP (newaddr, 1));
/* Each element's address must use the same base register and advance
   by exactly one word per register.  */
5466 if (REGNO (addr_reg) != base_regno
5467 || newoffset != offset + 4 * i)
5474 /* Return 1 for an PARALLEL suitable for stmw. */
/* Mirror image of lmw_operation: stmw stores registers SRC_REGNO..31 to
   consecutive words, so COUNT must equal 32 - SRC_REGNO and element I
   must store SImode register SRC_REGNO + I at BASE_REGNO + OFFSET +
   4 * I.  MODE is unused.  */
5477 stmw_operation (op, mode)
5479 enum machine_mode mode ATTRIBUTE_UNUSED;
5481 int count = XVECLEN (op, 0);
5482 unsigned int src_regno;
5484 unsigned int base_regno;
5485 HOST_WIDE_INT offset;
5488 /* Perform a quick check so we don't blow up below. */
5490 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5491 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5492 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5495 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5496 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
/* stmw always runs through r31, fixing the element count.  */
5499 || count != 32 - (int) src_regno)
/* Decompose the base address: plain register (offset 0) or register +
   constant offset; r0 cannot be a base register.  */
5502 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5505 base_regno = REGNO (dest_addr);
5506 if (base_regno == 0)
5509 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5511 offset = INTVAL (XEXP (dest_addr, 1));
5512 base_regno = REGNO (XEXP (dest_addr, 0));
5517 for (i = 0; i < count; i++)
5519 rtx elt = XVECEXP (op, 0, i);
5522 HOST_WIDE_INT newoffset;
5524 if (GET_CODE (elt) != SET
5525 || GET_CODE (SET_SRC (elt)) != REG
5526 || GET_MODE (SET_SRC (elt)) != SImode
5527 || REGNO (SET_SRC (elt)) != src_regno + i
5528 || GET_CODE (SET_DEST (elt)) != MEM
5529 || GET_MODE (SET_DEST (elt)) != SImode)
5531 newaddr = XEXP (SET_DEST (elt), 0);
5532 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5537 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5539 addr_reg = XEXP (newaddr, 0);
5540 newoffset = INTVAL (XEXP (newaddr, 1));
/* Same base register, one word of offset per stored register.  */
5544 if (REGNO (addr_reg) != base_regno
5545 || newoffset != offset + 4 * i)
5552 /* A validation routine: say whether CODE, a condition code, and MODE
5553 match. The other alternatives either don't make sense or should
5554 never be generated. */
5557 validate_condition_mode (code, mode)
5559 enum machine_mode mode;
/* CODE must be a comparison rtx code and MODE a condition-code mode.  */
5561 if (GET_RTX_CLASS (code) != '<'
5562 || GET_MODE_CLASS (mode) != MODE_CC)
5565 /* These don't make sense. */
/* Signed orderings cannot be read from an unsigned-compare CC, and
   unsigned orderings require the unsigned CC mode.  */
5566 if ((code == GT || code == LT || code == GE || code == LE)
5567 && mode == CCUNSmode)
5570 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5571 && mode != CCUNSmode)
/* Unordered/IEEE comparison codes only exist for floating point.  */
5574 if (mode != CCFPmode
5575 && (code == ORDERED || code == UNORDERED
5576 || code == UNEQ || code == LTGT
5577 || code == UNGT || code == UNLT
5578 || code == UNGE || code == UNLE))
5581 /* These should never be generated except for
5582 flag_unsafe_math_optimizations. */
5583 if (mode == CCFPmode
5584 && ! flag_unsafe_math_optimizations
5585 && (code == LE || code == GE
5586 || code == UNEQ || code == LTGT
5587 || code == UNGT || code == UNLT))
5590 /* These are invalid; the information is not there. */
/* CCEQmode records only equality, so only EQ/NE may be tested.  */
5591 if (mode == CCEQmode
5592 && code != EQ && code != NE)
5596 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5597 We only check the opcode against the mode of the CC value here. */
/* MODE is unused; the relevant mode is taken from OP's first operand,
   which must be a CC-class value.  */
5600 branch_comparison_operator (op, mode)
5602 enum machine_mode mode ATTRIBUTE_UNUSED;
5604 enum rtx_code code = GET_CODE (op);
5605 enum machine_mode cc_mode;
5607 if (GET_RTX_CLASS (code) != '<')
5610 cc_mode = GET_MODE (XEXP (op, 0));
5611 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
/* Sanity-check the CODE/CC-mode pairing (see validate_condition_mode).  */
5614 validate_condition_mode (code, cc_mode);
5619 /* Return 1 if OP is a comparison operation that is valid for a branch
5620 insn and which is true if the corresponding bit in the CC register
/* Builds on branch_comparison_operator, then accepts only the codes
   that test a CC bit being set rather than clear.  */
5624 branch_positive_comparison_operator (op, mode)
5626 enum machine_mode mode;
5630 if (! branch_comparison_operator (op, mode))
5633 code = GET_CODE (op);
/* EQ, LT, GT, LTU, GTU and UNORDERED are the "positive" codes.  */
5634 return (code == EQ || code == LT || code == GT
5635 || code == LTU || code == GTU
5636 || code == UNORDERED);
5639 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5640 We check the opcode against the mode of the CC value and disallow EQ or
5641 NE comparisons for integers. */
5644 scc_comparison_operator (op, mode)
5646 enum machine_mode mode;
5648 enum rtx_code code = GET_CODE (op);
5649 enum machine_mode cc_mode;
/* Unlike the branch predicate, MODE is honored here when specified.  */
5651 if (GET_MODE (op) != mode && mode != VOIDmode)
5654 if (GET_RTX_CLASS (code) != '<')
5657 cc_mode = GET_MODE (XEXP (op, 0));
5658 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5661 validate_condition_mode (code, cc_mode);
/* Reject NE on integer CC modes (per the header comment above); the
   floating-point CC mode is exempt.  */
5663 if (code == NE && cc_mode != CCFPmode)
/* Predicate: return 1 if OP is any comparison rtx, for use by the
   trap insn patterns.  MODE must match OP's mode unless VOIDmode.  */
5670 trap_comparison_operator (op, mode)
5672 enum machine_mode mode;
5674 if (mode != VOIDmode && mode != GET_MODE (op))
5676 return GET_RTX_CLASS (GET_CODE (op)) == '<';
/* Predicate: return 1 if OP is one of the three basic boolean
   operations (AND, IOR, XOR).  */
5680 boolean_operator (op, mode)
5682 enum machine_mode mode ATTRIBUTE_UNUSED;
5684 enum rtx_code code = GET_CODE (op);
5685 return (code == AND || code == IOR || code == XOR);
/* Predicate: return 1 if OP is an inclusive- or exclusive-or
   operation (IOR or XOR) -- i.e. a boolean op other than AND.  */
5689 boolean_or_operator (op, mode)
5691 enum machine_mode mode ATTRIBUTE_UNUSED;
5693 enum rtx_code code = GET_CODE (op);
5694 return (code == IOR || code == XOR);
/* Predicate: return 1 if OP is a signed or unsigned min/max
   operation (SMIN, SMAX, UMIN, UMAX).  */
5698 min_max_operator (op, mode)
5700 enum machine_mode mode ATTRIBUTE_UNUSED;
5702 enum rtx_code code = GET_CODE (op);
5703 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5706 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5707 mask required to convert the result of a rotate insn into a shift
5708 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5711 includes_lshift_p (shiftop, andop)
/* shift_mask: all ones, then shifted left so the low SHIFTOP bits
   clear -- the bits a left shift of SHIFTOP would zero out.  */
5715 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5717 shift_mask <<= INTVAL (shiftop);
/* ANDOP may not have any bit set outside the shift mask; the
   0xffffffff restricts the check to the SImode (32-bit) word.  */
5719 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5722 /* Similar, but for right shift. */
5725 includes_rshift_p (shiftop, andop)
/* Mirror image of includes_lshift_p: the right shift of SHIFTOP bits
   zeros the high SHIFTOP bits, so shift the all-ones mask right.  */
5729 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5731 shift_mask >>= INTVAL (shiftop);
5733 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5736 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5737 to perform a left shift. It must have exactly SHIFTOP least
5738 significant 0's, then one or more 1's, then zero or more 0's. */
/* NOTE(review): several lines are elided from this listing (gaps in
   the embedded numbering), including return statements and the lsb
   extraction (presumably lsb = c & -c style).  */
5741 includes_rldic_lshift_p (shiftop, andop)
5745 if (GET_CODE (andop) == CONST_INT)
5747 HOST_WIDE_INT c, lsb, shift_mask;
/* All-zero and all-one masks are never valid.  */
5750 if (c == 0 || c == ~0)
5754 shift_mask <<= INTVAL (shiftop);
5756 /* Find the least significant one bit. */
5759 /* It must coincide with the LSB of the shift mask. */
5760 if (-lsb != shift_mask)
5763 /* Invert to look for the next transition (if any). */
5766 /* Remove the low group of ones (originally low group of zeros). */
5769 /* Again find the lsb, and check we have all 1's above. */
/* CONST_DOUBLE case: the 64-bit mask may be split across the high
   and low words on a 32-bit host.  */
5773 else if (GET_CODE (andop) == CONST_DOUBLE
5774 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5776 HOST_WIDE_INT low, high, lsb;
5777 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5779 low = CONST_DOUBLE_LOW (andop);
5780 if (HOST_BITS_PER_WIDE_INT < 64)
5781 high = CONST_DOUBLE_HIGH (andop);
/* Again reject all-zero and all-one 64-bit masks.  */
5783 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5784 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
/* Transition entirely within the high word.  */
5787 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5789 shift_mask_high = ~0;
5790 if (INTVAL (shiftop) > 32)
5791 shift_mask_high <<= INTVAL (shiftop) - 32;
5795 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5802 return high == -lsb;
/* Otherwise the low transition is in the low word.  */
5805 shift_mask_low = ~0;
5806 shift_mask_low <<= INTVAL (shiftop);
5810 if (-lsb != shift_mask_low)
5813 if (HOST_BITS_PER_WIDE_INT < 64)
5818 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5821 return high == -lsb;
5825 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5831 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5832 to perform a left shift. It must have SHIFTOP or more least
5833 significant 0's, with the remainder of the word 1's. */
/* NOTE(review): this listing elides lines (numbering gaps), including
   the initialization of C and the lsb extraction.  */
5836 includes_rldicr_lshift_p (shiftop, andop)
5840 if (GET_CODE (andop) == CONST_INT)
5842 HOST_WIDE_INT c, lsb, shift_mask;
5845 shift_mask <<= INTVAL (shiftop);
5848 /* Find the least significant one bit. */
5851 /* It must be covered by the shift mask.
5852 This test also rejects c == 0. */
5853 if ((lsb & shift_mask) == 0)
5856 /* Check we have all 1's above the transition, and reject all 1's. */
5857 return c == -lsb && lsb != 1;
/* CONST_DOUBLE case: 64-bit mask, possibly split over two host words.  */
5859 else if (GET_CODE (andop) == CONST_DOUBLE
5860 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5862 HOST_WIDE_INT low, lsb, shift_mask_low;
5864 low = CONST_DOUBLE_LOW (andop);
5866 if (HOST_BITS_PER_WIDE_INT < 64)
5868 HOST_WIDE_INT high, shift_mask_high;
5870 high = CONST_DOUBLE_HIGH (andop);
/* Transition in the high word: build the shift mask for bits 32..63.  */
5874 shift_mask_high = ~0;
5875 if (INTVAL (shiftop) > 32)
5876 shift_mask_high <<= INTVAL (shiftop) - 32;
5880 if ((lsb & shift_mask_high) == 0)
5883 return high == -lsb;
/* Transition in the low word.  */
5889 shift_mask_low = ~0;
5890 shift_mask_low <<= INTVAL (shiftop);
5894 if ((lsb & shift_mask_low) == 0)
5897 return low == -lsb && lsb != 1;
5903 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5904 for lfq and stfq insns.
5906 Note reg1 and reg2 *must* be hard registers. To be sure we will
5907 abort if we are passed pseudo registers. */
5910 registers_ok_for_quad_peep (reg1, reg2)
5913 /* We might have been passed a SUBREG. */
5914 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
/* The two registers must be consecutive, REG1 first.  */
5917 return (REGNO (reg1) == REGNO (reg2) - 1);
5920 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5921 addr1 and addr2 must be in consecutive memory locations
5922 (addr2 == addr1 + 8). */
5925 addrs_ok_for_quad_peep (addr1, addr2)
5932 /* Extract an offset (if used) from the first addr. */
5933 if (GET_CODE (addr1) == PLUS)
5935 /* If not a REG, return zero. */
5936 if (GET_CODE (XEXP (addr1, 0)) != REG)
5940 reg1 = REGNO (XEXP (addr1, 0));
5941 /* The offset must be constant! */
5942 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5944 offset1 = INTVAL (XEXP (addr1, 1));
/* Anything other than (plus reg const) or a bare reg is rejected.  */
5947 else if (GET_CODE (addr1) != REG)
5951 reg1 = REGNO (addr1);
5952 /* This was a simple (mem (reg)) expression. Offset is 0. */
5956 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5957 if (GET_CODE (addr2) != PLUS)
5960 if (GET_CODE (XEXP (addr2, 0)) != REG
5961 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
/* Both addresses must use the same base register.  */
5964 if (reg1 != REGNO (XEXP (addr2, 0)))
5967 /* The offset for the second addr must be 8 more than the first addr. */
5968 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5971 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5976 /* Return the register class of a scratch register needed to copy IN into
5977 or out of a register in CLASS in MODE. If it can be done directly,
5978 NO_REGS is returned. */
5981 secondary_reload_class (class, mode, in)
5982 enum reg_class class;
5983 enum machine_mode mode ATTRIBUTE_UNUSED;
5988 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5990 /* We cannot copy a symbolic operand directly into anything
5991 other than BASE_REGS for TARGET_ELF. So indicate that a
5992 register from BASE_REGS is needed as an intermediate
5995 On Darwin, pic addresses require a load from memory, which
5996 needs a base register. */
5997 if (class != BASE_REGS
5998 && (GET_CODE (in) == SYMBOL_REF
5999 || GET_CODE (in) == HIGH
6000 || GET_CODE (in) == LABEL_REF
6001 || GET_CODE (in) == CONST))
/* Determine the (hard) register number of IN, if any; pseudos are
   resolved through true_regnum.  */
6005 if (GET_CODE (in) == REG)
6008 if (regno >= FIRST_PSEUDO_REGISTER)
6010 regno = true_regnum (in);
6011 if (regno >= FIRST_PSEUDO_REGISTER)
6015 else if (GET_CODE (in) == SUBREG)
6017 regno = true_regnum (in);
6018 if (regno >= FIRST_PSEUDO_REGISTER)
6024 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6026 if (class == GENERAL_REGS || class == BASE_REGS
6027 || (regno >= 0 && INT_REGNO_P (regno)))
6030 /* Constants, memory, and FP registers can go into FP registers. */
6031 if ((regno == -1 || FP_REGNO_P (regno))
6032 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6035 /* Memory, and AltiVec registers can go into AltiVec registers. */
6036 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6037 && class == ALTIVEC_REGS)
6040 /* We can copy among the CR registers. */
6041 if ((class == CR_REGS || class == CR0_REGS)
6042 && regno >= 0 && CR_REGNO_P (regno))
6045 /* Otherwise, we need GENERAL_REGS. */
6046 return GENERAL_REGS;
6049 /* Given a comparison operation, return the bit number in CCR to test. We
6050 know this is a valid comparison.
6052 SCC_P is 1 if this is for an scc. That means that %D will have been
6053 used instead of %C, so the bits will be in different places.
6055 Return -1 if OP isn't a valid comparison for some reason. */
/* NOTE(review): function header, the extraction of REG from OP and the
   switch statement header are elided in this listing.  */
6062 enum rtx_code code = GET_CODE (op);
6063 enum machine_mode cc_mode;
6068 if (GET_RTX_CLASS (code) != '<')
6073 if (GET_CODE (reg) != REG
6074 || ! CR_REGNO_P (REGNO (reg)))
6077 cc_mode = GET_MODE (reg);
6078 cc_regnum = REGNO (reg);
/* Each CR field occupies four consecutive CCR bits.  */
6079 base_bit = 4 * (cc_regnum - CR0_REGNO);
6081 validate_condition_mode (code, cc_mode);
/* Map the comparison code to its LT/GT/EQ/SO bit within the field.  */
6086 return scc_p ? base_bit + 3 : base_bit + 2;
6088 return base_bit + 2;
6089 case GT: case GTU: case UNLE:
6090 return base_bit + 1;
6091 case LT: case LTU: case UNGE:
6093 case ORDERED: case UNORDERED:
6094 return base_bit + 3;
6097 /* If scc, we will have done a cror to put the bit in the
6098 unordered position. So test that bit. For integer, this is ! LT
6099 unless this is an scc insn. */
6100 return scc_p ? base_bit + 3 : base_bit;
6103 return scc_p ? base_bit + 3 : base_bit + 1;
6110 /* Return the GOT register. */
6113 rs6000_got_register (value)
6114 rtx value ATTRIBUTE_UNUSED;
6116 /* The second flow pass currently (June 1999) can't update
6117 regs_ever_live without disturbing other parts of the compiler, so
6118 update it here to make the prolog/epilogue code happy. */
6119 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6120 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
/* Record that this function references the PIC offset table.  */
6122 current_function_uses_pic_offset_table = 1;
6124 return pic_offset_table_rtx;
6127 /* Functions to init, mark and free struct machine_function.
6128 These will be called, via pointer variables,
6129 from push_function_context and pop_function_context. */
/* Allocate a zero-initialized machine_function for function P.  */
6132 rs6000_init_machine_status (p)
6135 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
/* Release P's machine_function, if any (the free itself is on a line
   elided from this listing).  */
6139 rs6000_free_machine_status (p)
6142 if (p->machine == NULL)
6150 /* Print an operand. Recognize special options, documented below. */
/* Relocation suffix and base register used when printing small-data
   references.  NOTE(review): the #if/#else/#endif lines that select
   between the two definition pairs are elided in this listing
   (numbering gap); presumably conditional on TARGET_ELF.  */
6153 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6154 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6156 #define SMALL_DATA_RELOC "sda21"
6157 #define SMALL_DATA_REG 0
/* Output operand X of an assembler instruction to FILE, applying the
   output-modifier letter CODE (the big switch below; many `case'
   labels, `break's and braces are elided from this listing --
   numbering gaps).  */
6161 print_operand (file, x, code)
6169 /* These macros test for integers and extract the low-order bits. */
/* NOTE(review): the #define line naming this first macro (an
   is-a-constant-integer predicate) is elided.  */
6171 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6172 && GET_MODE (X) == VOIDmode)
6174 #define INT_LOWPART(X) \
6175 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6180 /* Write out an instruction after the call which may be replaced
6181 with glue code by the loader. This depends on the AIX version. */
6182 asm_fprintf (file, RS6000_CALL_GLUE);
6185 /* %a is output_address. */
6188 /* If X is a constant integer whose low-order 5 bits are zero,
6189 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6190 in the AIX assembler where "sri" with a zero shift count
6191 writes a trash instruction. */
6192 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6199 /* If constant, low-order 16 bits of constant, unsigned.
6200 Otherwise, write normally. */
6202 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6204 print_operand (file, x, 0);
6208 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6209 for 64-bit mask direction. */
6210 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6213 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6217 /* There used to be a comment for 'C' reading "This is an
6218 optional cror needed for certain floating-point
6219 comparisons. Otherwise write nothing." */
6221 /* Similar, except that this is for an scc, so we must be able to
6222 encode the test in a single bit that is one. We do the above
6223 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6224 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6225 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6227 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6229 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6231 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6234 else if (GET_CODE (x) == NE)
6236 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6238 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6239 base_bit + 2, base_bit + 2);
6244 /* X is a CR register. Print the number of the EQ bit of the CR */
6245 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6246 output_operand_lossage ("invalid %%E value");
6248 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6252 /* X is a CR register. Print the shift count needed to move it
6253 to the high-order four bits. */
6254 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6255 output_operand_lossage ("invalid %%f value");
6257 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6261 /* Similar, but print the count for the rotate in the opposite
6263 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6264 output_operand_lossage ("invalid %%F value");
6266 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6270 /* X is a constant integer. If it is negative, print "m",
6271 otherwise print "z". This is to make an aze or ame insn. */
6272 if (GET_CODE (x) != CONST_INT)
6273 output_operand_lossage ("invalid %%G value");
6274 else if (INTVAL (x) >= 0)
6281 /* If constant, output low-order five bits. Otherwise, write
6284 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6286 print_operand (file, x, 0);
6290 /* If constant, output low-order six bits. Otherwise, write
6293 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6295 print_operand (file, x, 0);
6299 /* Print `i' if this is a constant, else nothing. */
6305 /* Write the bit number in CCR for jump. */
6308 output_operand_lossage ("invalid %%j code");
6310 fprintf (file, "%d", i);
6314 /* Similar, but add one for shift count in rlinm for scc and pass
6315 scc flag to `ccr_bit'. */
6318 output_operand_lossage ("invalid %%J code");
6320 /* If we want bit 31, write a shift count of zero, not 32. */
6321 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6325 /* X must be a constant. Write the 1's complement of the
6328 output_operand_lossage ("invalid %%k value");
6330 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6334 /* X must be a symbolic constant on ELF. Write an
6335 expression suitable for an 'addi' that adds in the low 16
6337 if (GET_CODE (x) != CONST)
6339 print_operand_address (file, x);
6344 if (GET_CODE (XEXP (x, 0)) != PLUS
6345 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6346 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6347 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6348 output_operand_lossage ("invalid %%K value");
6349 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6351 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6355 /* %l is output_asm_label. */
6358 /* Write second word of DImode or DFmode reference. Works on register
6359 or non-indexed memory only. */
6360 if (GET_CODE (x) == REG)
6361 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6362 else if (GET_CODE (x) == MEM)
6364 /* Handle possible auto-increment. Since it is pre-increment and
6365 we have already done it, we can just use an offset of word. */
6366 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6367 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6368 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6371 output_address (XEXP (adjust_address_nv (x, SImode,
6375 if (small_data_operand (x, GET_MODE (x)))
6376 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6377 reg_names[SMALL_DATA_REG]);
6382 /* MB value for a mask operand. */
6383 if (! mask_operand (x, SImode))
6384 output_operand_lossage ("invalid %%m value");
6386 val = INT_LOWPART (x);
6388 /* If the high bit is set and the low bit is not, the value is zero.
6389 If the high bit is zero, the value is the first 1 bit we find from
6391 if ((val & 0x80000000) && ((val & 1) == 0))
6396 else if ((val & 0x80000000) == 0)
6398 for (i = 1; i < 32; i++)
6399 if ((val <<= 1) & 0x80000000)
6401 fprintf (file, "%d", i);
6405 /* Otherwise, look for the first 0 bit from the right. The result is its
6406 number plus 1. We know the low-order bit is one. */
6407 for (i = 0; i < 32; i++)
6408 if (((val >>= 1) & 1) == 0)
6411 /* If we ended in ...01, i would be 0. The correct value is 31, so
6413 fprintf (file, "%d", 31 - i);
6417 /* ME value for a mask operand. */
6418 if (! mask_operand (x, SImode))
6419 output_operand_lossage ("invalid %%M value");
6421 val = INT_LOWPART (x);
6423 /* If the low bit is set and the high bit is not, the value is 31.
6424 If the low bit is zero, the value is the first 1 bit we find from
6426 if ((val & 1) && ((val & 0x80000000) == 0))
6431 else if ((val & 1) == 0)
6433 for (i = 0; i < 32; i++)
6434 if ((val >>= 1) & 1)
6437 /* If we had ....10, i would be 0. The result should be
6438 30, so we need 30 - i. */
6439 fprintf (file, "%d", 30 - i);
6443 /* Otherwise, look for the first 0 bit from the left. The result is its
6444 number minus 1. We know the high-order bit is one. */
6445 for (i = 0; i < 32; i++)
6446 if (((val <<= 1) & 0x80000000) == 0)
6449 fprintf (file, "%d", i);
6452 /* %n outputs the negative of its operand. */
6455 /* Write the number of elements in the vector times 4. */
6456 if (GET_CODE (x) != PARALLEL)
6457 output_operand_lossage ("invalid %%N value");
6459 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6463 /* Similar, but subtract 1 first. */
6464 if (GET_CODE (x) != PARALLEL)
6465 output_operand_lossage ("invalid %%O value");
6467 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6471 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6473 || INT_LOWPART (x) < 0
6474 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6475 output_operand_lossage ("invalid %%p value");
6477 fprintf (file, "%d", i);
6481 /* The operand must be an indirect memory reference. The result
6482 is the register number. */
6483 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6484 || REGNO (XEXP (x, 0)) >= 32)
6485 output_operand_lossage ("invalid %%P value");
6487 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6491 /* This outputs the logical code corresponding to a boolean
6492 expression. The expression may have one or both operands
6493 negated (if one, only the first one). For condition register
6494 logical operations, it will also treat the negated
6495 CR codes as NOTs, but not handle NOTs of them. */
6497 const char *const *t = 0;
6499 enum rtx_code code = GET_CODE (x);
6500 static const char * const tbl[3][3] = {
6501 { "and", "andc", "nor" },
6502 { "or", "orc", "nand" },
6503 { "xor", "eqv", "xor" } };
6507 else if (code == IOR)
6509 else if (code == XOR)
6512 output_operand_lossage ("invalid %%q value");
/* Column selection by which operands are negated (rows chosen above).  */
6514 if (GET_CODE (XEXP (x, 0)) != NOT)
6518 if (GET_CODE (XEXP (x, 1)) == NOT)
6529 /* X is a CR register. Print the mask for `mtcrf'. */
6530 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6531 output_operand_lossage ("invalid %%R value");
6533 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6537 /* Low 5 bits of 32 - value */
6539 output_operand_lossage ("invalid %%s value");
6541 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6545 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6546 CONST_INT 32-bit mask is considered sign-extended so any
6547 transition must occur within the CONST_INT, not on the boundary. */
6548 if (! mask64_operand (x, DImode))
6549 output_operand_lossage ("invalid %%S value");
6551 val = INT_LOWPART (x);
6553 if (val & 1) /* Clear Left */
6555 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6556 if (!((val >>= 1) & 1))
6559 #if HOST_BITS_PER_WIDE_INT == 32
6560 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6562 val = CONST_DOUBLE_HIGH (x);
6567 for (i = 32; i < 64; i++)
6568 if (!((val >>= 1) & 1))
6572 /* i = index of last set bit from right
6573 mask begins at 63 - i from left */
6575 output_operand_lossage ("%%S computed all 1's mask");
6577 fprintf (file, "%d", 63 - i);
6580 else /* Clear Right */
6582 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6583 if ((val >>= 1) & 1)
6586 #if HOST_BITS_PER_WIDE_INT == 32
6587 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6589 val = CONST_DOUBLE_HIGH (x);
6591 if (val == (HOST_WIDE_INT) -1)
6594 for (i = 32; i < 64; i++)
6595 if ((val >>= 1) & 1)
6599 /* i = index of last clear bit from right
6600 mask ends at 62 - i from left */
6602 output_operand_lossage ("%%S computed all 0's mask");
6604 fprintf (file, "%d", 62 - i);
6609 /* Print the symbolic name of a branch target register. */
6610 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6611 && REGNO (x) != COUNT_REGISTER_REGNUM))
6612 output_operand_lossage ("invalid %%T value");
6613 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6614 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6616 fputs ("ctr", file);
6620 /* High-order 16 bits of constant for use in unsigned operand. */
6622 output_operand_lossage ("invalid %%u value");
6624 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6625 (INT_LOWPART (x) >> 16) & 0xffff);
6629 /* High-order 16 bits of constant for use in signed operand. */
6631 output_operand_lossage ("invalid %%v value");
6633 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6634 (INT_LOWPART (x) >> 16) & 0xffff);
6638 /* Print `u' if this has an auto-increment or auto-decrement. */
6639 if (GET_CODE (x) == MEM
6640 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6641 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6646 /* Print the trap code for this operand. */
6647 switch (GET_CODE (x))
6650 fputs ("eq", file); /* 4 */
6653 fputs ("ne", file); /* 24 */
6656 fputs ("lt", file); /* 16 */
6659 fputs ("le", file); /* 20 */
6662 fputs ("gt", file); /* 8 */
6665 fputs ("ge", file); /* 12 */
6668 fputs ("llt", file); /* 2 */
6671 fputs ("lle", file); /* 6 */
6674 fputs ("lgt", file); /* 1 */
6677 fputs ("lge", file); /* 5 */
6685 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6688 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6689 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6691 print_operand (file, x, 0);
6695 /* MB value for a PowerPC64 rldic operand. */
6696 val = (GET_CODE (x) == CONST_INT
6697 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6702 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6703 if ((val <<= 1) < 0)
6706 #if HOST_BITS_PER_WIDE_INT == 32
6707 if (GET_CODE (x) == CONST_INT && i >= 0)
6708 i += 32; /* zero-extend high-part was all 0's */
6709 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6711 val = CONST_DOUBLE_LOW (x);
6718 for ( ; i < 64; i++)
6719 if ((val <<= 1) < 0)
6724 fprintf (file, "%d", i + 1);
6728 if (GET_CODE (x) == MEM
6729 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6734 /* Like 'L', for third word of TImode */
6735 if (GET_CODE (x) == REG)
6736 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6737 else if (GET_CODE (x) == MEM)
6739 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6740 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6741 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6743 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6744 if (small_data_operand (x, GET_MODE (x)))
6745 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6746 reg_names[SMALL_DATA_REG]);
6751 /* X is a SYMBOL_REF. Write out the name preceded by a
6752 period and without any trailing data in brackets. Used for function
6753 names. If we are configured for System V (or the embedded ABI) on
6754 the PowerPC, do not emit the period, since those systems do not use
6755 TOCs and the like. */
6756 if (GET_CODE (x) != SYMBOL_REF)
6759 if (XSTR (x, 0)[0] != '.')
6761 switch (DEFAULT_ABI)
6771 case ABI_AIX_NODESC:
6777 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6779 assemble_name (file, XSTR (x, 0));
6784 /* Like 'L', for last word of TImode. */
6785 if (GET_CODE (x) == REG)
6786 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6787 else if (GET_CODE (x) == MEM)
6789 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6790 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6791 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6793 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6794 if (small_data_operand (x, GET_MODE (x)))
6795 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6796 reg_names[SMALL_DATA_REG]);
6800 /* Print AltiVec memory operand. */
6805 if (GET_CODE (x) != MEM)
6810 if (GET_CODE (tmp) == REG)
6811 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6812 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
/* rA = 0 means literal zero in the instruction, so if the base is
   r0 put it second (the index slot).  */
6814 if (REGNO (XEXP (tmp, 0)) == 0)
6815 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6816 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6818 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6819 reg_names[ REGNO (XEXP (tmp, 1)) ]);
/* No modifier: default output for registers, memory and constants.  */
6827 if (GET_CODE (x) == REG)
6828 fprintf (file, "%s", reg_names[REGNO (x)]);
6829 else if (GET_CODE (x) == MEM)
6831 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6832 know the width from the mode. */
6833 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6834 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6835 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6836 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6837 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6838 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6840 output_address (XEXP (x, 0));
6843 output_addr_const (file, x);
6847 output_operand_lossage ("invalid %%xn code");
6851 /* Print the address of an operand. */
/* NOTE(review): several branches and the closing statements are elided
   in this listing (numbering gaps), including the abort/else arms.  */
6854 print_operand_address (file, x)
6858 if (GET_CODE (x) == REG)
6859 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6860 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6861 || GET_CODE (x) == LABEL_REF)
6863 output_addr_const (file, x);
6864 if (small_data_operand (x, GET_MODE (x)))
6865 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6866 reg_names[SMALL_DATA_REG]);
6867 else if (TARGET_TOC)
6870 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
/* rA = 0 reads as literal zero, so if the base register is r0 swap
   it into the second (index) position.  */
6872 if (REGNO (XEXP (x, 0)) == 0)
6873 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6874 reg_names[ REGNO (XEXP (x, 0)) ]);
6876 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6877 reg_names[ REGNO (XEXP (x, 1)) ]);
6879 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6881 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6882 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* (lo_sum reg const) -- ELF spelling uses an @l suffix...  */
6885 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6886 && CONSTANT_P (XEXP (x, 1)))
6888 output_addr_const (file, XEXP (x, 1));
6889 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
/* ...while the Darwin spelling uses lo16(...).  */
6893 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6894 && CONSTANT_P (XEXP (x, 1)))
6896 fprintf (file, "lo16(");
6897 output_addr_const (file, XEXP (x, 1));
6898 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6901 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6903 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6905 rtx contains_minus = XEXP (x, 1);
6909 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6910 turn it into (sym) for output_addr_const. */
6911 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6912 contains_minus = XEXP (contains_minus, 0);
6914 minus = XEXP (contains_minus, 0);
6915 symref = XEXP (minus, 0);
6916 XEXP (contains_minus, 0) = symref;
/* Temporarily append "@toc" to the symbol name for output.  */
6921 name = XSTR (symref, 0);
6922 newname = alloca (strlen (name) + sizeof ("@toc"));
6923 strcpy (newname, name);
6924 strcat (newname, "@toc");
6925 XSTR (symref, 0) = newname;
6927 output_addr_const (file, XEXP (x, 1));
/* Restore the rtl we patched above.  */
6929 XSTR (symref, 0) = name;
6930 XEXP (contains_minus, 0) = minus;
6933 output_addr_const (file, XEXP (x, 1));
6935 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6941 /* Target hook for assembling integer objects. The powerpc version has
6942 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6943 is defined. It also needs to handle DI-mode objects on 64-bit
6947 rs6000_assemble_integer (x, size, aligned_p)
6952 #ifdef RELOCATABLE_NEEDS_FIXUP
6953 /* Special handling for SI values. */
6954 if (size == 4 && aligned_p)
6956 extern int in_toc_section PARAMS ((void));
6957 static int recurse = 0;
6959 /* For -mrelocatable, we mark all addresses that need to be fixed up
6960 in the .fixup section. */
6961 if (TARGET_RELOCATABLE
6962 && !in_toc_section ()
6963 && !in_text_section ()
6965 && GET_CODE (x) != CONST_INT
6966 && GET_CODE (x) != CONST_DOUBLE
/* Emit the word itself under a local label, then record that label's
   address in the .fixup section so the loader can relocate it.  */
6972 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6974 ASM_OUTPUT_LABEL (asm_out_file, buf);
6975 fprintf (asm_out_file, "\t.long\t(");
6976 output_addr_const (asm_out_file, x);
6977 fprintf (asm_out_file, ")@fixup\n");
6978 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6979 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6980 fprintf (asm_out_file, "\t.long\t");
6981 assemble_name (asm_out_file, buf);
6982 fprintf (asm_out_file, "\n\t.previous\n");
6986 /* Remove initial .'s to turn a -mcall-aixdesc function
6987 address into the address of the descriptor, not the function
6989 else if (GET_CODE (x) == SYMBOL_REF
6990 && XSTR (x, 0)[0] == '.'
6991 && DEFAULT_ABI == ABI_AIX)
6993 const char *name = XSTR (x, 0);
6994 while (*name == '.')
6997 fprintf (asm_out_file, "\t.long\t%s\n", name);
7001 #endif /* RELOCATABLE_NEEDS_FIXUP */
/* Everything else: fall back to the generic implementation.  */
7002 return default_assemble_integer (x, size, aligned_p);
/* Reverse comparison CODE for a compare done in MODE, using the
   unordered-aware reversal for floating-point condition codes.  */
7006 rs6000_reverse_condition (mode, code)
7007 enum machine_mode mode;
7010 /* Reversal of FP compares takes care -- an ordered compare
7011 becomes an unordered compare and vice versa. */
7012 if (mode == CCFPmode)
7013 return reverse_condition_maybe_unordered (code);
7015 return reverse_condition (code);
7018 /* Generate a compare for CODE. Return a brand-new rtx that
7019 represents the result of the compare. */
/* Compares rs6000_compare_op0 against rs6000_compare_op1 (file-scope
   state saved by the cmpxx expanders; see the declarations near the
   top of this file).  */
7022 rs6000_generate_compare (code)
7025 enum machine_mode comp_mode;
/* Choose the CC mode: CCFP for FP compares, CCUNS for unsigned
   integer compares (the default CCmode arm is elided here).  */
7028 if (rs6000_compare_fp_p)
7029 comp_mode = CCFPmode;
7030 else if (code == GTU || code == LTU
7031 || code == GEU || code == LEU)
7032 comp_mode = CCUNSmode;
7036 /* First, the compare. */
7037 compare_result = gen_reg_rtx (comp_mode);
7038 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7039 gen_rtx_COMPARE (comp_mode,
7041 rs6000_compare_op1)));
7043 /* Some kinds of FP comparisons need an OR operation;
7044 except for flag_unsafe_math_optimizations we don't bother. */
7045 if (rs6000_compare_fp_p
7046 && ! flag_unsafe_math_optimizations
7047 && (code == LE || code == GE
7048 || code == UNEQ || code == LTGT
7049 || code == UNGT || code == UNLT))
7051 enum rtx_code or1, or2;
7052 rtx or1_rtx, or2_rtx, compare2_rtx;
7053 rtx or_result = gen_reg_rtx (CCEQmode);
/* Split the composite condition into two single-bit conditions
   that will be ORed together.  */
7057 case LE: or1 = LT; or2 = EQ; break;
7058 case GE: or1 = GT; or2 = EQ; break;
7059 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7060 case LTGT: or1 = LT; or2 = GT; break;
7061 case UNGT: or1 = UNORDERED; or2 = GT; break;
7062 case UNLT: or1 = UNORDERED; or2 = LT; break;
7065 validate_condition_mode (or1, comp_mode);
7066 validate_condition_mode (or2, comp_mode);
7067 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7068 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7069 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7070 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7072 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
/* The caller now tests the ORed result for equality.  */
7074 compare_result = or_result;
7078 validate_condition_mode (code, GET_MODE (compare_result));
7080 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7084 /* Emit the RTL for an sCOND pattern. */
7087 rs6000_emit_sCOND (code, result)
7092 enum machine_mode op_mode;
7094 condition_rtx = rs6000_generate_compare (code);
/* Determine the mode of the values being compared; fall back to
   op1's mode when op0 is a VOIDmode constant.  */
7096 op_mode = GET_MODE (rs6000_compare_op0);
7097 if (op_mode == VOIDmode)
7098 op_mode = GET_MODE (rs6000_compare_op1);
/* On 64-bit targets a DImode (or FP) scc produces a DImode value
   that must be narrowed into RESULT.  */
7100 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7102 PUT_MODE (condition_rtx, DImode);
7103 convert_move (result, condition_rtx, 0);
7107 PUT_MODE (condition_rtx, SImode);
7108 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7112 /* Emit a branch of kind CODE to location LOC. */
7115 rs6000_emit_cbranch (code, loc)
7119 rtx condition_rtx, loc_ref;
/* Build (set pc (if_then_else cond (label_ref LOC) pc)) -- a
   conditional jump on the generated compare.  */
7121 condition_rtx = rs6000_generate_compare (code);
7122 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7123 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7124 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7128 /* Return the string to output a conditional branch to LABEL, which is
7129 the operand number of the label, or -1 if the branch is really a
7132 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7133 condition code register and its mode specifies what kind of
7136 REVERSED is non-zero if we should reverse the sense of the comparison.
7138 INSN is the insn.
   NOTE(review): interior lines (switch head, buffer setup, mnemonic
   selection) appear elided during extraction; code kept byte-identical.  */
7141 output_cbranch (op, label, reversed, insn)
7147 static char string[64];
7148 enum rtx_code code = GET_CODE (op);
7149 rtx cc_reg = XEXP (op, 0);
7150 enum machine_mode mode = GET_MODE (cc_reg);
7151 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
/* A length attribute of 8 means the short conditional branch cannot
   reach and we must emit the long (inverted + unconditional) form.  */
7152 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7153 int really_reversed = reversed ^ need_longbranch;
7159 validate_condition_mode (code, mode);
7161 /* Work out which way this really branches. We could use
7162 reverse_condition_maybe_unordered here always but this
7163 makes the resulting assembler clearer. */
7164 if (really_reversed)
7165 code = rs6000_reverse_condition (mode, code);
7169 /* Not all of these are actually distinct opcodes, but
7170 we distinguish them for clarity of the resulting assembler. */
7172 ccode = "ne"; break;
7174 ccode = "eq"; break;
7176 ccode = "ge"; break;
7177 case GT: case GTU: case UNGT:
7178 ccode = "gt"; break;
7180 ccode = "le"; break;
7181 case LT: case LTU: case UNLT:
7182 ccode = "lt"; break;
7183 case UNORDERED: ccode = "un"; break;
7184 case ORDERED: ccode = "nu"; break;
7185 case UNGE: ccode = "nl"; break;
7186 case UNLE: ccode = "ng"; break;
7191 /* Maybe we have a guess as to how likely the branch is.
7192 The old mnemonics don't have a way to specify this information. */
7193 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX)
7194 if (note != NULL_RTX)
7196 /* PROB is the difference from 50%. */
7197 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7199 /* For branches that are very close to 50%, assume not-taken. */
7200 if (abs (prob) > REG_BR_PROB_BASE / 20
7201 && ((prob > 0) ^ need_longbranch))
/* The {old|new} braces are assembler-dialect alternation (POWER vs
   PowerPC mnemonics); PRED carries the +/- prediction suffix.  */
7210 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7212 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7214 /* We need to escape any '%' characters in the reg_names string.
7215 Assume they'd only be the first character... */
7216 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7218 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7222 /* If the branch distance was too far, we may have to use an
7223 unconditional branch to go the distance. */
7224 if (need_longbranch)
7225 s += sprintf (s, ",$+8\n\tb %s", label);
7227 s += sprintf (s, ",%s", label);
7233 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7234 operands of the last comparison is nonzero/true, FALSE_COND if it
7235 is zero/false. Return 0 if the hardware has no such operation.
   Implemented via the PowerPC fsel instruction (FP only).
   NOTE(review): interior lines (switch/case structure, abort calls,
   several branch bodies) appear elided during extraction.  */
7238 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7244 enum rtx_code code = GET_CODE (op);
7245 rtx op0 = rs6000_compare_op0;
7246 rtx op1 = rs6000_compare_op1;
7248 enum machine_mode compare_mode = GET_MODE (op0);
7249 enum machine_mode result_mode = GET_MODE (dest);
7252 /* These modes should always match. */
7253 if ( GET_MODE (op1) != compare_mode )
7255 if ( GET_MODE (true_cond) != result_mode )
7257 if ( GET_MODE (false_cond) != result_mode )
7260 /* First, work out if the hardware can do this at all, or
7261 if it's too slow... */
7262 /* If the comparison is an integer one, since we only have fsel
7263 it'll be cheaper to use a branch. */
7264 if (! rs6000_compare_fp_p)
7267 /* Eliminate half of the comparisons by switching operands, this
7268 makes the remaining code simpler. */
7269 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7270 || code == LTGT || code == LT)
7272 code = reverse_condition_maybe_unordered (code);
7274 true_cond = false_cond;
7278 /* UNEQ and LTGT take four instructions for a comparison with zero,
7279 it'll probably be faster to use a branch here too. */
7283 if (GET_CODE (op1) == CONST_DOUBLE)
7284 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7286 /* We're going to try to implement comparisons by performing
7287 a subtract, then comparing against zero. Unfortunately,
7288 Inf - Inf is NaN which is not zero, and so if we don't
7289 know that the operand is finite and the comparison
7290 would treat EQ different to UNORDERED, we can't do it. */
7291 if (! flag_unsafe_math_optimizations
7292 && code != GT && code != UNGE
7293 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7294 /* Constructs of the form (a OP b ? a : b) are safe. */
7295 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7296 || (! rtx_equal_p (op0, true_cond)
7297 && ! rtx_equal_p (op1, true_cond))))
7299 /* At this point we know we can use fsel. */
7301 /* Reduce the comparison to a comparison against zero. */
7302 temp = gen_reg_rtx (compare_mode);
7303 emit_insn (gen_rtx_SET (VOIDmode, temp,
7304 gen_rtx_MINUS (compare_mode, op0, op1)));
7306 op1 = CONST0_RTX (compare_mode);
7308 /* If we don't care about NaNs we can reduce some of the comparisons
7309 down to faster ones. */
7310 if (flag_unsafe_math_optimizations)
7316 true_cond = false_cond;
7329 /* Now, reduce everything down to a GE. */
/* Each case below rewrites op0 so that the original condition becomes
   (GE op0 0), which fsel implements directly.  */
7336 temp = gen_reg_rtx (compare_mode);
7337 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7342 temp = gen_reg_rtx (compare_mode);
7343 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7348 temp = gen_reg_rtx (compare_mode);
7349 emit_insn (gen_rtx_SET (VOIDmode, temp,
7350 gen_rtx_NEG (compare_mode,
7351 gen_rtx_ABS (compare_mode, op0))));
7356 temp = gen_reg_rtx (result_mode);
7357 emit_insn (gen_rtx_SET (VOIDmode, temp,
7358 gen_rtx_IF_THEN_ELSE (result_mode,
7359 gen_rtx_GE (VOIDmode,
7361 true_cond, false_cond)));
7363 true_cond = false_cond;
7365 temp = gen_reg_rtx (compare_mode);
7366 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7371 temp = gen_reg_rtx (result_mode);
7372 emit_insn (gen_rtx_SET (VOIDmode, temp,
7373 gen_rtx_IF_THEN_ELSE (result_mode,
7374 gen_rtx_GE (VOIDmode,
7376 true_cond, false_cond)));
7378 false_cond = true_cond;
7380 temp = gen_reg_rtx (compare_mode);
7381 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
/* Final fsel-style conditional move into DEST.  */
7389 emit_insn (gen_rtx_SET (VOIDmode, dest,
7390 gen_rtx_IF_THEN_ELSE (result_mode,
7391 gen_rtx_GE (VOIDmode,
7393 true_cond, false_cond)));
/* Emit a min/max of OP0 and OP1 into DEST using a conditional move on
   a GE comparison; CODE selects SMAX/UMAX vs the min forms.  Falls
   back to abort (elided) when emit_conditional_move fails.
   NOTE(review): interior lines appear elided during extraction.  */
7398 rs6000_emit_minmax (dest, code, op0, op1)
7404 enum machine_mode mode = GET_MODE (op0);
/* For max, move op0 when op0 >= op1; the min case (elided operands)
   presumably swaps the move arms — confirm against full source.  */
7406 if (code == SMAX || code == UMAX)
7407 target = emit_conditional_move (dest, GE, op0, op1, mode,
7410 target = emit_conditional_move (dest, GE, op0, op1, mode,
7412 if (target == NULL_RTX)
7415 emit_move_insn (dest, target);
7418 /* This page contains routines that are used to determine what the
7419 function prologue and epilogue code will do and write them out. */
7421 /* Return the first fixed-point register that is required to be
7422 saved. 32 if none.
   NOTE(review): interior lines (declaration of first_reg, loop break,
   final return) appear elided during extraction.  */
7425 first_reg_to_save ()
7429 /* Find lowest numbered live register. */
7430 for (first_reg = 13; first_reg <= 31; first_reg++)
7431 if (regs_ever_live[first_reg]
7432 && (! call_used_regs[first_reg]
7433 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7434 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7435 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7438 if (current_function_profile)
7440 /* AIX must save/restore every register that contains a parameter
7441 before/after the .__mcount call plus an additional register
7442 for the static chain, if needed; use registers from 30 down to 22
7444 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7446 int last_parm_reg, profile_first_reg;
7448 /* Figure out last used parameter register. The proper thing
7449 to do is to walk incoming args of the function. A function
7450 might have live parameter registers even if it has no
7452 for (last_parm_reg = 10;
7453 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7457 /* Calculate first reg for saving parameter registers
7459 Skip reg 31 which may contain the frame pointer. */
7460 profile_first_reg = (33 - last_parm_reg
7461 - (current_function_needs_context ? 1 : 0));
7463 /* Need to skip another reg to account for R31 being PICBASE
7464 (when flag_pic is set) or R30 being used as the frame
7465 pointer (when flag_pic is not set). */
7466 --profile_first_reg;
7468 /* Do not save frame pointer if no parameters need to be saved. */
7469 if (profile_first_reg == 31)
7470 profile_first_reg = 32;
7472 if (first_reg > profile_first_reg)
7473 first_reg = profile_first_reg;
7476 /* SVR4 may need one register to preserve the static chain. */
7477 else if (current_function_needs_context)
7479 /* Skip reg 31 which may contain the frame pointer. */
/* Under PIC, make sure the PIC offset table register is in the saved
   range even if not otherwise live.  */
7486 if (flag_pic && current_function_uses_pic_offset_table &&
7487 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7488 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7494 /* Similar, for FP regs: return the first FP register (hard regno,
   FP regs occupy 32..63) that must be saved; 64 if none.
   NOTE(review): loop break and return are elided in this extraction.  */
7497 first_fp_reg_to_save ()
7501 /* Find lowest numbered live register. */
7502 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7503 if (regs_ever_live[first_reg])
7509 /* Similar, for AltiVec regs: first AltiVec register requiring a save,
   or LAST_ALTIVEC_REGNO + 1 if none (or if the AltiVec ABI is off).
   NOTE(review): loop break and return are elided in this extraction.  */
7512 first_altivec_reg_to_save ()
7516 /* Stack frame remains as is unless we are in AltiVec ABI. */
7517 if (! TARGET_ALTIVEC_ABI)
7518 return LAST_ALTIVEC_REGNO + 1;
7520 /* Find lowest numbered live register.  V20 is the first callee-saved
   AltiVec register, hence the +20 start offset.  */
7521 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7522 if (regs_ever_live[i])
7528 /* Return a 32-bit mask of the AltiVec registers we need to set in
7529 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7530 the 32-bit word is 0.
   NOTE(review): the early-exit on an empty mask and the final return
   are elided in this extraction.  */
7533 compute_vrsave_mask ()
7535 unsigned int i, mask = 0;
7537 /* First, find out if we use _any_ altivec registers. */
7538 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7539 if (regs_ever_live[i])
7540 mask |= ALTIVEC_REG_BIT (i);
7545 /* Next, add all registers that are call-clobbered. We do this
7546 because post-reload register optimizers such as regrename_optimize
7547 may choose to use them. They never change the register class
7548 chosen by reload, so cannot create new uses of altivec registers
7549 if there were none before, so the early exit above is safe. */
7550 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7551 altivec registers not saved in the mask, which might well make the
7552 adjustments below more effective in eliding the save/restore of
7553 VRSAVE in small functions. */
7554 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7555 if (call_used_regs[i])
7556 mask |= ALTIVEC_REG_BIT (i);
7558 /* Next, remove the argument registers from the set. These must
7559 be in the VRSAVE mask set by the caller, so we don't need to add
7560 them in again. More importantly, the mask we compute here is
7561 used to generate CLOBBERs in the set_vrsave insn, and we do not
7562 wish the argument registers to die. */
7563 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7564 mask &= ~ALTIVEC_REG_BIT (i);
7566 /* Similarly, remove the return value from the set. */
7569 diddle_return_value (is_altivec_return_reg, &yes);
7571 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
/* Callback for diddle_return_value: set *XYES when REG is the AltiVec
   return-value register (used by compute_vrsave_mask above).  */
7578 is_altivec_return_reg (reg, xyes)
7582 bool *yes = (bool *) xyes;
7583 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7588 /* Calculate the stack information for the current function. This is
7589 complicated by having two separate calling sequences, the AIX calling
7590 sequence and the V.4 calling sequence.
7592 AIX (and Darwin/Mac OS X) stack frames look like:
7594 SP----> +---------------------------------------+
7595 | back chain to caller | 0 0
7596 +---------------------------------------+
7597 | saved CR | 4 8 (8-11)
7598 +---------------------------------------+
7600 +---------------------------------------+
7601 | reserved for compilers | 12 24
7602 +---------------------------------------+
7603 | reserved for binders | 16 32
7604 +---------------------------------------+
7605 | saved TOC pointer | 20 40
7606 +---------------------------------------+
7607 | Parameter save area (P) | 24 48
7608 +---------------------------------------+
7609 | Alloca space (A) | 24+P etc.
7610 +---------------------------------------+
7611 | Local variable space (L) | 24+P+A
7612 +---------------------------------------+
7613 | Float/int conversion temporary (X) | 24+P+A+L
7614 +---------------------------------------+
7615 | Save area for AltiVec registers (W) | 24+P+A+L+X
7616 +---------------------------------------+
7617 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7618 +---------------------------------------+
7619 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7620 +---------------------------------------+
7621 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
7622 +---------------------------------------+
7623 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
7624 +---------------------------------------+
7625 old SP->| back chain to caller's caller |
7626 +---------------------------------------+
7628 The required alignment for AIX configurations is two words (i.e., 8
7632 V.4 stack frames look like:
7634 SP----> +---------------------------------------+
7635 | back chain to caller | 0
7636 +---------------------------------------+
7637 | caller's saved LR | 4
7638 +---------------------------------------+
7639 | Parameter save area (P) | 8
7640 +---------------------------------------+
7641 | Alloca space (A) | 8+P
7642 +---------------------------------------+
7643 | Varargs save area (V) | 8+P+A
7644 +---------------------------------------+
7645 | Local variable space (L) | 8+P+A+V
7646 +---------------------------------------+
7647 | Float/int conversion temporary (X) | 8+P+A+V+L
7648 +---------------------------------------+
7649 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7650 +---------------------------------------+
7651 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7652 +---------------------------------------+
7653 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7654 +---------------------------------------+
7655 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7656 +---------------------------------------+
7657 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7658 +---------------------------------------+
7659 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7660 +---------------------------------------+
7661 old SP->| back chain to caller's caller |
7662 +---------------------------------------+
7664 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7665 given. (But note below and in sysv4.h that we require only 8 and
7666 may round up the size of our stack frame anyways. The historical
7667 reason is early versions of powerpc-linux which didn't properly
7668 align the stack at program startup. A happy side-effect is that
7669 -mno-eabi libraries can be used with -meabi programs.)
7671 The EABI configuration defaults to the V.4 layout, unless
7672 -mcall-aix is used, in which case the AIX layout is used. However,
7673 the stack alignment requirements may differ. If -mno-eabi is not
7674 given, the required stack alignment is 8 bytes; if -mno-eabi is
7675 given, the required alignment is 16 bytes. (But see V.4 comment
7678 #ifndef ABI_STACK_BOUNDARY
7679 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
/* Compute the stack-frame layout for the current function and return it
   in a static rs6000_stack_t (see the big layout comment above).
   NOTE(review): interior lines (the zero-fill of info, switch head for
   the ABI offset calculation, several else branches, final return)
   appear elided during extraction; code kept byte-identical.  */
7683 rs6000_stack_info ()
7685 static rs6000_stack_t info, zero_info;
7686 rs6000_stack_t *info_ptr = &info;
7687 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7688 enum rs6000_abi abi;
7692 /* Zero all fields portably. */
7695 /* Select which calling sequence. */
7696 info_ptr->abi = abi = DEFAULT_ABI;
7698 /* Calculate which registers need to be saved & save area size. */
7699 info_ptr->first_gp_reg_save = first_reg_to_save ();
7700 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7701 even if it currently looks like we won't. */
7702 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7703 || (flag_pic == 1 && abi == ABI_V4)
7704 || (flag_pic && abi == ABI_DARWIN))
7705 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7706 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7708 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7710 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7711 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7713 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7714 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7715 - info_ptr->first_altivec_reg_save);
7717 /* Does this function call anything? */
7718 info_ptr->calls_p = (! current_function_is_leaf
7719 || cfun->machine->ra_needs_full_frame);
7721 /* Determine if we need to save the link register. */
7722 if (rs6000_ra_ever_killed ()
7723 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7724 #ifdef TARGET_RELOCATABLE
7725 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7727 || (info_ptr->first_fp_reg_save != 64
7728 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7729 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7730 || (abi == ABI_V4 && current_function_calls_alloca)
7731 || (DEFAULT_ABI == ABI_DARWIN
7733 && current_function_uses_pic_offset_table)
7734 || info_ptr->calls_p)
7736 info_ptr->lr_save_p = 1;
7737 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7740 /* Determine if we need to save the condition code registers.
   Only CR2..CR4 are callee-saved.  */
7741 if (regs_ever_live[CR2_REGNO]
7742 || regs_ever_live[CR3_REGNO]
7743 || regs_ever_live[CR4_REGNO])
7745 info_ptr->cr_save_p = 1;
7747 info_ptr->cr_size = reg_size;
7750 /* If the current function calls __builtin_eh_return, then we need
7751 to allocate stack space for registers that will hold data for
7752 the exception handler. */
7753 if (current_function_calls_eh_return)
7756 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7758 ehrd_size = i * UNITS_PER_WORD;
7763 /* Determine various sizes. */
7764 info_ptr->reg_size = reg_size;
7765 info_ptr->fixed_size = RS6000_SAVE_AREA;
7766 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7767 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7768 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7771 if (TARGET_ALTIVEC_ABI)
7773 info_ptr->vrsave_mask = compute_vrsave_mask ();
7774 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7778 info_ptr->vrsave_mask = 0;
7779 info_ptr->vrsave_size = 0;
7782 /* Calculate the offsets.  Offsets are negative from the incoming
   stack pointer for the AIX-style layout (see layout comment).  */
7790 case ABI_AIX_NODESC:
7792 info_ptr->fp_save_offset = - info_ptr->fp_size;
7793 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7795 if (TARGET_ALTIVEC_ABI)
7797 info_ptr->vrsave_save_offset
7798 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7800 /* Align stack so vector save area is on a quadword boundary. */
7801 if (info_ptr->altivec_size != 0)
7802 info_ptr->altivec_padding_size
7803 = 16 - (-info_ptr->vrsave_save_offset % 16);
7805 info_ptr->altivec_padding_size = 0;
7807 info_ptr->altivec_save_offset
7808 = info_ptr->vrsave_save_offset
7809 - info_ptr->altivec_padding_size
7810 - info_ptr->altivec_size;
7812 /* Adjust for AltiVec case. */
7813 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7816 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7817 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7818 info_ptr->lr_save_offset = 2*reg_size;
7822 info_ptr->fp_save_offset = - info_ptr->fp_size;
7823 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7824 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7826 if (TARGET_ALTIVEC_ABI)
7828 info_ptr->vrsave_save_offset
7829 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7831 /* Align stack so vector save area is on a quadword boundary. */
7832 if (info_ptr->altivec_size != 0)
7833 info_ptr->altivec_padding_size
7834 = 16 - (-info_ptr->vrsave_save_offset % 16);
7836 info_ptr->altivec_padding_size = 0;
7838 info_ptr->altivec_save_offset
7839 = info_ptr->vrsave_save_offset
7840 - info_ptr->altivec_padding_size
7841 - info_ptr->altivec_size;
7843 /* Adjust for AltiVec case. */
7844 info_ptr->toc_save_offset
7845 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7848 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7849 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7850 info_ptr->lr_save_offset = reg_size;
7854 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7856 + info_ptr->altivec_size
7857 + info_ptr->altivec_padding_size
7858 + info_ptr->vrsave_size
7862 + info_ptr->vrsave_size
7863 + info_ptr->toc_size,
7864 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7867 total_raw_size = (info_ptr->vars_size
7868 + info_ptr->parm_size
7869 + info_ptr->save_size
7870 + info_ptr->varargs_size
7871 + info_ptr->fixed_size);
7873 info_ptr->total_size =
7874 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7876 /* Determine if we need to allocate any stack frame:
7878 For AIX we need to push the stack if a frame pointer is needed
7879 (because the stack might be dynamically adjusted), if we are
7880 debugging, if we make calls, or if the sum of fp_save, gp_save,
7881 and local variables are more than the space needed to save all
7882 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7883 + 18*8 = 288 (GPR13 reserved).
7885 For V.4 we don't have the stack cushion that AIX uses, but assume
7886 that the debugger can handle stackless frames. */
7888 if (info_ptr->calls_p)
7889 info_ptr->push_p = 1;
7891 else if (abi == ABI_V4)
7892 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7895 info_ptr->push_p = (frame_pointer_needed
7896 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7897 || ((total_raw_size - info_ptr->fixed_size)
7898 > (TARGET_32BIT ? 220 : 288)));
7900 /* Zero offsets if we're not saving those registers. */
7901 if (info_ptr->fp_size == 0)
7902 info_ptr->fp_save_offset = 0;
7904 if (info_ptr->gp_size == 0)
7905 info_ptr->gp_save_offset = 0;
7907 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7908 info_ptr->altivec_save_offset = 0;
7910 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7911 info_ptr->vrsave_save_offset = 0;
7913 if (! info_ptr->lr_save_p)
7914 info_ptr->lr_save_offset = 0;
7916 if (! info_ptr->cr_save_p)
7917 info_ptr->cr_save_offset = 0;
7919 if (! info_ptr->toc_save_p)
7920 info_ptr->toc_save_offset = 0;
/* Dump the fields of INFO (or of a freshly computed rs6000_stack_info ()
   when INFO is null) to stderr, printing only nonzero/non-default
   fields.  Debugging aid only; no side effects on codegen state.
   NOTE(review): some lines (switch head, a few if-guards) appear
   elided during extraction; code kept byte-identical.  */
7926 debug_stack_info (info)
7927 rs6000_stack_t *info;
7929 const char *abi_string;
7932 info = rs6000_stack_info ();
7934 fprintf (stderr, "\nStack information for function %s:\n",
7935 ((current_function_decl && DECL_NAME (current_function_decl))
7936 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7941 default: abi_string = "Unknown"; break;
7942 case ABI_NONE: abi_string = "NONE"; break;
7944 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7945 case ABI_DARWIN: abi_string = "Darwin"; break;
7946 case ABI_V4: abi_string = "V.4"; break;
7949 fprintf (stderr, "\tABI = %5s\n", abi_string);
7951 if (TARGET_ALTIVEC_ABI)
7952 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7954 if (info->first_gp_reg_save != 32)
7955 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7957 if (info->first_fp_reg_save != 64)
7958 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7960 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7961 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7962 info->first_altivec_reg_save);
7964 if (info->lr_save_p)
7965 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7967 if (info->cr_save_p)
7968 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7970 if (info->toc_save_p)
7971 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7973 if (info->vrsave_mask)
7974 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7977 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7980 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7982 if (info->gp_save_offset)
7983 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7985 if (info->fp_save_offset)
7986 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7988 if (info->altivec_save_offset)
7989 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7990 info->altivec_save_offset);
7992 if (info->vrsave_save_offset)
7993 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7994 info->vrsave_save_offset);
7996 if (info->lr_save_offset)
7997 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7999 if (info->cr_save_offset)
8000 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8002 if (info->toc_save_offset)
8003 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8005 if (info->varargs_save_offset)
8006 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8008 if (info->total_size)
8009 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8011 if (info->varargs_size)
8012 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8014 if (info->vars_size)
8015 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8017 if (info->parm_size)
8018 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8020 if (info->fixed_size)
8021 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8024 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8027 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8029 if (info->altivec_size)
8030 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8032 if (info->vrsave_size)
8033 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8035 if (info->altivec_padding_size)
8036 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8037 info->altivec_padding_size);
8040 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8043 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8046 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8048 if (info->save_size)
8049 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8051 if (info->reg_size != 4)
8052 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8054 fprintf (stderr, "\n");
/* Implement RETURN_ADDR_RTX: return an rtx for the return address COUNT
   frames up, given FRAME.  For non-trivial cases (COUNT != 0 or PIC)
   read it out of the stack frame; otherwise use the saved initial LR.
   NOTE(review): interior lines of the memory-load branch are elided.  */
8058 rs6000_return_addr (count, frame)
8062 /* Currently we don't optimize very well between prolog and body
8063 code and for PIC code the code can be actually quite bad, so
8064 don't try to be too clever here. */
8065 if (count != 0 || flag_pic != 0)
8067 cfun->machine->ra_needs_full_frame = 1;
8074 plus_constant (copy_to_reg
8075 (gen_rtx_MEM (Pmode,
8076 memory_address (Pmode, frame))),
8077 RETURN_ADDRESS_OFFSET)));
8080 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
/* Return nonzero if the link register may have been clobbered in the
   current function (so the prologue must save it).
   NOTE(review): interior lines (insn walk setup, trailing arguments of
   reg_set_between_p) are elided in this extraction.  */
8084 rs6000_ra_ever_killed ()
8088 #ifdef ASM_OUTPUT_MI_THUNK
8089 if (current_function_is_thunk)
8092 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8093 || cfun->machine->ra_needs_full_frame)
8094 return regs_ever_live[LINK_REGISTER_REGNUM];
/* Otherwise scan the whole insn stream for an LR clobber.  */
8096 push_topmost_sequence ();
8098 pop_topmost_sequence ();
8100 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8104 /* Add a REG_MAYBE_DEAD note to the insn, so later passes may delete
   it if the value turns out to be unused (used for TOC setup).  */
8106 rs6000_maybe_dead (insn)
8109 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8114 /* Emit instructions needed to load the TOC register.
8115 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8116 a constant pool; or for SVR4 -fpic.  FROMPROLOG nonzero means we are
   emitting into the prologue, so scratch hard regs (LR, r0) are used
   instead of pseudos.
   NOTE(review): several interior lines (buf declarations, some insn
   emissions) are elided in this extraction; code kept byte-identical.  */
8119 rs6000_emit_load_toc_table (fromprolog)
8123 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8125 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8127 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8129 rtx temp = (fromprolog
8130 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8131 : gen_reg_rtx (Pmode));
8132 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8133 rs6000_maybe_dead (emit_move_insn (dest, temp));
8135 else if (flag_pic == 2)
8138 rtx tempLR = (fromprolog
8139 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8140 : gen_reg_rtx (Pmode));
8141 rtx temp0 = (fromprolog
8142 ? gen_rtx_REG (Pmode, 0)
8143 : gen_reg_rtx (Pmode));
8146 /* possibly create the toc section */
8147 if (! toc_initialized)
8150 function_section (current_function_decl);
8157 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8158 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8160 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8161 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8163 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8165 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8166 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
/* Non-prologue flag_pic==2 path: compute the TOC address from a
   PC-relative label pair and add it in.  */
8173 static int reload_toc_labelno = 0;
8175 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8177 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8178 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8180 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8183 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8184 rs6000_maybe_dead (emit_move_insn (temp0,
8185 gen_rtx_MEM (Pmode, dest)));
8187 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8189 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8191 /* This is for AIX code running in non-PIC ELF. */
8194 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8195 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8197 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8198 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
/* AIX (or 64-bit) fallback: reload the TOC pointer from its slot.  */
8206 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8208 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
/* Return the alias set used for TOC references, creating it lazily on
   first use (the static SET caches it across calls).  */
8213 get_TOC_alias_set ()
8215 static int set = -1;
8217 set = new_alias_set ();
8221 /* This returns nonzero if the current function uses the TOC. This is
8222 determined by the presence of (unspec ... 7), which is generated by
8223 the various load_toc_* patterns.
   NOTE(review): the function's defining line and return statements are
   elided in this extraction; only the insn-scanning loop is visible.  */
8230 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8233 rtx pat = PATTERN (insn);
8236 if (GET_CODE (pat) == PARALLEL)
8237 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8238 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8239 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
/* Build the rtx for a TOC-relative reference to SYMBOL:
   (plus TOC-reg (const (minus symbol toc-label))).  */
8246 create_TOC_reference (symbol)
8249 return gen_rtx_PLUS (Pmode,
8250 gen_rtx_REG (Pmode, TOC_REGISTER),
8251 gen_rtx_CONST (Pmode,
8252 gen_rtx_MINUS (Pmode, symbol,
8253 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8257 /* __throw will restore its own return address to be the same as the
8258 return address of the function that the throw is being made to.
8259 This is unfortunate, because we want to check the original
8260 return address to see if we need to restore the TOC.
8261 So we have to squirrel it away here.
8262 This is used only in compiling __throw and __rethrow.
8264 Most of this code should be removed by CSE. */
8265 static rtx insn_after_throw;
8267 /* This does the saving...  Capture (into insn_after_throw) the insn
   word found at return-address + 2 words up the caller's frame, for
   later inspection by rs6000_emit_eh_toc_restore.  AIX only.  */
8269 rs6000_aix_emit_builtin_unwind_init ()
8272 rtx stack_top = gen_reg_rtx (Pmode);
8273 rtx opcode_addr = gen_reg_rtx (Pmode);
8275 insn_after_throw = gen_reg_rtx (SImode);
/* Follow the back chain to the caller's frame.  */
8277 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8278 emit_move_insn (stack_top, mem);
8280 mem = gen_rtx_MEM (Pmode,
8281 gen_rtx_PLUS (Pmode, stack_top,
8282 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8283 emit_move_insn (opcode_addr, mem);
8284 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8287 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8288 in _eh.o). Only used on AIX.
8290 The idea is that on AIX, function calls look like this:
8291 bl somefunction-trampoline
8295 somefunction-trampoline:
8297 ... load function address in the count register ...
8299 or like this, if the linker determines that this is not a cross-module call
8300 and so the TOC need not be restored:
8303 or like this, if the compiler could determine that this is not a
8306 now, the tricky bit here is that register 2 is saved and restored
8307 by the _linker_, so we can't readily generate debugging information
8308 for it. So we need to go back up the call chain looking at the
8309 insns at return addresses to see which calls saved the TOC register
8310 and so see where it gets restored from.
8312 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8313 just before the actual epilogue.
8315 On the bright side, this incurs no space or time overhead unless an
8316 exception is thrown, except for the extra code in libgcc.a.
8318 The parameter STACKSIZE is a register containing (at runtime)
8319 the amount to be popped off the stack in addition to the stack frame
8320 of this routine (which will be __throw or __rethrow, and so is
8321 guaranteed to have a stack frame). */
/* NOTE(review): elided lines (gaps in embedded numbering) hide the
   return type, parameter declaration, braces and some statements.  */
8324 rs6000_emit_eh_toc_restore (stacksize)
8328 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8329 rtx tocompare = gen_reg_rtx (SImode);
8330 rtx opcode = gen_reg_rtx (SImode);
8331 rtx opcode_addr = gen_reg_rtx (Pmode);
8333 rtx loop_start = gen_label_rtx ();
8334 rtx no_toc_restore_needed = gen_label_rtx ();
8335 rtx loop_exit = gen_label_rtx ();
/* Start at our own frame: dereference the frame pointer (back chain).  */
8337 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8338 set_mem_alias_set (mem, rs6000_sr_alias_set);
8339 emit_move_insn (bottom_of_stack, mem);
/* top_of_stack bounds the walk: bottom_of_stack + STACKSIZE.  */
8341 top_of_stack = expand_binop (Pmode, add_optab,
8342 bottom_of_stack, stacksize,
8343 NULL_RTX, 1, OPTAB_WIDEN);
/* The opcode we search for -- presumably the "reload r2 from the
   linkage area" instruction: lwz r2,20(r1) on 32-bit, ld r2,40(r1) on
   64-bit.  NOTE(review): confirm encodings against the PowerPC ISA.  */
8345 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8346 : 0xE8410028, SImode));
/* insn_after_throw was squirreled away by
   rs6000_aix_emit_builtin_unwind_init; it seeds the first comparison.  */
8348 if (insn_after_throw == NULL_RTX)
8350 emit_move_insn (opcode, insn_after_throw)
8352 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8353 emit_label (loop_start);
/* If the insn at the return address is the TOC-restore opcode, reload
   r2 from this frame's TOC save slot (5 words up the frame).  */
8355 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8356 SImode, NULL_RTX, NULL_RTX,
8357 no_toc_restore_needed);
8359 mem = gen_rtx_MEM (Pmode,
8360 gen_rtx_PLUS (Pmode, bottom_of_stack,
8361 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8362 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8364 emit_label (no_toc_restore_needed);
/* Stop once we have walked up to top_of_stack.  */
8365 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8366 Pmode, NULL_RTX, NULL_RTX,
/* Advance to the previous frame via the back-chain word, then fetch
   the insn at that frame's return address for the next iteration.  */
8369 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8370 set_mem_alias_set (mem, rs6000_sr_alias_set);
8371 emit_move_insn (bottom_of_stack, mem);
8373 mem = gen_rtx_MEM (Pmode,
8374 gen_rtx_PLUS (Pmode, bottom_of_stack,
8375 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8376 emit_move_insn (opcode_addr, mem);
8377 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8379 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8380 emit_jump (loop_start);
8381 emit_note (NULL, NOTE_INSN_LOOP_END);
8382 emit_label (loop_exit);
8384 #endif /* TARGET_AIX */
8386 /* This ties together stack memory (MEM with an alias set of
8387 rs6000_sr_alias_set) and the change to the stack pointer. */
/* Emits a "stack_tie" insn: a BLKmode MEM based on r1 in the save-area
   alias set, acting as a scheduling barrier between register save/restore
   MEMs and stack-pointer updates.  NOTE(review): return type and braces
   are elided from this listing.  */
8390 rs6000_emit_stack_tie ()
8392 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8394 set_mem_alias_set (mem, rs6000_sr_alias_set);
8395 emit_insn (gen_stack_tie (mem));
8398 /* Emit the correct code for allocating stack space, as insns.
8399 If COPY_R12, make sure a copy of the old frame is left in r12.
8400 The generated code may use hard register 0 as a temporary. */
/* NOTE(review): elided lines hide the return type, parameter
   declarations, braces and some operands below.  */
8403 rs6000_emit_allocate_stack (size, copy_r12)
8408 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8409 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
/* Stack grows downward: we add the negated size to r1.  */
8410 rtx todec = GEN_INT (-size);
/* -fstack-limit support: trap before moving r1 below the limit.  */
8412 if (current_function_limit_stack)
8414 if (REG_P (stack_limit_rtx)
8415 && REGNO (stack_limit_rtx) > 1
8416 && REGNO (stack_limit_rtx) <= 31)
/* Limit is in a register: compute limit+size into r0, trap if the
   new sp would be below it.  */
8418 emit_insn (Pmode == SImode
8419 ? gen_addsi3 (tmp_reg,
8422 : gen_adddi3 (tmp_reg,
8426 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
/* Limit is a symbol (V.4/ELF only): materialize symbol+size with
   elf_high/elf_low, then trap the same way.  */
8429 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8431 && DEFAULT_ABI == ABI_V4)
8433 rtx toload = gen_rtx_CONST (VOIDmode,
8434 gen_rtx_PLUS (Pmode,
8438 emit_insn (gen_elf_high (tmp_reg, toload));
8439 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8440 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8444 warning ("stack limit expression is not supported");
/* Keep the old sp in r12 when the caller asked for it, or when we
   cannot use a store-with-update and must re-store the back chain.  */
8447 if (copy_r12 || ! TARGET_UPDATE)
8448 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8454 /* Need a note here so that try_split doesn't get confused. */
8455 if (get_last_insn() == NULL_RTX)
8456 emit_note (0, NOTE_INSN_DELETED);
/* Large decrement: load it into r0 and let try_split break up the
   constant load if needed.  */
8457 insn = emit_move_insn (tmp_reg, todec);
8458 try_split (PATTERN (insn), insn, 0);
/* TARGET_UPDATE path: single store-with-update writes the back chain
   and decrements r1 atomically.  */
8462 if (Pmode == SImode)
8463 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8466 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
/* Fallback: decrement r1, then store the old sp (saved in r12 above)
   as the back-chain word.  */
8471 if (Pmode == SImode)
8472 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8474 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8475 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8476 gen_rtx_REG (Pmode, 12));
/* Describe the sp adjustment for the unwinder.  */
8479 RTX_FRAME_RELATED_P (insn) = 1;
8481 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8482 gen_rtx_SET (VOIDmode, stack_reg,
8483 gen_rtx_PLUS (Pmode, stack_reg,
8488 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8491 (mem (plus (blah) (regXX)))
8495 (mem (plus (blah) (const VALUE_OF_REGXX))). */
/* Attach a REG_FRAME_RELATED_EXPR note that is INSN's pattern with the
   index register REG replaced by its known constant VAL, so the dwarf2
   unwinder sees a constant-offset store.  NOTE(review): return type,
   parameter declarations and braces are elided from this listing.  */
8498 altivec_frame_fixup (insn, reg, val)
8504 real = copy_rtx (PATTERN (insn));
8506 real = replace_rtx (real, reg, GEN_INT (val));
8508 RTX_FRAME_RELATED_P (insn) = 1;
8509 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8514 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8515 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8516 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8517 deduce these equivalences by itself so it wasn't necessary to hold
8518 its hand so much. */
/* NOTE(review): elided lines hide the parameter declarations, braces
   and several statements (including 'rtx real, temp;').  */
8521 rs6000_frame_related (insn, reg, val, reg2, rreg)
/* Work on a copy of the pattern; the original insn is untouched except
   for its notes.  */
8530 real = copy_rtx (PATTERN (insn));
/* Rewrite REG as sp + VAL so the note is expressed against r1.  */
8532 real = replace_rtx (real, reg,
8533 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8534 STACK_POINTER_REGNUM),
8537 /* We expect that 'real' is either a SET or a PARALLEL containing
8538 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8539 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8541 if (GET_CODE (real) == SET)
/* Fold the substituted constants in source, dest and any MEM address.  */
8545 temp = simplify_rtx (SET_SRC (set));
8547 SET_SRC (set) = temp;
8548 temp = simplify_rtx (SET_DEST (set));
8550 SET_DEST (set) = temp;
8551 if (GET_CODE (SET_DEST (set)) == MEM)
8553 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8555 XEXP (SET_DEST (set), 0) = temp;
8558 else if (GET_CODE (real) == PARALLEL)
/* Same simplification for every SET element of the PARALLEL.  */
8561 for (i = 0; i < XVECLEN (real, 0); i++)
8562 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8564 rtx set = XVECEXP (real, 0, i);
8566 temp = simplify_rtx (SET_SRC (set));
8568 SET_SRC (set) = temp;
8569 temp = simplify_rtx (SET_DEST (set));
8571 SET_DEST (set) = temp;
8572 if (GET_CODE (SET_DEST (set)) == MEM)
8574 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8576 XEXP (SET_DEST (set), 0) = temp;
8578 RTX_FRAME_RELATED_P (set) = 1;
/* Optional second substitution, e.g. r0 -> LR in the LR save.  */
8584 if (reg2 != NULL_RTX)
8585 real = replace_rtx (real, reg2, rreg);
8587 RTX_FRAME_RELATED_P (insn) = 1;
8588 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8593 /* Returns an insn that has a vrsave set operation with the
8594 appropriate CLOBBERs. */
/* Builds (but does not emit) a PARALLEL: element 0 sets VRSAVE from an
   unspec_volatile of (REG, VRSAVE); the remaining elements clobber (in
   a prologue, or for call-used regs in an epilogue) or unspec-set (for
   call-saved regs in an epilogue) each AltiVec register named in
   info->vrsave_mask, to keep the scheduler/flow honest (see comment at
   8615 below).  NOTE(review): elided lines hide the return type, some
   parameter declarations, braces, and the 'nclobs' setup.  */
8597 generate_set_vrsave (reg, info, epiloguep)
8599 rs6000_stack_t *info;
8603 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8604 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8607 = gen_rtx_SET (VOIDmode,
8609 gen_rtx_UNSPEC_VOLATILE (SImode,
8610 gen_rtvec (2, reg, vrsave),
8615 /* We need to clobber the registers in the mask so the scheduler
8616 does not move sets to VRSAVE before sets of AltiVec registers.
8618 However, if the function receives nonlocal gotos, reload will set
8619 all call saved registers live. We will end up with:
8621 (set (reg 999) (mem))
8622 (parallel [ (set (reg vrsave) (unspec blah))
8623 (clobber (reg 999))])
8625 The clobber will cause the store into reg 999 to be dead, and
8626 flow will attempt to delete an epilogue insn. In this case, we
8627 need an unspec use/set of the register. */
8629 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
/* Fixed: test the register's own bit in the mask.  The previous
   condition (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0) was
   vacuously true for every AltiVec regno whenever the mask was
   nonzero, touching ALL AltiVec registers; the sibling loops in
   rs6000_emit_prologue/epilogue use the bitwise-AND form.  */
8630 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8632 if (!epiloguep || call_used_regs [i])
8633 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8634 gen_rtx_REG (V4SImode, i));
/* Epilogue, call-saved reg: emit an unspec set instead of a clobber so
   flow does not delete the restore of this register (see above).  */
8637 rtx reg = gen_rtx_REG (V4SImode, i);
8640 = gen_rtx_SET (VOIDmode,
8642 gen_rtx_UNSPEC (V4SImode,
8643 gen_rtvec (1, reg), 27));
/* Package the vrsave set plus all clobbers/uses into one PARALLEL.  */
8647 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8649 for (i = 0; i < nclobs; ++i)
8650 XVECEXP (insn, 0, i) = clobs[i];
8655 /* Emit function prologue as insns. */
/* Emits the RTL prologue: allocate the frame, save AltiVec/VRSAVE,
   FPRs, GPRs, EH data regs, LR and CR, set the frame pointer, and set
   up the TOC/PIC register.  Each save is passed through
   rs6000_frame_related so the dwarf2 unwinder can describe it.
   NOTE(review): this listing has elided lines throughout (gaps in the
   embedded numbering): braces, some declarations and operands are
   missing; comments describe only the visible statements.  */
8658 rs6000_emit_prologue ()
8660 rs6000_stack_t *info = rs6000_stack_info ();
8661 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8662 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8663 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8664 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
/* Saves are addressed off frame_reg_rtx; it stays r1 unless V.4 moves
   it to r12 below.  */
8665 rtx frame_reg_rtx = sp_reg_rtx;
8666 rtx cr_save_rtx = NULL;
8668 int saving_FPRs_inline;
8669 int using_store_multiple;
8670 HOST_WIDE_INT sp_offset = 0;
/* stmw is 32-bit only and only worth it for two or more registers.  */
8672 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8673 && info->first_gp_reg_save < 31);
8674 saving_FPRs_inline = (info->first_fp_reg_save == 64
8675 || FP_SAVE_INLINE (info->first_fp_reg_save));
8677 /* For V.4, update stack before we do any saving and set back pointer. */
8678 if (info->push_p && DEFAULT_ABI == ABI_V4)
/* Small frames: address saves as r1 + offset; otherwise keep the old
   sp in r12 and address saves off that.  */
8680 if (info->total_size < 32767)
8681 sp_offset = info->total_size;
8683 frame_reg_rtx = frame_ptr_rtx;
8684 rs6000_emit_allocate_stack (info->total_size,
8685 (frame_reg_rtx != sp_reg_rtx
8688 || info->first_fp_reg_save < 64
8689 || info->first_gp_reg_save < 32
8691 if (frame_reg_rtx != sp_reg_rtx)
8692 rs6000_emit_stack_tie ();
8695 /* Save AltiVec registers if needed. */
8696 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8700 /* There should be a non inline version of this, for when we
8701 are saving lots of vector registers. */
8702 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8703 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8705 rtx areg, savereg, mem;
8708 offset = info->altivec_save_offset + sp_offset
8709 + 16 * (i - info->first_altivec_reg_save);
8711 savereg = gen_rtx_REG (V4SImode, i);
8713 areg = gen_rtx_REG (Pmode, 0);
8714 emit_move_insn (areg, GEN_INT (offset));
8716 /* AltiVec addressing mode is [reg+reg]. */
8717 mem = gen_rtx_MEM (V4SImode,
8718 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8720 set_mem_alias_set (mem, rs6000_sr_alias_set);
8722 insn = emit_move_insn (mem, savereg);
8724 altivec_frame_fixup (insn, areg, offset);
8728 /* VRSAVE is a bit vector representing which AltiVec registers
8729 are used. The OS uses this to determine which vector
8730 registers to save on a context switch. We need to save
8731 VRSAVE on the stack frame, add whatever AltiVec registers we
8732 used in this function, and do the corresponding magic in the
8735 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8737 rtx reg, mem, vrsave;
8740 /* Get VRSAVE onto a GPR. */
8741 reg = gen_rtx_REG (SImode, 12);
8742 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8744 emit_insn (gen_get_vrsave_internal (reg));
8746 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
/* Save the caller's VRSAVE value to the frame.  */
8749 offset = info->vrsave_save_offset + sp_offset;
8751 = gen_rtx_MEM (SImode,
8752 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8753 set_mem_alias_set (mem, rs6000_sr_alias_set);
8754 insn = emit_move_insn (mem, reg);
8756 /* Include the registers in the mask. */
8757 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8759 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8762 /* If we use the link register, get it into r0. */
8763 if (info->lr_save_p)
8764 emit_move_insn (gen_rtx_REG (Pmode, 0),
8765 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8767 /* If we need to save CR, put it into r12. */
8768 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8770 cr_save_rtx = gen_rtx_REG (SImode, 12);
8771 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8774 /* Do any required saving of fpr's. If only one or two to save, do
8775 it ourselves. Otherwise, call function. */
8776 if (saving_FPRs_inline)
8779 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8780 if ((regs_ever_live[info->first_fp_reg_save+i]
8781 && ! call_used_regs[info->first_fp_reg_save+i]))
8784 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8785 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8786 GEN_INT (info->fp_save_offset
8789 mem = gen_rtx_MEM (DFmode, addr);
8790 set_mem_alias_set (mem, rs6000_sr_alias_set);
8792 insn = emit_move_insn (mem, reg);
8793 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8794 NULL_RTX, NULL_RTX);
/* Out-of-line FPR save: call the _savefN libgcc routine; the PARALLEL
   clobbers LR and USEs the routine's symbol.  */
8797 else if (info->first_fp_reg_save != 64)
8801 const char *alloc_rname;
8803 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8805 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8807 LINK_REGISTER_REGNUM));
8808 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8809 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8810 alloc_rname = ggc_strdup (rname);
8811 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8812 gen_rtx_SYMBOL_REF (Pmode,
8814 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8817 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8818 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8819 GEN_INT (info->fp_save_offset
8820 + sp_offset + 8*i));
8821 mem = gen_rtx_MEM (DFmode, addr);
8822 set_mem_alias_set (mem, rs6000_sr_alias_set);
8824 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8826 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8827 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8828 NULL_RTX, NULL_RTX);
8831 /* Save GPRs. This is done as a PARALLEL if we are using
8832 the store-multiple instructions. */
8833 if (using_store_multiple)
8837 p = rtvec_alloc (32 - info->first_gp_reg_save);
8838 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8839 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8842 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8843 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8844 GEN_INT (info->gp_save_offset
8847 mem = gen_rtx_MEM (reg_mode, addr);
8848 set_mem_alias_set (mem, rs6000_sr_alias_set);
8850 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8852 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8853 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8854 NULL_RTX, NULL_RTX);
/* One store per live call-saved GPR; the PIC register is saved too
   when small-PIC V.4 or Darwin PIC uses it.  */
8859 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8860 if ((regs_ever_live[info->first_gp_reg_save+i]
8861 && ! call_used_regs[info->first_gp_reg_save+i])
8862 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8863 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8864 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8867 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8868 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8869 GEN_INT (info->gp_save_offset
8872 mem = gen_rtx_MEM (reg_mode, addr);
8873 set_mem_alias_set (mem, rs6000_sr_alias_set);
8875 insn = emit_move_insn (mem, reg);
8876 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8877 NULL_RTX, NULL_RTX);
8881 /* ??? There's no need to emit actual instructions here, but it's the
8882 easiest way to get the frame unwind information emitted. */
8883 if (current_function_calls_eh_return)
8885 unsigned int i, regno;
8891 regno = EH_RETURN_DATA_REGNO (i);
8892 if (regno == INVALID_REGNUM)
8895 reg = gen_rtx_REG (reg_mode, regno);
8896 addr = plus_constant (frame_reg_rtx,
8897 info->ehrd_offset + sp_offset
8898 + reg_size * (int) i);
8899 mem = gen_rtx_MEM (reg_mode, addr);
8900 set_mem_alias_set (mem, rs6000_sr_alias_set);
8902 insn = emit_move_insn (mem, reg);
8903 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8904 NULL_RTX, NULL_RTX);
8908 /* Save lr if we used it. */
8909 if (info->lr_save_p)
8911 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8912 GEN_INT (info->lr_save_offset + sp_offset));
/* LR was copied to r0 earlier; store r0 but record the note as a save
   of LR via the reg2/rreg substitution.  */
8913 rtx reg = gen_rtx_REG (Pmode, 0);
8914 rtx mem = gen_rtx_MEM (Pmode, addr);
8915 /* This should not be of rs6000_sr_alias_set, because of
8916 __builtin_return_address. */
8918 insn = emit_move_insn (mem, reg);
8919 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8920 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8923 /* Save CR if we use any that must be preserved. */
8924 if (info->cr_save_p)
8926 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8927 GEN_INT (info->cr_save_offset + sp_offset));
8928 rtx mem = gen_rtx_MEM (SImode, addr);
8930 set_mem_alias_set (mem, rs6000_sr_alias_set);
8932 /* If r12 was used to hold the original sp, copy cr into r0 now
8934 if (REGNO (frame_reg_rtx) == 12)
8936 cr_save_rtx = gen_rtx_REG (SImode, 0);
8937 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8939 insn = emit_move_insn (mem, cr_save_rtx);
8941 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8942 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8943 OK. All we have to do is specify that _one_ condition code
8944 register is saved in this stack slot. The thrower's epilogue
8945 will then restore all the call-saved registers.
8946 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8947 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8948 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8951 /* Update stack and set back pointer unless this is V.4,
8952 for which it was done previously. */
8953 if (info->push_p && DEFAULT_ABI != ABI_V4)
8954 rs6000_emit_allocate_stack (info->total_size, FALSE);
8956 /* Set frame pointer, if needed. */
8957 if (frame_pointer_needed)
8959 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8961 RTX_FRAME_RELATED_P (insn) = 1;
8964 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8965 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8966 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8967 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
8969 /* If emit_load_toc_table will use the link register, we need to save
8970 it. We use R11 for this purpose because emit_load_toc_table
8971 can use register 0. This allows us to use a plain 'blr' to return
8972 from the procedure more often. */
8973 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8974 && ! info->lr_save_p
8975 && EXIT_BLOCK_PTR->pred != NULL);
8976 if (save_LR_around_toc_setup)
8977 emit_move_insn (gen_rtx_REG (Pmode, 11),
8978 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8980 rs6000_emit_load_toc_table (TRUE);
8982 if (save_LR_around_toc_setup)
8983 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8984 gen_rtx_REG (Pmode, 11));
/* Darwin PIC: load the picbase into LR, then copy it into the PIC
   offset-table register (marked maybe-dead in case it goes unused).  */
8987 if (DEFAULT_ABI == ABI_DARWIN
8988 && flag_pic && current_function_uses_pic_offset_table)
8990 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8992 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8995 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
8996 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9000 /* Write function prologue. */
/* TARGET_ASM_FUNCTION_PROLOGUE hook: emits assembler-level prologue
   boilerplate (.extern directives), and, when there is no prologue
   insn pattern, generates and finals the RTL prologue directly here.
   NOTE(review): elided lines hide the return type, the FILE parameter
   declaration, braces and some statements.  */
9003 rs6000_output_function_prologue (file, size)
9005 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9007 rs6000_stack_t *info = rs6000_stack_info ();
9009 if (TARGET_DEBUG_STACK)
9010 debug_stack_info (info);
9012 /* Write .extern for any function we will call to save and restore
9014 if (info->first_fp_reg_save < 64
9015 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9016 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9017 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9018 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9021 /* Write .extern for AIX common mode routines, if needed. */
9022 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9024 fputs ("\t.extern __mulh\n", file);
9025 fputs ("\t.extern __mull\n", file);
9026 fputs ("\t.extern __divss\n", file);
9027 fputs ("\t.extern __divus\n", file);
9028 fputs ("\t.extern __quoss\n", file);
9029 fputs ("\t.extern __quous\n", file);
/* Only emit the .externs once per translation unit.  */
9030 common_mode_defined = 1;
/* Fallback when the .md file provides no prologue expander: build the
   prologue RTL here and run it through final () immediately.  */
9033 if (! HAVE_prologue)
9037 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9038 the "toplevel" insn chain. */
9039 emit_note (0, NOTE_INSN_DELETED);
9040 rs6000_emit_prologue ();
9041 emit_note (0, NOTE_INSN_DELETED);
9043 if (TARGET_DEBUG_STACK)
9044 debug_rtx_list (get_insns (), 100);
9045 final (get_insns (), file, FALSE, FALSE);
9049 rs6000_pic_labelno++;
9052 /* Emit function epilogue as insns.
9054 At present, dwarf2out_frame_debug_expr doesn't understand
9055 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9056 anywhere in the epilogue. Most of the insns below would in any case
9057 need special notes to explain where r11 is in relation to the stack. */
/* SIBCALL nonzero means we are emitting the epilogue before a sibling
   call, so no return insn is emitted.  Restores mirror the prologue:
   AltiVec, VRSAVE, LR, CR, EH data regs, GPRs, FPRs, then the stack
   pointer.  NOTE(review): this listing has elided lines throughout
   (gaps in the embedded numbering); braces, some declarations and
   operands are missing.  */
9060 rs6000_emit_epilogue (sibcall)
9063 rs6000_stack_t *info;
9064 int restoring_FPRs_inline;
9065 int using_load_multiple;
9066 int using_mfcr_multiple;
9067 int use_backchain_to_restore_sp;
9069 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9070 rtx frame_reg_rtx = sp_reg_rtx;
9071 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9072 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9075 info = rs6000_stack_info ();
9076 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9077 && info->first_gp_reg_save < 31);
/* Sibcalls and eh_return must restore FPRs inline: the out-of-line
   restore routine would return to our caller.  */
9078 restoring_FPRs_inline = (sibcall
9079 || current_function_calls_eh_return
9080 || info->first_fp_reg_save == 64
9081 || FP_SAVE_INLINE (info->first_fp_reg_save));
9082 use_backchain_to_restore_sp = (frame_pointer_needed
9083 || current_function_calls_alloca
9084 || info->total_size > 32767);
/* mtcrf with a single field is faster on some CPUs; on these, one
   mtcrf per field wins.  */
9085 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9086 || rs6000_cpu == PROCESSOR_PPC603
9087 || rs6000_cpu == PROCESSOR_PPC750
9090 /* If we have a frame pointer, a call to alloca, or a large stack
9091 frame, restore the old stack pointer using the backchain. Otherwise,
9092 we know what size to update it with. */
9093 if (use_backchain_to_restore_sp)
9095 /* Under V.4, don't reset the stack pointer until after we're done
9096 loading the saved registers. */
9097 if (DEFAULT_ABI == ABI_V4)
9098 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9100 emit_move_insn (frame_reg_rtx,
9101 gen_rtx_MEM (Pmode, sp_reg_rtx));
9104 else if (info->push_p)
/* Known frame size: on V.4 defer the sp bump (address restores as
   r1 + sp_offset); otherwise pop the frame now.  */
9106 if (DEFAULT_ABI == ABI_V4)
9107 sp_offset = info->total_size;
9110 emit_insn (TARGET_32BIT
9111 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9112 GEN_INT (info->total_size))
9113 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9114 GEN_INT (info->total_size)));
9118 /* Restore AltiVec registers if needed. */
9119 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9123 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9124 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9126 rtx addr, areg, mem;
9128 areg = gen_rtx_REG (Pmode, 0);
9130 (areg, GEN_INT (info->altivec_save_offset
9132 + 16 * (i - info->first_altivec_reg_save)));
9134 /* AltiVec addressing mode is [reg+reg]. */
9135 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9136 mem = gen_rtx_MEM (V4SImode, addr);
9137 set_mem_alias_set (mem, rs6000_sr_alias_set);
9139 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9143 /* Restore VRSAVE if needed. */
9144 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9148 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9149 GEN_INT (info->vrsave_save_offset + sp_offset));
9150 mem = gen_rtx_MEM (SImode, addr);
9151 set_mem_alias_set (mem, rs6000_sr_alias_set);
9152 reg = gen_rtx_REG (SImode, 12);
9153 emit_move_insn (reg, mem);
/* epiloguep==1: the PARALLEL uses unspec sets for call-saved regs so
   flow keeps the restores (see generate_set_vrsave).  */
9155 emit_insn (generate_set_vrsave (reg, info, 1));
9158 /* Get the old lr if we saved it. */
9159 if (info->lr_save_p)
9161 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9162 GEN_INT (info->lr_save_offset + sp_offset));
9163 rtx mem = gen_rtx_MEM (Pmode, addr);
9165 set_mem_alias_set (mem, rs6000_sr_alias_set);
9167 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9170 /* Get the old cr if we saved it. */
9171 if (info->cr_save_p)
9173 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9174 GEN_INT (info->cr_save_offset + sp_offset));
9175 rtx mem = gen_rtx_MEM (SImode, addr);
9177 set_mem_alias_set (mem, rs6000_sr_alias_set);
9179 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9182 /* Set LR here to try to overlap restores below. */
9183 if (info->lr_save_p)
9184 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9185 gen_rtx_REG (Pmode, 0));
9187 /* Load exception handler data registers, if needed. */
9188 if (current_function_calls_eh_return)
9190 unsigned int i, regno;
9196 regno = EH_RETURN_DATA_REGNO (i);
9197 if (regno == INVALID_REGNUM)
9200 addr = plus_constant (frame_reg_rtx,
9201 info->ehrd_offset + sp_offset
9202 + reg_size * (int) i);
9203 mem = gen_rtx_MEM (reg_mode, addr);
9204 set_mem_alias_set (mem, rs6000_sr_alias_set);
9206 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9210 /* Restore GPRs. This is done as a PARALLEL if we are using
9211 the load-multiple instructions. */
9212 if (using_load_multiple)
9215 p = rtvec_alloc (32 - info->first_gp_reg_save);
9216 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9218 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9219 GEN_INT (info->gp_save_offset
9222 rtx mem = gen_rtx_MEM (reg_mode, addr);
9224 set_mem_alias_set (mem, rs6000_sr_alias_set);
9227 gen_rtx_SET (VOIDmode,
9228 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9231 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* One load per saved GPR; same live/PIC-register condition as the
   prologue's save loop.  */
9234 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9235 if ((regs_ever_live[info->first_gp_reg_save+i]
9236 && ! call_used_regs[info->first_gp_reg_save+i])
9237 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9238 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9239 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9241 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9242 GEN_INT (info->gp_save_offset
9245 rtx mem = gen_rtx_MEM (reg_mode, addr);
9247 set_mem_alias_set (mem, rs6000_sr_alias_set);
9249 emit_move_insn (gen_rtx_REG (reg_mode,
9250 info->first_gp_reg_save + i),
9254 /* Restore fpr's if we need to do it without calling a function. */
9255 if (restoring_FPRs_inline)
9256 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9257 if ((regs_ever_live[info->first_fp_reg_save+i]
9258 && ! call_used_regs[info->first_fp_reg_save+i]))
9261 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9262 GEN_INT (info->fp_save_offset
9265 mem = gen_rtx_MEM (DFmode, addr);
9266 set_mem_alias_set (mem, rs6000_sr_alias_set);
9268 emit_move_insn (gen_rtx_REG (DFmode,
9269 info->first_fp_reg_save + i),
9273 /* If we saved cr, restore it here. Just those that were used. */
9274 if (info->cr_save_p)
9276 rtx r12_rtx = gen_rtx_REG (SImode, 12);
/* Count the live call-saved CR fields to decide between one combined
   PARALLEL of mtcrf's and individual moves.  */
9279 if (using_mfcr_multiple)
9281 for (i = 0; i < 8; i++)
9282 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9288 if (using_mfcr_multiple && count > 1)
9293 p = rtvec_alloc (count);
9296 for (i = 0; i < 8; i++)
9297 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9299 rtvec r = rtvec_alloc (2);
9300 RTVEC_ELT (r, 0) = r12_rtx;
/* Field-select mask: bit 7-i selects CR field i for mtcrf.  */
9301 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9302 RTVEC_ELT (p, ndx) =
9303 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9304 gen_rtx_UNSPEC (CCmode, r, 20));
9307 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9312 for (i = 0; i < 8; i++)
9313 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9315 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9321 /* If this is V.4, unwind the stack pointer after all of the loads
9322 have been done. We need to emit a block here so that sched
9323 doesn't decide to move the sp change before the register restores
9324 (which may not have any obvious dependency on the stack). This
9325 doesn't hurt performance, because there is no scheduling that can
9326 be done after this point. */
9327 if (DEFAULT_ABI == ABI_V4)
9329 if (frame_reg_rtx != sp_reg_rtx)
9330 rs6000_emit_stack_tie ();
9332 if (use_backchain_to_restore_sp)
9334 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9336 else if (sp_offset != 0)
9338 emit_insn (Pmode == SImode
9339 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9340 GEN_INT (sp_offset))
9341 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9342 GEN_INT (sp_offset)));
/* eh_return: pop the extra stack adjustment requested by the handler.  */
9346 if (current_function_calls_eh_return)
9348 rtx sa = EH_RETURN_STACKADJ_RTX;
9349 emit_insn (Pmode == SImode
9350 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9351 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
/* Build the return: a PARALLEL of (return) + (use LR), extended with
   the out-of-line FPR restores when not restoring inline.  */
9357 if (! restoring_FPRs_inline)
9358 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9360 p = rtvec_alloc (2);
9362 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9363 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9365 LINK_REGISTER_REGNUM));
9367 /* If we have to restore more than two FP registers, branch to the
9368 restore function. It will return to our caller. */
9369 if (! restoring_FPRs_inline)
9373 const char *alloc_rname;
9375 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9376 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9377 alloc_rname = ggc_strdup (rname);
9378 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9379 gen_rtx_SYMBOL_REF (Pmode,
9382 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9385 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9386 GEN_INT (info->fp_save_offset + 8*i));
9387 mem = gen_rtx_MEM (DFmode, addr);
9388 set_mem_alias_set (mem, rs6000_sr_alias_set);
9390 RTVEC_ELT (p, i+3) =
9391 gen_rtx_SET (VOIDmode,
9392 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9397 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9401 /* Write function epilogue. */
9404 rs6000_output_function_epilogue (file, size)
9406 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9408 rs6000_stack_t *info = rs6000_stack_info ();
9409 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9411 if (! HAVE_epilogue)
9413 rtx insn = get_last_insn ();
9414 /* If the last insn was a BARRIER, we don't have to write anything except
9416 if (GET_CODE (insn) == NOTE)
9417 insn = prev_nonnote_insn (insn);
9418 if (insn == 0 || GET_CODE (insn) != BARRIER)
9420 /* This is slightly ugly, but at least we don't have two
9421 copies of the epilogue-emitting code. */
9424 /* A NOTE_INSN_DELETED is supposed to be at the start
9425 and end of the "toplevel" insn chain. */
9426 emit_note (0, NOTE_INSN_DELETED);
9427 rs6000_emit_epilogue (FALSE);
9428 emit_note (0, NOTE_INSN_DELETED);
9430 if (TARGET_DEBUG_STACK)
9431 debug_rtx_list (get_insns (), 100);
9432 final (get_insns (), file, FALSE, FALSE);
9437 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9440 We don't output a traceback table if -finhibit-size-directive was
9441 used. The documentation for -finhibit-size-directive reads
9442 ``don't output a @code{.size} assembler directive, or anything
9443 else that would cause trouble if the function is split in the
9444 middle, and the two halves are placed at locations far apart in
9445 memory.'' The traceback table has this property, since it
9446 includes the offset from the start of the function to the
9447 traceback table itself.
9449 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9450 different traceback table. */
9451 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9453 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9454 const char *language_string = lang_hooks.name;
9455 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9458 while (*fname == '.') /* V.4 encodes . in the name */
9461 /* Need label immediately before tbtab, so we can compute its offset
9462 from the function start. */
9465 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9466 ASM_OUTPUT_LABEL (file, fname);
9468 /* The .tbtab pseudo-op can only be used for the first eight
9469 expressions, since it can't handle the possibly variable
9470 length fields that follow. However, if you omit the optional
9471 fields, the assembler outputs zeros for all optional fields
9472 anyways, giving each variable length field its minimum length
9473 (as defined in sys/debug.h). Thus we can not use the .tbtab
9474 pseudo-op at all. */
9476 /* An all-zero word flags the start of the tbtab, for debuggers
9477 that have to find it by searching forward from the entry
9478 point or from the current pc. */
9479 fputs ("\t.long 0\n", file);
9481 /* Tbtab format type. Use format type 0. */
9482 fputs ("\t.byte 0,", file);
9484 /* Language type. Unfortunately, there doesn't seem to be any
9485 official way to get this info, so we use language_string. C
9486 is 0. C++ is 9. No number defined for Obj-C, so use the
9487 value for C for now. There is no official value for Java,
9488 although IBM appears to be using 13. There is no official value
9489 for Chill, so we've chosen 44 pseudo-randomly. */
9490 if (! strcmp (language_string, "GNU C")
9491 || ! strcmp (language_string, "GNU Objective-C"))
9493 else if (! strcmp (language_string, "GNU F77"))
9495 else if (! strcmp (language_string, "GNU Ada"))
9497 else if (! strcmp (language_string, "GNU Pascal"))
9499 else if (! strcmp (language_string, "GNU C++"))
9501 else if (! strcmp (language_string, "GNU Java"))
9503 else if (! strcmp (language_string, "GNU CHILL"))
9507 fprintf (file, "%d,", i);
9509 /* 8 single bit fields: global linkage (not set for C extern linkage,
9510 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9511 from start of procedure stored in tbtab, internal function, function
9512 has controlled storage, function has no toc, function uses fp,
9513 function logs/aborts fp operations. */
9514 /* Assume that fp operations are used if any fp reg must be saved. */
9515 fprintf (file, "%d,",
9516 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9518 /* 6 bitfields: function is interrupt handler, name present in
9519 proc table, function calls alloca, on condition directives
9520 (controls stack walks, 3 bits), saves condition reg, saves
9522 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9523 set up as a frame pointer, even when there is no alloca call. */
9524 fprintf (file, "%d,",
9525 ((optional_tbtab << 6)
9526 | ((optional_tbtab & frame_pointer_needed) << 5)
9527 | (info->cr_save_p << 1)
9528 | (info->lr_save_p)));
9530 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9532 fprintf (file, "%d,",
9533 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9535 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9536 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9540 /* Compute the parameter info from the function decl argument
9543 int next_parm_info_bit = 31;
9545 for (decl = DECL_ARGUMENTS (current_function_decl);
9546 decl; decl = TREE_CHAIN (decl))
9548 rtx parameter = DECL_INCOMING_RTL (decl);
9549 enum machine_mode mode = GET_MODE (parameter);
9551 if (GET_CODE (parameter) == REG)
9553 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9561 else if (mode == DFmode)
9566 /* If only one bit will fit, don't or in this entry. */
9567 if (next_parm_info_bit > 0)
9568 parm_info |= (bits << (next_parm_info_bit - 1));
9569 next_parm_info_bit -= 2;
9573 fixed_parms += ((GET_MODE_SIZE (mode)
9574 + (UNITS_PER_WORD - 1))
9576 next_parm_info_bit -= 1;
9582 /* Number of fixed point parameters. */
9583 /* This is actually the number of words of fixed point parameters; thus
9584 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9585 fprintf (file, "%d,", fixed_parms);
9587 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9589 /* This is actually the number of fp registers that hold parameters;
9590 and thus the maximum value is 13. */
9591 /* Set parameters on stack bit if parameters are not in their original
9592 registers, regardless of whether they are on the stack? Xlc
9593 seems to set the bit when not optimizing. */
9594 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9596 if (! optional_tbtab)
9599 /* Optional fields follow. Some are variable length. */
9601 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9603 /* There is an entry for each parameter in a register, in the order that
9604 they occur in the parameter list. Any intervening arguments on the
9605 stack are ignored. If the list overflows a long (max possible length
9606 34 bits) then completely leave off all elements that don't fit. */
9607 /* Only emit this long if there was at least one parameter. */
9608 if (fixed_parms || float_parms)
9609 fprintf (file, "\t.long %d\n", parm_info);
9611 /* Offset from start of code to tb table. */
9612 fputs ("\t.long ", file);
9613 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9615 RS6000_OUTPUT_BASENAME (file, fname);
9617 assemble_name (file, fname);
9621 RS6000_OUTPUT_BASENAME (file, fname);
9623 assemble_name (file, fname);
9627 /* Interrupt handler mask. */
9628 /* Omit this long, since we never set the interrupt handler bit
9631 /* Number of CTL (controlled storage) anchors. */
9632 /* Omit this long, since the has_ctl bit is never set above. */
9634 /* Displacement into stack of each CTL anchor. */
9635 /* Omit this list of longs, because there are no CTL anchors. */
9637 /* Length of function name. */
9638 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9640 /* Function name. */
9641 assemble_string (fname, strlen (fname));
9643 /* Register for alloca automatic storage; this is always reg 31.
9644 Only emit this if the alloca bit was set above. */
9645 if (frame_pointer_needed)
9646 fputs ("\t.byte 31\n", file);
9648 fputs ("\t.align 2\n", file);
9653 /* A C compound statement that outputs the assembler code for a thunk
9654 function, used to implement C++ virtual function calls with
9655 multiple inheritance. The thunk acts as a wrapper around a virtual
9656 function, adjusting the implicit object parameter before handing
9657 control off to the real function.
9659 First, emit code to add the integer DELTA to the location that
9660 contains the incoming first argument. Assume that this argument
9661 contains a pointer, and is the one used to pass the `this' pointer
9662 in C++. This is the incoming argument *before* the function
9663 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9664 values of all other incoming arguments.
9666 After the addition, emit code to jump to FUNCTION, which is a
9667 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9668 not touch the return address. Hence returning from FUNCTION will
9669 return to whoever called the current `thunk'.
9671 The effect must be as if FUNCTION had been called directly with the
9672 adjusted first argument. This macro is responsible for emitting
9673 all of the code for a thunk function; output_function_prologue()
9674 and output_function_epilogue() are not invoked.
9676 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9677 been extracted from it.) It might possibly be useful on some
9678 targets, but probably not.
9680 If you do not define this macro, the target-independent code in the
9681 C++ frontend will generate a less efficient heavyweight thunk that
9682 calls FUNCTION instead of jumping to it. The generic approach does
9683 not support varargs. */
/* Emit assembly for a C++ multiple-inheritance thunk: add DELTA to
   the incoming `this' pointer, then tail-jump to FUNCTION.  See the
   block comment above for the full contract.  NOTE(review): many
   lines of this function (braces, case labels, fallback abort paths)
   are elided in this excerpt.  */
9686 output_mi_thunk (file, thunk_fndecl, delta, function)
9688      tree thunk_fndecl ATTRIBUTE_UNUSED;
/* `this' arrives in r3 normally, or in r4 when the return value is
   passed by invisible reference (aggregate return occupies r3).  */
9692   const char *this_reg =
9693     reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9696   const char *r0		= reg_names[0];
9697   const char *toc	= reg_names[2];
9698   const char *schain	= reg_names[11];
9699   const char *r12	= reg_names[12];
9701   static int labelno = 0;
9703   /* Small constants that can be done by one add instruction.  */
9704   if (delta >= -32768 && delta <= 32767)
9706       if (! TARGET_NEW_MNEMONICS)
9707 	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9709 	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9712   /* Large constants that can be done by one addis instruction.  */
9713   else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9714     asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9717   /* 32-bit constants that can be done by an add and addis instruction.  */
9718   else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9720       /* Break into two pieces, propagating the sign bit from the low
9721 	 word to the upper word.  */
9722       int delta_high = delta >> 16;
9723       int delta_low  = delta & 0xffff;
9724       if ((delta_low & 0x8000) != 0)
9727 	  delta_low = (delta_low ^ 0x8000) - 0x8000;	/* sign extend */
9730       asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9733       if (! TARGET_NEW_MNEMONICS)
9734 	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9736 	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9739   /* 64-bit constants, fixme */
9743   /* Get the prefix in front of the names.  */
9744   switch (DEFAULT_ABI)
9754     case ABI_AIX_NODESC:
9759   /* If the function is compiled in this module, jump to it directly.
9760      Otherwise, load up its address and jump to it.  */
9762   fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
/* Direct branch only if FUNCTION is local to this translation unit
   and not marked `longcall'.  */
9764   if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9765       && ! lookup_attribute ("longcall",
9766 			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
9768       fprintf (file, "\tb %s", prefix);
9769       assemble_name (file, fname);
9770       if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9776       switch (DEFAULT_ABI)
9782 	  /* Set up a TOC entry for the function.  */
9783 	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9785 	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9788 	  if (TARGET_MINIMAL_TOC)
9789 	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9792 	      fputs ("\t.tc ", file);
9793 	      assemble_name (file, fname);
9794 	      fputs ("[TC],", file);
9796 	  assemble_name (file, fname);
/* Load the descriptor address through the TOC (r2), or through r12
   with -mminimal-toc, then pull entry point / TOC / static chain out
   of the 3-word function descriptor and branch via CTR.  */
9799 	  if (TARGET_MINIMAL_TOC)
9800 	    asm_fprintf (file, (TARGET_32BIT)
9801 			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9802 			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9803 	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9804 	  assemble_name (file, buf);
9805 	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
9806 	    fputs ("-(.LCTOC1)", file);
9807 	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9809 		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9813 		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9816 	  asm_fprintf (file, "\tmtctr %s\n", r0);
9818 		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9821 	  asm_fprintf (file, "\tbctr\n");
9824 	case ABI_AIX_NODESC:
9826 	  fprintf (file, "\tb %s", prefix);
9827 	  assemble_name (file, fname);
9828 	  if (flag_pic) fputs ("@plt", file);
/* Darwin: branch to the machopic stub when the symbol is not yet
   defined under PIC.  */
9834 	  fprintf (file, "\tb %s", prefix);
9835 	  if (flag_pic && !machopic_name_defined_p (fname))
9836 	    assemble_name (file, machopic_stub_name (fname));
9838 	  assemble_name (file, fname);
9847 /* A quick summary of the various types of 'constant-pool tables'
9850 Target Flags Name One table per
9851 AIX (none) AIX TOC object file
9852 AIX -mfull-toc AIX TOC object file
9853 AIX -mminimal-toc AIX minimal TOC translation unit
9854 SVR4/EABI (none) SVR4 SDATA object file
9855 SVR4/EABI -fpic SVR4 pic object file
9856 SVR4/EABI -fPIC SVR4 PIC translation unit
9857 SVR4/EABI -mrelocatable EABI TOC function
9858 SVR4/EABI -maix AIX TOC object file
9859 SVR4/EABI -maix -mminimal-toc
9860 AIX minimal TOC translation unit
9862 Name Reg. Set by entries contains:
9863 made by addrs? fp? sum?
9865 AIX TOC 2 crt0 as Y option option
9866 AIX minimal TOC 30 prolog gcc Y Y option
9867 SVR4 SDATA 13 crt0 gcc N Y N
9868 SVR4 pic 30 prolog ld Y not yet N
9869 SVR4 PIC 30 prolog gcc Y option option
9870 EABI TOC 30 prolog gcc Y option option
9874 /* Hash table stuff for keeping track of TOC entries.  */
/* One entry per distinct (constant rtx, mode) pair emitted into the
   TOC, used by output_toc to merge duplicates.  NOTE(review): the
   rtx `key' member and the label-number member are elided in this
   excerpt.  */
9876 struct toc_hash_struct
9878   /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9879      ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
9881   enum machine_mode key_mode;
/* The one TOC-merging hash table for the whole compilation.  */
9885 static htab_t toc_hash_table;
9887 /* Hash functions for the hash table. */
/* Return a hash value for constant rtx K, folding in its code, mode
   and every operand according to the rtx format string (simple
   multiplicative hashing with constants 613/1231).  Recurses for
   rtx operands.  */
9890 rs6000_hash_constant (k)
9893   unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9894   const char *format = GET_RTX_FORMAT (GET_CODE (k));
9895   int flen = strlen (format);
/* LABEL_REFs are hashed by the number of the label they reference,
   so equivalent references hash alike regardless of rtx identity.  */
9898   if (GET_CODE (k) == LABEL_REF)
9899     return result * 1231 + X0INT (XEXP (k, 0), 3);
/* NOTE(review): the bodies adjusting the format scan for
   CONST_DOUBLE/CODE_LABEL are elided in this excerpt.  */
9901   if (GET_CODE (k) == CONST_DOUBLE)
9903   else if (GET_CODE (k) == CODE_LABEL)
9908   for (; fidx < flen; fidx++)
9909     switch (format[fidx])
9914 	  const char *str = XSTR (k, fidx);
9916 	  result = result * 613 + len;
9917 	  for (i = 0; i < len; i++)
9918 	    result = result * 613 + (unsigned) str[i];
9923 	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9927 	result = result * 613 + (unsigned) XINT (k, fidx);
/* A HOST_WIDE_INT may be wider than `unsigned'; fold it in one
   `unsigned'-sized chunk at a time in that case.  */
9930 	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9931 	  result = result * 613 + (unsigned) XWINT (k, fidx);
9935 	  for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9936 	    result = result * 613 + (unsigned) (XWINT (k, fidx)
/* htab_hash callback for toc_hash_table: hash the constant and mix
   in the machine mode, so the same constant in different modes gets
   distinct TOC entries.  */
9947 toc_hash_function (hash_entry)
9948      const void * hash_entry;
9950   const struct toc_hash_struct *thc =
9951     (const struct toc_hash_struct *) hash_entry;
9952   return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9955 /* Compare H1 and H2 for equivalence. */
/* htab_eq callback for toc_hash_table: nonzero iff entries H1 and H2
   denote the same TOC constant (same mode and equivalent rtx).  */
9958 toc_hash_eq (h1, h2)
9962   rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9963   rtx r2 = ((const struct toc_hash_struct *) h2)->key;
/* Different modes never merge.  */
9965   if (((const struct toc_hash_struct *) h1)->key_mode
9966       != ((const struct toc_hash_struct *) h2)->key_mode)
9969   /* Gotcha:  One of these const_doubles will be in memory.
9970      The other may be on the constant-pool chain.
9971      So rtx_equal_p will think they are different...  */
9974   if (GET_CODE (r1) != GET_CODE (r2)
9975       || GET_MODE (r1) != GET_MODE (r2))
/* Hence CONST_DOUBLEs are compared field-by-field instead of with
   rtx_equal_p; field 0 (the chain pointer) is deliberately skipped
   by starting at i = 1.  */
9977   if (GET_CODE (r1) == CONST_DOUBLE)
9979       int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9981       for (i = 1; i < format_len; i++)
9982 	if (XWINT (r1, i) != XWINT (r2, i))
/* LABEL_REFs compare by the referenced label's number.  */
9987   else if (GET_CODE (r1) == LABEL_REF)
9988     return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9989 	    == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9991     return rtx_equal_p (r1, r2);
9994 /* Mark the hash table-entry HASH_ENTRY. */
/* GC-mark callback for one slot of the TOC hash table: mark the
   entry itself and its key rtx so the collector keeps them live.  */
9997 toc_hash_mark_entry (hash_slot, unused)
9999      void * unused ATTRIBUTE_UNUSED;
10001   const struct toc_hash_struct * hash_entry =
10002     *(const struct toc_hash_struct **) hash_slot;
10003   rtx r = hash_entry->key;
10004   ggc_set_mark (hash_entry);
10005   /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
10006   if (GET_CODE (r) == LABEL_REF)
10009       ggc_set_mark (XEXP (r, 0));
10016 /* Mark all the elements of the TOC hash-table *HT. */
/* GC root walker: mark every element of the TOC hash table *VHT
   (a void-typed pointer to the htab_t) via toc_hash_mark_entry.  */
10019 toc_hash_mark_table (vht)
10024   htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10027 /* These are the names given by the C++ front-end to vtables, and
10028 vtable-like objects. Ideally, this logic should not be here;
10029 instead, there should be some programmatic way of inquiring as
10030 to whether or not an object is a vtable. */
/* Nonzero if NAME names a C++ vtable or vtable-like object: the old
   g++ "_vt." mangling, or the new-ABI _ZTV (vtable), _ZTT (VTT) and
   _ZTC (construction vtable) prefixes.
   Fixed: the expansion previously referred to the bare identifier
   `name', silently capturing a like-named local at each call site
   instead of using the macro argument.  Use the parameter,
   parenthesized, so any expression can be passed.  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0 \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output SYMBOL_REF X to FILE.  Vtable symbols are emitted through
   RS6000_OUTPUT_BASENAME (plain symbol reference); everything else
   goes through assemble_name.  */
10039 rs6000_output_symbol_ref (file, x)
10043   /* Currently C++ toc references to vtables can be emitted before it
10044      is decided whether the vtable is public or private.  If this is
10045      the case, then the linker will eventually complain that there is
10046      a reference to an unknown section.  Thus, for vtables only,
10047      we emit the TOC reference to reference the symbol and not the
10049   const char *name = XSTR (x, 0);
10051   if (VTABLE_NAME_P (name))
10053       RS6000_OUTPUT_BASENAME (file, name);
10056     assemble_name (file, name);
10059 /* Output a TOC entry. We derive the entry name from what is being
/* Output a TOC entry for constant X in mode MODE under internal
   label number LABELNO, merging duplicates via toc_hash_table when
   the linker will not do it for us.  NOTE(review): numerous lines
   (declarations, braces, abort paths) are elided in this excerpt.  */
10063 output_toc (file, x, labelno, mode)
10067      enum machine_mode mode;
10070   const char *name = buf;
10071   const char *real_name;
10078   /* When the linker won't eliminate them, don't output duplicate
10079      TOC entries (this happens on AIX if there is any kind of TOC,
10080      and on SVR4 under -fPIC or -mrelocatable).  */
10083       struct toc_hash_struct *h;
10086       h = ggc_alloc (sizeof (*h));
10088       h->key_mode = mode;
10089       h->labelno = labelno;
10091       found = htab_find_slot (toc_hash_table, h, 1);
10092       if (*found == NULL)
/* Duplicate: emit `.set LCthis,LCfirst' instead of a new entry.  */
10094       else  /* This is indeed a duplicate.
10095 	       Set this label equal to that label.  */
10097 	  fputs ("\t.set ", file);
10098 	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10099 	  fprintf (file, "%d,", labelno);
10100 	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10101 	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10107   /* If we're going to put a double constant in the TOC, make sure it's
10108      aligned properly when strict alignment is on.  */
10109   if (GET_CODE (x) == CONST_DOUBLE
10110       && STRICT_ALIGNMENT
10111       && GET_MODE_BITSIZE (mode) >= 64
10112       && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10113     ASM_OUTPUT_ALIGN (file, 3);
10116   ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10118   /* Handle FP constants specially.  Note that if we have a minimal
10119      TOC, things we put here aren't actually in the TOC, so we can allow
/* DFmode double: emit the two 32-bit target words, either as one
   64-bit datum or as a pair, depending on pointer size.  */
10121   if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10123       REAL_VALUE_TYPE rv;
10126       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10127       REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10131 	  if (TARGET_MINIMAL_TOC)
10132 	    fputs (DOUBLE_INT_ASM_OP, file);
10134 	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10135 	  fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10140 	  if (TARGET_MINIMAL_TOC)
10141 	    fputs ("\t.long ", file);
10143 	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10144 	  fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
/* SFmode float: single 32-bit target word, zero-padded to 64 bits
   on 64-bit targets.  */
10148   else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10150       REAL_VALUE_TYPE rv;
10153       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10154       REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10158 	  if (TARGET_MINIMAL_TOC)
10159 	    fputs (DOUBLE_INT_ASM_OP, file);
10161 	    fprintf (file, "\t.tc FS_%lx[TC],", l);
10162 	  fprintf (file, "0x%lx00000000\n", l);
10167 	  if (TARGET_MINIMAL_TOC)
10168 	    fputs ("\t.long ", file);
10170 	    fprintf (file, "\t.tc FS_%lx[TC],", l);
10171 	  fprintf (file, "0x%lx\n", l);
/* VOIDmode integer constant (CONST_INT or wide CONST_DOUBLE): split
   into high/low host words, then pad/shift to the TOC slot size.  */
10175   else if (GET_MODE (x) == VOIDmode
10176 	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10178       unsigned HOST_WIDE_INT low;
10179       HOST_WIDE_INT high;
10181       if (GET_CODE (x) == CONST_DOUBLE)
10183 	  low = CONST_DOUBLE_LOW (x);
10184 	  high = CONST_DOUBLE_HIGH (x);
10187 #if HOST_BITS_PER_WIDE_INT == 32
10190 	  high = (low & 0x80000000) ? ~0 : 0;
10194 	  low = INTVAL (x) & 0xffffffff;
10195 	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10199       /* TOC entries are always Pmode-sized, but since this
10200 	 is a bigendian machine then if we're putting smaller
10201 	 integer constants in the TOC we have to pad them.
10202 	 (This is still a win over putting the constants in
10203 	 a separate constant pool, because then we'd have
10204 	 to have both a TOC entry _and_ the actual constant.)
10206 	 For a 32-bit target, CONST_INT values are loaded and shifted
10207 	 entirely within `low' and can be stored in one TOC entry.  */
10209       if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10210 	abort ();/* It would be easy to make this work, but it doesn't now.  */
10212       if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10213 	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10214 		       POINTER_SIZE, &low, &high, 0);
10218 	  if (TARGET_MINIMAL_TOC)
10219 	    fputs (DOUBLE_INT_ASM_OP, file);
10221 	    fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10222 	  fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10227 	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10229 	      if (TARGET_MINIMAL_TOC)
10230 		fputs ("\t.long ", file);
10232 		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10233 			 (long) high, (long) low);
10234 	      fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10238 	      if (TARGET_MINIMAL_TOC)
10239 		fputs ("\t.long ", file);
10241 		fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10242 	      fprintf (file, "0x%lx\n", (long) low);
/* Symbolic constant, possibly `symbol + offset' wrapped in CONST:
   peel the CONST to find the base symbol/label and the offset.  */
10248   if (GET_CODE (x) == CONST)
10250       if (GET_CODE (XEXP (x, 0)) != PLUS)
10253       base = XEXP (XEXP (x, 0), 0);
10254       offset = INTVAL (XEXP (XEXP (x, 0), 1));
10257   if (GET_CODE (base) == SYMBOL_REF)
10258     name = XSTR (base, 0);
10259   else if (GET_CODE (base) == LABEL_REF)
10260     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10261   else if (GET_CODE (base) == CODE_LABEL)
10262     ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10266   STRIP_NAME_ENCODING (real_name, name);
10267   if (TARGET_MINIMAL_TOC)
10268     fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
/* AIX .tc entry name encodes the offset: `.N<abs>' for negative,
   `.P<offset>' for positive.  */
10271       fprintf (file, "\t.tc %s", real_name);
10274 	fprintf (file, ".N%d", - offset);
10276 	fprintf (file, ".P%d", offset);
10278       fputs ("[TC],", file);
10281   /* Currently C++ toc references to vtables can be emitted before it
10282      is decided whether the vtable is public or private.  If this is
10283      the case, then the linker will eventually complain that there is
10284      a TOC reference to an unknown section.  Thus, for vtables only,
10285      we emit the TOC reference to reference the symbol and not the
10287   if (VTABLE_NAME_P (name))
10289       RS6000_OUTPUT_BASENAME (file, name);
10291 	fprintf (file, "%d", offset);
10292       else if (offset > 0)
10293 	fprintf (file, "+%d", offset);
10296     output_addr_const (file, x);
10300 /* Output an assembler pseudo-op to write an ASCII string of N characters
10301 starting at P to FILE.
10303 On the RS/6000, we have to do this using the .byte operation and
10304 write out special characters outside the quoted string.
10305 Also, the assembler is broken; very long strings are truncated,
10306 so we must artificially break them up early. */
/* Write the N bytes at P to FILE as AIX `.byte' directives, quoting
   printable runs as strings and emitting other bytes as decimal.
   Strings are broken every 512 characters because the assembler
   truncates very long strings (see the comment above).  */
10309 output_ascii (file, p, n)
10315   int i, count_string;
/* These three strings hold the separator to emit before the next
   quoted character, the next decimal byte, and the pending closing
   quote (NULL when no string is open); they are swapped as we
   alternate between string and decimal output modes.  */
10316   const char *for_string = "\t.byte \"";
10317   const char *for_decimal = "\t.byte ";
10318   const char *to_close = NULL;
10321   for (i = 0; i < n; i++)
/* Printable ASCII (space .. 0176) goes inside a quoted string.  */
10324       if (c >= ' ' && c < 0177)
10327 	    fputs (for_string, file);
10330 	  /* Write two quotes to get one.  */
10338 	  for_decimal = "\"\n\t.byte ";
/* Assembler limit: close and restart the string every 512 chars.  */
10342 	  if (count_string >= 512)
10344 	      fputs (to_close, file);
10346 	      for_string = "\t.byte \"";
10347 	      for_decimal = "\t.byte ";
/* Non-printable byte: emit as a decimal .byte operand.  */
10355 	  fputs (for_decimal, file);
10356 	  fprintf (file, "%d", c);
10358 	  for_string = "\n\t.byte \"";
10359 	  for_decimal = ", ";
10365   /* Now close the string if we have written one.  Then end the line.  */
10367     fputs (to_close, file);
10370 /* Generate a unique section name for FILENAME for a section type
10371 represented by SECTION_DESC. Output goes into BUF.
10373 SECTION_DESC can be any string, as long as it is different for each
10374 possible section type.
10376 We name the section in the same manner as xlc. The name begins with an
10377 underscore followed by the filename (after stripping any leading directory
10378 names) with the last period replaced by the string SECTION_DESC. If
10379 FILENAME does not contain a period, SECTION_DESC is appended to the end of
/* Build in *BUF a unique section name for FILENAME and SECTION_DESC,
   in xlc style: `_' + basename with its last `.suffix' replaced by
   SECTION_DESC (appended if there is no period).  Non-alphanumeric
   characters are dropped.  Storage comes from permalloc.  */
10383 rs6000_gen_section_name (buf, filename, section_desc)
10385      const char *filename;
10386      const char *section_desc;
10388   const char *q, *after_last_slash, *last_period = 0;
/* First pass: find the basename start and the last `.'.  */
10392   after_last_slash = filename;
10393   for (q = filename; *q; q++)
10396 	after_last_slash = q + 1;
10397       else if (*q == '.')
/* +2: one for the leading `_', one for the trailing NUL.  */
10401   len = strlen (after_last_slash) + strlen (section_desc) + 2;
10402   *buf = (char *) permalloc (len);
/* Second pass: copy, splicing SECTION_DESC in place of the final
   period and skipping any other non-alphanumerics.  */
10407   for (q = after_last_slash; *q; q++)
10409       if (q == last_period)
10411 	  strcpy (p, section_desc);
10412 	  p += strlen (section_desc);
10415       else if (ISALNUM (*q))
/* No period in the basename: append SECTION_DESC at the end.  */
10419   if (last_period == 0)
10420     strcpy (p, section_desc);
10425 /* Emit profile function. */
/* Emit RTL to call the profiling routine (mcount) for the current
   function; LABELNO numbers the per-function profile label.  Only
   the AIX and Darwin ABIs emit anything here; V.4 is handled in
   output_function_profiler.  */
10428 output_profile_hook (labelno)
10431   if (DEFAULT_ABI == ABI_AIX)
10434       const char *label_name;
/* Pass the address of the LP<labelno> count word to mcount.  */
10439       ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10440       STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10441       fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10443       emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10446   else if (DEFAULT_ABI == ABI_DARWIN)
10448       const char *mcount_name = RS6000_MCOUNT;
10449       int caller_addr_regno = LINK_REGISTER_REGNUM;
10451       /* Be conservative and always set this, at least for now.  */
10452       current_function_uses_pic_offset_table = 1;
10455       /* For PIC code, set up a stub and collect the caller's address
10456 	 from r0, which is where the prologue puts it.  */
10459 	  mcount_name = machopic_stub_name (mcount_name);
10460 	  if (current_function_uses_pic_offset_table)
10461 	    caller_addr_regno = 0;
/* mcount receives the caller's return address as its argument.  */
10464       emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10466 			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10470 /* Write function profiler code. */
/* Write assembler profiler code for function label LABELNO to FILE.
   V.4/eabi emits the mcount call inline here (three variants by PIC
   level); AIX/Darwin do nothing here since output_profile_hook
   already emitted the call as RTL.  */
10473 output_function_profiler (file, labelno)
10479   ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10480   switch (DEFAULT_ABI)
10486     case ABI_AIX_NODESC:
/* Save LR, then load the address of the LP<labelno> count word into
   r0 by the cheapest sequence the PIC level allows.  */
10487       fprintf (file, "\tmflr %s\n", reg_names[0]);
/* -fpic: address via the GOT.  */
10490 	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10491 	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10492 		       reg_names[0], reg_names[1]);
10493 	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10494 	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10495 	  assemble_name (file, buf);
10496 	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
/* -fPIC: pc-relative via an inline `bl 1f' / .long pair.  */
10498       else if (flag_pic > 1)
10500 	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10501 		       reg_names[0], reg_names[1]);
10502 	  /* Now, we need to get the address of the label.  */
10503 	  fputs ("\tbl 1f\n\t.long ", file);
10504 	  assemble_name (file, buf);
10505 	  fputs ("-.\n1:", file);
10506 	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10507 	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10508 		       reg_names[0], reg_names[11]);
10509 	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10510 		       reg_names[0], reg_names[0], reg_names[11]);
/* Non-PIC: plain hi/lo address pair.  */
10514 	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10515 	  assemble_name (file, buf);
10516 	  fputs ("@ha\n", file);
10517 	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10518 		       reg_names[0], reg_names[1]);
10519 	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10520 	  assemble_name (file, buf);
10521 	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
/* Preserve the static chain around the mcount call in r30.  */
10524       if (current_function_needs_context)
10525 	asm_fprintf (file, "\tmr %s,%s\n",
10526 		     reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10527       fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10528       if (current_function_needs_context)
10529 	asm_fprintf (file, "\tmr %s,%s\n",
10530 		     reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10535       /* Don't do anything, done in output_profile_hook ().  */
10541 /* Adjust the cost of a scheduling dependency. Return the new cost of
10542 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* Scheduler hook: adjust the cost of dependency LINK from DEP_INSN
   to INSN; COST is the current cost.  Anti/output dependencies get
   cost 0; true data dependencies get special-cased latencies.  */
10545 rs6000_adjust_cost (insn, link, dep_insn, cost)
10548      rtx dep_insn ATTRIBUTE_UNUSED;
/* Unrecognizable insns keep the default cost.  */
10551   if (! recog_memoized (insn))
/* REG_NOTE_KIND != 0 means anti or output dependency.  */
10554   if (REG_NOTE_KIND (link) != 0)
10557   if (REG_NOTE_KIND (link) == 0)
10559       /* Data dependency; DEP_INSN writes a register that INSN reads
10560 	 some cycles later.  */
10561       switch (get_attr_type (insn))
10564 	  /* Tell the first scheduling pass about the latency between
10565 	     a mtctr and bctr (and mtlr and br/blr).  The first
10566 	     scheduling pass will not know about this latency since
10567 	     the mtctr instruction, which has the latency associated
10568 	     to it, will be generated by reload.  */
10569 	  return TARGET_POWER ? 5 : 4;
10571 	  /* Leave some extra cycles between a compare and its
10572 	     dependent branch, to inhibit expensive mispredicts.  */
10573 	  if ((rs6000_cpu_attr == CPU_PPC750
10574 	       || rs6000_cpu_attr == CPU_PPC7400
10575 	       || rs6000_cpu_attr == CPU_PPC7450)
10576 	      && recog_memoized (dep_insn)
10577 	      && (INSN_CODE (dep_insn) >= 0)
10578 	      && (get_attr_type (dep_insn) == TYPE_COMPARE
10579 		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10580 		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10581 		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10586 	  /* Fall out to return default cost.  */
10592 /* A C statement (sans semicolon) to update the integer scheduling
10593 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10594 INSN earlier, increase the priority to execute INSN later. Do not
10595 define this macro if you do not need to adjust the scheduling
10596 priorities of insns. */
/* Scheduler hook: return an adjusted scheduling priority for INSN
   (lower runs earlier).  See the comment above; currently tweaks
   multiply/divide priority on CPUs with asymmetric integer units.
   NOTE(review): the debug fprintf below appears to be guarded by an
   elided `#if 0'/debug conditional in the full source — confirm.  */
10599 rs6000_adjust_priority (insn, priority)
10600      rtx insn ATTRIBUTE_UNUSED;
10603   /* On machines (like the 750) which have asymmetric integer units,
10604      where one integer unit can do multiply and divides and the other
10605      can't, reduce the priority of multiply/divide so it is scheduled
10606      before other integer operations.  */
10609   if (! INSN_P (insn))
10612   if (GET_CODE (PATTERN (insn)) == USE)
10615   switch (rs6000_cpu_attr) {
10617     switch (get_attr_type (insn))
10624 	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10625 		 priority, priority);
10626 	if (priority >= 0 && priority < 0x01000000)
10636 /* Return how many instructions the machine can issue per cycle. */
/* Scheduler hook: return how many instructions the target CPU can
   issue per cycle (dispatch width), selected by rs6000_cpu_attr.
   NOTE(review): most case labels and return values are elided in
   this excerpt.  */
10639 rs6000_issue_rate ()
10641   switch (rs6000_cpu_attr) {
10642   case CPU_RIOS1:  /* ? */
10644   case CPU_PPC601: /* ? */
10663 /* Length in units of the trampoline for entering a nested function. */
/* Return the size in bytes of the trampoline for entering a nested
   function: the 3-word function descriptor under AIX (12/24 bytes
   for 32/64-bit), a 40/48-byte code stub for the descriptor-less
   ABIs.  */
10666 rs6000_trampoline_size ()
10670   switch (DEFAULT_ABI)
/* AIX: descriptor is 3 pointer-sized words.  */
10676       ret = (TARGET_32BIT) ? 12 : 24;
10681     case ABI_AIX_NODESC:
10682       ret = (TARGET_32BIT) ? 40 : 48;
10689 /* Emit RTL insns to initialize the variable parts of a trampoline.
10690 FNADDR is an RTX for the address of the function's pure code.
10691 CXT is an RTX for the static chain value for the function. */
/* Emit RTL insns to initialize the variable parts of a trampoline at
   ADDR.  FNADDR is an RTX for the address of the function's pure
   code; CXT is an RTX for the function's static chain value.  */
10694 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10699   enum machine_mode pmode = Pmode;
10700   int regsize = (TARGET_32BIT) ? 4 : 8;
10701   rtx ctx_reg = force_reg (pmode, cxt);
10703   switch (DEFAULT_ABI)
10708 /* Macros to shorten the code expansions below.  */
10709 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10710 #define MEM_PLUS(addr,offset) \
10711   gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10713     /* Under AIX, just build the 3 word function descriptor */
/* Copy entry point and TOC from FNADDR's descriptor, then store the
   static chain as the third word.  */
10716 	rtx fn_reg = gen_reg_rtx (pmode);
10717 	rtx toc_reg = gen_reg_rtx (pmode);
10718 	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10719 	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10720 	emit_move_insn (MEM_DEREF (addr), fn_reg);
10721 	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10722 	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10726     /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
10729     case ABI_AIX_NODESC:
10730       emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10731 			 FALSE, VOIDmode, 4,
10733 			 GEN_INT (rs6000_trampoline_size ()), SImode,
10743 /* Table of valid machine attributes. */
/* Table of machine attributes this back end understands; consumed by
   the generic attribute machinery.  Terminated by the NULL entry.  */
10745 const struct attribute_spec rs6000_attribute_table[] =
10747   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10748   { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
10749   { NULL,       0, 0, false, false, false, NULL }
10752 /* Handle a "longcall" attribute; arguments as in struct
10753 attribute_spec.handler. */
/* Handle a "longcall" attribute; arguments as in struct
   attribute_spec.handler.  Warns and drops the attribute when it is
   applied to something other than a function type (or a FIELD_DECL/
   TYPE_DECL, through which a function type may be reached).  */
10756 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10759      tree args ATTRIBUTE_UNUSED;
10760      int flags ATTRIBUTE_UNUSED;
10761      bool *no_add_attrs;
10763   if (TREE_CODE (*node) != FUNCTION_TYPE
10764       && TREE_CODE (*node) != FIELD_DECL
10765       && TREE_CODE (*node) != TYPE_DECL)
10767       warning ("`%s' attribute only applies to functions",
10768 	       IDENTIFIER_POINTER (name));
/* Tell the caller not to attach the attribute.  */
10769       *no_add_attrs = true;
10775 /* Return a reference suitable for calling a function with the
10776 longcall attribute. */
/* Return a reference suitable for calling CALL_REF with the longcall
   attribute: the address is forced into a register so the call goes
   through CTR/LR rather than a direct branch.  Non-SYMBOL_REFs and
   already-register references pass through via force_reg.  */
10779 rs6000_longcall_ref (call_ref)
10782   const char *call_name;
10785   if (GET_CODE (call_ref) != SYMBOL_REF)
10788   /* System V adds '.' to the internal name, so skip them.  */
10789   call_name = XSTR (call_ref, 0);
10790   if (*call_name == '.')
10792       while (*call_name == '.')
/* Rebuild the SYMBOL_REF around the stripped, interned name.  */
10795       node = get_identifier (call_name);
10796       call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10799   return force_reg (Pmode, call_ref);
10803 /* A C statement or statements to switch to the appropriate section
10804 for output of RTX in mode MODE. You can assume that RTX is some
10805 kind of constant in RTL. The argument MODE is redundant except in
10806 the case of a `const_int' rtx. Select the section by calling
10807 `text_section' or one of the alternatives for other sections.
10809 Do not define this macro if you put all constants in the read-only
10812 #ifdef USING_ELFOS_H
/* NOTE(review): elided excerpt — the function's return type, the `x'
   parameter declaration, braces, and the section-switching calls taken in
   each branch are not visible.  Visible logic: constants eligible for the
   TOC, and symbolic/label/const references, are special-cased before
   falling through to the default constant section.  */
10815 rs6000_select_rtx_section (mode, x)
10816 enum machine_mode mode;
10819 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10822 && (GET_CODE (x) == SYMBOL_REF
10823 || GET_CODE (x) == LABEL_REF
10824 || GET_CODE (x) == CONST))
10830 /* A C statement or statements to switch to the appropriate
10831 section for output of DECL. DECL is either a `VAR_DECL' node
10832 or a constant of some sort. RELOC indicates whether forming
10833 the initial value of DECL requires link-time relocations. */
/* NOTE(review): elided excerpt — return type, parameter declarations,
   braces, the `needs_sdata'/`readonly' local declarations, and the bodies
   of the sec_funcs initializer are not visible.  Visible logic: classify
   DECL as small-data eligible (size in (0, g_switch_value] with sdata
   enabled) and as read-only (string constants when strings are not
   writable; VAR_DECLs with constant, side-effect-free initializers and no
   pic relocations; constant CONSTRUCTORs), then dispatch to one of four
   section functions indexed by (readonly, needs_sdata).  */
10836 rs6000_select_section (decl, reloc)
10840 int size = int_size_in_bytes (TREE_TYPE (decl));
10843 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10850 needs_sdata = (size > 0
10851 && size <= g_switch_value
10852 && rs6000_sdata != SDATA_NONE
10853 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10855 if (TREE_CODE (decl) == STRING_CST)
10856 readonly = ! flag_writable_strings;
10857 else if (TREE_CODE (decl) == VAR_DECL)
10858 readonly = (! (flag_pic && reloc)
10859 && TREE_READONLY (decl)
10860 && ! TREE_SIDE_EFFECTS (decl)
10861 && DECL_INITIAL (decl)
10862 && DECL_INITIAL (decl) != error_mark_node
10863 && TREE_CONSTANT (DECL_INITIAL (decl)));
10864 else if (TREE_CODE (decl) == CONSTRUCTOR)
10865 readonly = (! (flag_pic && reloc)
10866 && ! TREE_SIDE_EFFECTS (decl)
10867 && TREE_CONSTANT (decl));
/* Small read-only data (.sdata2) is only meaningful for EABI; demote
   otherwise.  The demotion statement itself is elided here.  */
10870 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10873 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10876 /* A C statement to build up a unique section name, expressed as a
10877 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10878 RELOC indicates whether the initial value of EXP requires
10879 link-time relocations. If you do not define this macro, GCC will use
10880 the symbol name prefixed by `.' as the section name. Note - this
10881 macro can now be called for uninitialised data items as well as
10882 initialised data and functions. */
/* NOTE(review): elided excerpt — return type, parameter declarations,
   braces, and the declarations of locals `sec', `size', `len', `name',
   `string', `readonly', `needs_sdata' are not visible, as are the
   statements assigning `sec' for the non-function cases.  Visible logic:
   pick a prefix from the 7x2 table (row = section class, column =
   DECL_ONE_ONLY for .gnu.linkonce), concatenate prefix + assembler name,
   and install the result as DECL_SECTION_NAME.  */
10885 rs6000_unique_section (decl, reloc)
10893 const char *prefix;
10895 static const char *const prefixes[7][2] =
10897 { ".rodata.", ".gnu.linkonce.r." },
10898 { ".sdata2.", ".gnu.linkonce.s2." },
10899 { ".data.", ".gnu.linkonce.d." },
10900 { ".sdata.", ".gnu.linkonce.s." },
10901 { ".bss.", ".gnu.linkonce.b." },
10902 { ".sbss.", ".gnu.linkonce.sb." },
10903 { ".text.", ".gnu.linkonce.t." }
10906 if (TREE_CODE (decl) == FUNCTION_DECL)
10915 if (TREE_CODE (decl) == STRING_CST)
10916 readonly = ! flag_writable_strings;
10917 else if (TREE_CODE (decl) == VAR_DECL)
10918 readonly = (! (flag_pic && reloc)
10919 && TREE_READONLY (decl)
10920 && ! TREE_SIDE_EFFECTS (decl)
10921 && TREE_CONSTANT (DECL_INITIAL (decl)))
10923 size = int_size_in_bytes (TREE_TYPE (decl));
10924 needs_sdata = (size > 0
10925 && size <= g_switch_value
10926 && rs6000_sdata != SDATA_NONE
10927 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
/* Uninitialized (or error-initialized) data goes to a BSS-style row;
   writable data to a data-style row.  The `sec' assignments themselves
   are elided from this excerpt.  */
10929 if (DECL_INITIAL (decl) == 0
10930 || DECL_INITIAL (decl) == error_mark_node)
10932 else if (! readonly)
10939 /* .sdata2 is only for EABI. */
10940 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10946 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10947 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10948 len = strlen (name) + strlen (prefix);
10949 string = alloca (len + 1);
10951 sprintf (string, "%s%s", prefix, name);
10953 DECL_SECTION_NAME (decl) = build_string (len, string);
10957 /* If we are referencing a function that is static or is known to be
10958 in this file, make the SYMBOL_REF special. We can use this to indicate
10959 that we can branch to this function without emitting a no-op after the
10960 call. For real AIX calling sequences, we also replace the
10961 function name with the real name (1 or 2 leading .'s), rather than
10962 the function descriptor name. This saves a lot of overriding code
10963 to read the prefixes. */
/* NOTE(review): elided excerpt — return type, the `first' parameter
   declaration, braces, the str[0]='.' assignment, and several locals are
   not visible.  Visible logic: (1) mark locally-bound, non-weak function
   symbols via SYMBOL_REF_FLAG; (2) under the AIX ABI, prepend '.' to the
   symbol name; (3) under V.4 with small data enabled, prepend the '@'
   small-data marker to VAR_DECL symbols that land in an sdata/sbss
   section or fit under -G.  */
10966 rs6000_encode_section_info (decl, first)
10973 if (TREE_CODE (decl) == FUNCTION_DECL)
10975 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10976 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10977 && ! DECL_WEAK (decl))
10978 SYMBOL_REF_FLAG (sym_ref) = 1;
10980 if (DEFAULT_ABI == ABI_AIX)
/* NOTE(review): inside this branch DEFAULT_ABI == ABI_AIX always holds,
   so the ternary below always yields 1 — presumably a leftover from a
   second ABI case; confirm against the unelided source.  */
10982 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10983 size_t len2 = strlen (XSTR (sym_ref, 0));
10984 char *str = alloca (len1 + len2 + 1);
10987 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10989 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10992 else if (rs6000_sdata != SDATA_NONE
10993 && DEFAULT_ABI == ABI_V4
10994 && TREE_CODE (decl) == VAR_DECL)
10996 int size = int_size_in_bytes (TREE_TYPE (decl));
10997 tree section_name = DECL_SECTION_NAME (decl);
10998 const char *name = (char *)0;
11003 if (TREE_CODE (section_name) == STRING_CST)
11005 name = TREE_STRING_POINTER (section_name);
11006 len = TREE_STRING_LENGTH (section_name);
/* Decl is small-data if it fits under -G, or if it was explicitly
   placed in one of the recognized small-data sections by name.  */
11012 if ((size > 0 && size <= g_switch_value)
11014 && ((len == sizeof (".sdata") - 1
11015 && strcmp (name, ".sdata") == 0)
11016 || (len == sizeof (".sdata2") - 1
11017 && strcmp (name, ".sdata2") == 0)
11018 || (len == sizeof (".sbss") - 1
11019 && strcmp (name, ".sbss") == 0)
11020 || (len == sizeof (".sbss2") - 1
11021 && strcmp (name, ".sbss2") == 0)
11022 || (len == sizeof (".PPC.EMB.sdata0") - 1
11023 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11024 || (len == sizeof (".PPC.EMB.sbss0") - 1
11025 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11027 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11028 size_t len = strlen (XSTR (sym_ref, 0));
11029 char *str = alloca (len + 2);
11032 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11033 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11038 #endif /* USING_ELFOS_H */
11041 /* Return a REG that occurs in ADDR with coefficient 1.
11042 ADDR can be effectively incremented by incrementing REG.
11044 r0 is special and we must not select it as an address
11045 register by this routine since our caller will try to
11046 increment the returned register via an "la" instruction. */
/* NOTE(review): elided excerpt — return type, the `addr' parameter
   declaration, braces, and the terminal return/abort statements are not
   visible.  Visible logic: walk down nested PLUS expressions, always
   descending into the operand that is a non-r0 register (or away from a
   constant operand), until a bare non-r0 REG is reached.  */
11049 find_addr_reg (addr)
11052 while (GET_CODE (addr) == PLUS)
11054 if (GET_CODE (XEXP (addr, 0)) == REG
11055 && REGNO (XEXP (addr, 0)) != 0)
11056 addr = XEXP (addr, 0);
11057 else if (GET_CODE (XEXP (addr, 1)) == REG
11058 && REGNO (XEXP (addr, 1)) != 0)
11059 addr = XEXP (addr, 1);
11060 else if (CONSTANT_P (XEXP (addr, 0)))
11061 addr = XEXP (addr, 1);
11062 else if (CONSTANT_P (XEXP (addr, 1)))
11063 addr = XEXP (addr, 0);
11067 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
/* Report an uncorrectable address in INSN/operand OP and abort compilation.
   NOTE(review): return type, parameter declaration, and braces are elided
   from this excerpt.  */
11073 rs6000_fatal_bad_address (op)
11076 fatal_insn ("bad address", op);
11079 /* Called to register all of our global variables with the garbage
/* NOTE(review): elided excerpt — the rest of the comment, the function
   header, and braces are not visible.  Registers the saved compare
   operands as GC roots, creates the TOC hash table with its own
   hash/eq/mark callbacks, and (on Mach-O targets) registers the Darwin
   PIC roots as well.  */
11083 rs6000_add_gc_roots ()
11085 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11086 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11088 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11089 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11090 toc_hash_mark_table);
11093 machopic_add_gc_roots ();
11100 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11101 reference and a constant. */
/* NOTE(review): elided excerpt — return type, parameter declaration,
   braces, and the switch's case labels/other cases are not visible.  As
   excerpted, the `||' / `&&' mix in the return expression lacks grouping
   parentheses; presumably the SYMBOL_REF test and the
   (SYMBOL_REF|LABEL_REF)+CONST_INT test belong to separate switch cases
   in the full source — confirm against the unelided file.  */
11104 symbolic_operand (op)
11107 switch (GET_CODE (op))
11114 return (GET_CODE (op) == SYMBOL_REF ||
11115 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11116 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11117 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11124 #ifdef RS6000_LONG_BRANCH
/* Linked list of compiler-generated long-branch stubs, newest first.  */
11126 static tree stub_list = 0;
11128 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11129 procedure calls to the linked list. */
/* NOTE(review): elided excerpt — return type, the `label_name' and
   `line_number' parameter declarations, braces, and the statement linking
   the new node into stub_list's head are not visible.  Each stub node is
   a TREE_LIST: purpose = function name, value = label name, and the line
   number is smuggled in TREE_TYPE as an INT_CST.  */
11132 add_compiler_stub (label_name, function_name, line_number)
11134 tree function_name;
11137 tree stub = build_tree_list (function_name, label_name);
11138 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11139 TREE_CHAIN (stub) = stub_list;
/* Accessors mirroring the node layout described above.  */
11143 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11144 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11145 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11147 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11148 handling procedure calls from the linked list and initializes the
/* NOTE(review): elided excerpt — the rest of the comment, the return
   type, braces, the tmp_buf declaration, and the stub_list reset at the
   end are not visible.  For each recorded stub: emit the stub label,
   optional .stabd line-number stabs around it, then a four-instruction
   sequence loading the target address into r12 (lis/ori) and branching
   through CTR.  */
11152 output_compiler_stub ()
11155 char label_buf[256];
11157 tree tmp_stub, stub;
11160 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11162 fprintf (asm_out_file,
11163 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11165 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11166 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11167 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11168 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
/* A leading '*' on the function name means "already assembler-visible";
   otherwise prefix the user-level name with '_'.  */
11170 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11172 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11175 label_buf[0] = '_';
11176 strcpy (label_buf+1,
11177 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11180 strcpy (tmp_buf, "lis r12,hi16(");
11181 strcat (tmp_buf, label_buf);
11182 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11183 strcat (tmp_buf, label_buf);
11184 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11185 output_asm_insn (tmp_buf, 0);
11187 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11188 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11189 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11190 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11196 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11197 already there or not. */
/* NOTE(review): elided excerpt — return type, braces, the `stub' local
   declaration, and the return statements (0 on match, 1 otherwise) are
   not visible.  Linear scan of stub_list by identifier equality.  */
11200 no_previous_def (function_name)
11201 tree function_name;
11204 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11205 if (function_name == STUB_FUNCTION_NAME (stub))
11210 /* GET_PREV_LABEL gets the label name from the previous definition of
/* NOTE(review): elided excerpt — the rest of the comment, return type,
   braces, the `stub' local declaration, and the not-found return are not
   visible.  Companion lookup to no_previous_def: returns the recorded
   stub label for FUNCTION_NAME.  */
11214 get_prev_label (function_name)
11215 tree function_name;
11218 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11219 if (function_name == STUB_FUNCTION_NAME (stub))
11220 return STUB_LABEL_NAME (stub);
11224 /* INSN is either a function call or a millicode call. It may have an
11225 unconditional jump in its delay slot.
11227 CALL_DEST is the routine we are calling. */
/* NOTE(review): elided excerpt — return type, parameter declarations for
   insn/call_dest, braces, several locals (labelname, line_number), and
   the final `return buf;' are not visible.  Visible logic: for direct
   calls under -mlongcall without PIC, ensure a long-branch stub exists
   (creating one and recording the current line number if this is the
   first call to the function) and emit a `jbsr' through the stub label;
   otherwise emit a plain `bl'.  */
11230 output_call (insn, call_dest, operand_number)
11233 int operand_number;
11235 static char buf[256];
11236 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11239 tree funname = get_identifier (XSTR (call_dest, 0));
11241 if (no_previous_def (funname))
11244 rtx label_rtx = gen_label_rtx ();
11245 char *label_buf, temp_buf[256];
11246 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11247 CODE_LABEL_NUMBER (label_rtx));
11248 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11249 labelname = get_identifier (label_buf);
/* Walk backwards to the nearest NOTE to recover a source line number
   for the stub's debug stabs.  */
11250 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11252 line_number = NOTE_LINE_NUMBER (insn);
11253 add_compiler_stub (labelname, funname, line_number);
11256 labelname = get_prev_label (funname);
11258 sprintf (buf, "jbsr %%z%d,%.246s",
11259 operand_number, IDENTIFIER_POINTER (labelname));
11264 sprintf (buf, "bl %%z%d", operand_number);
11269 #endif /* RS6000_LONG_BRANCH */
/* Build a Darwin local label "L<N>$<symbol>" into BUF, preserving the
   quoting of SYMBOL: an already-quoted symbol keeps its leading quote, a
   symbol needing quotes gets fully quoted, others are emitted bare.
   NOTE(review): the macro's do/while wrapper and some branch lines are
   elided from this excerpt; no lines may be inserted inside the
   backslash-continued body, so this note stands above it.  */
11271 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11273 const char *const symbol_ = (SYMBOL); \
11274 char *buffer_ = (BUF); \
11275 if (symbol_[0] == '"') \
11277 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11279 else if (name_needs_quotes(symbol_)) \
11281 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11285 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11290 /* Generate PIC and indirect symbol stubs. */
/* NOTE(review): elided excerpt — return type, the `file' parameter
   declaration, braces, the label-counter increment, and the
   MACHOPIC_PURE test selecting between the two stub sections are not
   visible.  Visible logic: strip name encoding, build the symbol, lazy
   pointer, and local-label names, emit the stub that materializes the
   lazy pointer's address pc-relatively (bcl/mflr/addis/lwz/mtctr/bctr),
   and emit the lazy pointer itself initialized to
   dyld_stub_binding_helper.  */
11293 machopic_output_stub (file, symb, stub)
11295 const char *symb, *stub;
11297 unsigned int length;
11298 char *symbol_name, *lazy_ptr_name;
11299 char *local_label_0;
11300 static int label = 0;
11302 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11303 STRIP_NAME_ENCODING (symb, symb);
11307 length = strlen (symb);
11308 symbol_name = alloca (length + 32);
11309 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11311 lazy_ptr_name = alloca (length + 32);
11312 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11314 local_label_0 = alloca (length + 32);
11315 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11318 machopic_picsymbol_stub_section ();
11320 machopic_symbol_stub_section ();
11322 fprintf (file, "%s:\n", stub);
11323 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
/* bcl 20,31 is a branch-and-link that avoids polluting the link
   register predictor; the saved LR gives the stub its own address.  */
11327 fprintf (file, "\tmflr r0\n");
11328 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11329 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11330 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11331 lazy_ptr_name, local_label_0);
11332 fprintf (file, "\tmtlr r0\n");
11333 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11334 lazy_ptr_name, local_label_0);
11335 fprintf (file, "\tmtctr r12\n");
11336 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11337 lazy_ptr_name, local_label_0);
11338 fprintf (file, "\tbctr\n");
11341 fprintf (file, "non-pure not supported\n");
11343 machopic_lazy_symbol_ptr_section ();
11344 fprintf (file, "%s:\n", lazy_ptr_name);
11345 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11346 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11349 /* Legitimize PIC addresses. If the address is already
11350 position-independent, we return ORIG. Newly generated
11351 position-independent addresses go into a reg. This is REG if non
11352 zero, otherwise we allocate register(s) as necessary. */
/* True iff X is a CONST_INT representable as a signed 16-bit offset.  */
11354 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
/* NOTE(review): elided excerpt — return type, the `orig'/`reg' parameter
   declarations, braces, the `base'/`offset' local declarations, and the
   early return for the already-pic case are not visible.  Visible logic:
   for CONST(PLUS ...) addresses, recursively legitimize both halves;
   fold a small constant offset with plus_constant, force a large one
   into a register (or spill the whole constant to memory during reload),
   and rebuild the PLUS; anything else defers to the generic Darwin
   machopic legitimizer.  */
11357 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11359 enum machine_mode mode;
11364 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11365 reg = gen_reg_rtx (Pmode);
11367 if (GET_CODE (orig) == CONST)
11369 if (GET_CODE (XEXP (orig, 0)) == PLUS
11370 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11373 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11376 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11379 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11385 if (GET_CODE (offset) == CONST_INT)
11387 if (SMALL_INT (offset))
11388 return plus_constant (base, INTVAL (offset));
11389 else if (! reload_in_progress && ! reload_completed)
11390 offset = force_reg (Pmode, offset);
11393 rtx mem = force_const_mem (Pmode, orig);
11394 return machopic_legitimize_pic_address (mem, Pmode, reg);
11397 return gen_rtx (PLUS, Pmode, base, offset);
11400 /* Fall back on generic machopic code. */
11401 return machopic_legitimize_pic_address (orig, mode, reg);
11404 /* This is just a placeholder to make linking work without having to
11405 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11406 ever needed for Darwin (not too likely!) this would have to get a
11407 real definition. */
/* NOTE(review): the placeholder definition this comment refers to is
   elided from this excerpt.  */
11414 #endif /* TARGET_MACHO */
/* Compute ELF section flags for NAME/DECL, forcing SECTION_WRITE under
   -mrelocatable since @fixup relocations patch the section at startup.
   NOTE(review): parameter declarations, braces, and the `return flags;'
   are elided from this excerpt.  */
11417 static unsigned int
11418 rs6000_elf_section_type_flags (decl, name, reloc)
11423 unsigned int flags = default_section_type_flags (decl, name, reloc);
11425 if (TARGET_RELOCATABLE)
11426 flags |= SECTION_WRITE;
11431 /* Record an element in the table of global constructors. SYMBOL is
11432 a SYMBOL_REF of the function to be called; PRIORITY is a number
11433 between 0 and MAX_INIT_PRIORITY.
11435 This differs from default_named_section_asm_out_constructor in
11436 that we have special handling for -mrelocatable. */
/* NOTE(review): elided excerpt — return type, parameter declarations,
   braces, the `buf' declaration, the `section = buf;' assignment after
   the sprintf, and the else joining the @fixup path to the plain
   assemble_integer path are not visible.  */
11439 rs6000_elf_asm_out_constructor (symbol, priority)
11443 const char *section = ".ctors";
11446 if (priority != DEFAULT_INIT_PRIORITY)
11448 sprintf (buf, ".ctors.%.5u",
11449 /* Invert the numbering so the linker puts us in the proper
11450 order; constructors are run from right to left, and the
11451 linker sorts in increasing order. */
11452 MAX_INIT_PRIORITY - priority);
11456 named_section_flags (section, SECTION_WRITE);
11457 assemble_align (POINTER_SIZE);
11459 if (TARGET_RELOCATABLE)
/* -mrelocatable: emit the pointer with an @fixup so the startup code
   can relocate it at run time.  */
11461 fputs ("\t.long (", asm_out_file);
11462 output_addr_const (asm_out_file, symbol);
11463 fputs (")@fixup\n", asm_out_file);
11466 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Record an element in the table of global destructors; mirror image of
   rs6000_elf_asm_out_constructor above, emitting into .dtors.
   NOTE(review): elided excerpt — return type, parameter declarations,
   braces, the `buf' declaration, the `section = buf;' assignment, and
   the else joining the @fixup path to the assemble_integer path are not
   visible.  */
11470 rs6000_elf_asm_out_destructor (symbol, priority)
11474 const char *section = ".dtors";
11477 if (priority != DEFAULT_INIT_PRIORITY)
11479 sprintf (buf, ".dtors.%.5u",
11480 /* Invert the numbering so the linker puts us in the proper
11481 order; constructors are run from right to left, and the
11482 linker sorts in increasing order. */
11483 MAX_INIT_PRIORITY - priority);
11487 named_section_flags (section, SECTION_WRITE);
11488 assemble_align (POINTER_SIZE);
11490 if (TARGET_RELOCATABLE)
11492 fputs ("\t.long (", asm_out_file);
11493 output_addr_const (asm_out_file, symbol);
11494 fputs (")@fixup\n", asm_out_file);
11497 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11501 #ifdef OBJECT_FORMAT_COFF
11503 xcoff_asm_named_section (name, flags)
11505 unsigned int flags ATTRIBUTE_UNUSED;
11507 fprintf (asm_out_file, "\t.csect %s\n", name);