1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
51 #ifndef TARGET_NO_PROTOTYPE
52 #define TARGET_NO_PROTOTYPE 0
55 #define min(A,B) ((A) < (B) ? (A) : (B))
56 #define max(A,B) ((A) > (B) ? (A) : (B))
60 enum processor_type rs6000_cpu;
61 struct rs6000_cpu_select rs6000_select[3] =
63 /* switch name, tune arch */
64 { (const char *)0, "--with-cpu=", 1, 1 },
65 { (const char *)0, "-mcpu=", 1, 1 },
66 { (const char *)0, "-mtune=", 1, 0 },
69 /* Size of long double */
70 const char *rs6000_long_double_size_string;
71 int rs6000_long_double_type_size;
73 /* Whether -mabi=altivec has appeared */
74 int rs6000_altivec_abi;
76 /* Set to non-zero once AIX common-mode calls have been defined. */
77 static int common_mode_defined;
79 /* Save information from a "cmpxx" operation until the branch or scc is
81 rtx rs6000_compare_op0, rs6000_compare_op1;
82 int rs6000_compare_fp_p;
84 /* Label number of label created for -mrelocatable, to call to so we can
85 get the address of the GOT section */
86 int rs6000_pic_labelno;
89 /* Which abi to adhere to */
90 const char *rs6000_abi_name = RS6000_ABI_NAME;
92 /* Semantics of the small data area */
93 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
95 /* Which small data model to use */
96 const char *rs6000_sdata_name = (char *)0;
98 /* Counter for labels which are to be placed in .fixup. */
102 /* ABI enumeration available for subtarget to use. */
103 enum rs6000_abi rs6000_current_abi;
105 /* ABI string from -mabi= option. */
106 const char *rs6000_abi_string;
109 const char *rs6000_debug_name;
110 int rs6000_debug_stack; /* debug stack applications */
111 int rs6000_debug_arg; /* debug argument handling */
113 /* Flag to say the TOC is initialized */
115 char toc_label_name[10];
117 /* Alias set for saves and restores from the rs6000 stack. */
118 static int rs6000_sr_alias_set;
120 static void rs6000_add_gc_roots PARAMS ((void));
121 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
122 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
123 static void validate_condition_mode
124 PARAMS ((enum rtx_code, enum machine_mode));
125 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
126 static void rs6000_maybe_dead PARAMS ((rtx));
127 static void rs6000_emit_stack_tie PARAMS ((void));
128 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
129 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
130 static unsigned rs6000_hash_constant PARAMS ((rtx));
131 static unsigned toc_hash_function PARAMS ((const void *));
132 static int toc_hash_eq PARAMS ((const void *, const void *));
133 static int toc_hash_mark_entry PARAMS ((void **, void *));
134 static void toc_hash_mark_table PARAMS ((void *));
135 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
136 static void rs6000_free_machine_status PARAMS ((struct function *));
137 static void rs6000_init_machine_status PARAMS ((struct function *));
138 static int rs6000_ra_ever_killed PARAMS ((void));
139 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
140 const struct attribute_spec rs6000_attribute_table[];
141 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
142 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
143 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
144 HOST_WIDE_INT, HOST_WIDE_INT));
146 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
148 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
149 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
151 #ifdef OBJECT_FORMAT_COFF
152 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
154 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
155 static int rs6000_adjust_priority PARAMS ((rtx, int));
156 static int rs6000_issue_rate PARAMS ((void));
158 static void rs6000_init_builtins PARAMS ((void));
159 static void altivec_init_builtins PARAMS ((void));
160 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
161 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
162 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
163 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
164 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static void rs6000_parse_abi_options PARAMS ((void));
166 static int first_altivec_reg_to_save PARAMS ((void));
167 static unsigned int compute_vrsave_mask PARAMS ((void));
168 static void is_altivec_return_reg PARAMS ((rtx, void *));
169 int vrsave_operation PARAMS ((rtx, enum machine_mode));
170 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *));
172 /* Default register names. */
173 char rs6000_reg_names[][8] =
175 "0", "1", "2", "3", "4", "5", "6", "7",
176 "8", "9", "10", "11", "12", "13", "14", "15",
177 "16", "17", "18", "19", "20", "21", "22", "23",
178 "24", "25", "26", "27", "28", "29", "30", "31",
179 "0", "1", "2", "3", "4", "5", "6", "7",
180 "8", "9", "10", "11", "12", "13", "14", "15",
181 "16", "17", "18", "19", "20", "21", "22", "23",
182 "24", "25", "26", "27", "28", "29", "30", "31",
183 "mq", "lr", "ctr","ap",
184 "0", "1", "2", "3", "4", "5", "6", "7",
186 /* AltiVec registers. */
187 "0", "1", "2", "3", "4", "5", "6", "7",
188 "8", "9", "10", "11", "12", "13", "14", "15",
189 "16", "17", "18", "19", "20", "21", "22", "23",
190 "24", "25", "26", "27", "28", "29", "30", "31",
194 #ifdef TARGET_REGNAMES
195 static const char alt_reg_names[][8] =
197 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
198 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
199 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
200 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
201 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
202 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
203 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
204 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
205 "mq", "lr", "ctr", "ap",
206 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
208 /* AltiVec registers. */
209 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
210 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
211 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
212 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
217 #ifndef MASK_STRICT_ALIGN
218 #define MASK_STRICT_ALIGN 0
221 /* Initialize the GCC target structure. */
222 #undef TARGET_ATTRIBUTE_TABLE
223 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
225 #undef TARGET_ASM_FUNCTION_PROLOGUE
226 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
227 #undef TARGET_ASM_FUNCTION_EPILOGUE
228 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
231 #undef TARGET_SECTION_TYPE_FLAGS
232 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
235 #undef TARGET_SCHED_ISSUE_RATE
236 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
237 #undef TARGET_SCHED_ADJUST_COST
238 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
239 #undef TARGET_SCHED_ADJUST_PRIORITY
240 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
242 #undef TARGET_INIT_BUILTINS
243 #define TARGET_INIT_BUILTINS rs6000_init_builtins
245 #undef TARGET_EXPAND_BUILTIN
246 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
248 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
249 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
251 struct gcc_target targetm = TARGET_INITIALIZER;
253 /* Override command line options. Mostly we process the processor
254 type and sometimes adjust other TARGET_ options. */
257 rs6000_override_options (default_cpu)
258 const char *default_cpu;
261 struct rs6000_cpu_select *ptr;
263 /* Simplify the entries below by making a mask for any POWER
264 variant and any PowerPC variant. */
266 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
267 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
268 | MASK_PPC_GFXOPT | MASK_POWERPC64)
269 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
273 const char *const name; /* Canonical processor name. */
274 const enum processor_type processor; /* Processor type enum value. */
275 const int target_enable; /* Target flags to enable. */
276 const int target_disable; /* Target flags to disable. */
277 } const processor_target_table[]
278 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
279 POWER_MASKS | POWERPC_MASKS},
280 {"power", PROCESSOR_POWER,
281 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
282 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
283 {"power2", PROCESSOR_POWER,
284 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
285 POWERPC_MASKS | MASK_NEW_MNEMONICS},
286 {"power3", PROCESSOR_PPC630,
287 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
288 POWER_MASKS | MASK_PPC_GPOPT},
289 {"powerpc", PROCESSOR_POWERPC,
290 MASK_POWERPC | MASK_NEW_MNEMONICS,
291 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
292 {"powerpc64", PROCESSOR_POWERPC64,
293 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
294 POWER_MASKS | POWERPC_OPT_MASKS},
295 {"rios", PROCESSOR_RIOS1,
296 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
297 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
298 {"rios1", PROCESSOR_RIOS1,
299 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
300 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
301 {"rsc", PROCESSOR_PPC601,
302 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
303 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
304 {"rsc1", PROCESSOR_PPC601,
305 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
306 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
307 {"rios2", PROCESSOR_RIOS2,
308 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
309 POWERPC_MASKS | MASK_NEW_MNEMONICS},
310 {"rs64a", PROCESSOR_RS64A,
311 MASK_POWERPC | MASK_NEW_MNEMONICS,
312 POWER_MASKS | POWERPC_OPT_MASKS},
313 {"401", PROCESSOR_PPC403,
314 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
315 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
316 {"403", PROCESSOR_PPC403,
317 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
318 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
319 {"405", PROCESSOR_PPC405,
320 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
321 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
322 {"505", PROCESSOR_MPCCORE,
323 MASK_POWERPC | MASK_NEW_MNEMONICS,
324 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
325 {"601", PROCESSOR_PPC601,
326 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
327 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
328 {"602", PROCESSOR_PPC603,
329 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
330 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
331 {"603", PROCESSOR_PPC603,
332 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
333 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
334 {"603e", PROCESSOR_PPC603,
335 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
336 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
337 {"ec603e", PROCESSOR_PPC603,
338 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
339 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
340 {"604", PROCESSOR_PPC604,
341 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
342 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
343 {"604e", PROCESSOR_PPC604e,
344 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
345 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
346 {"620", PROCESSOR_PPC620,
347 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
348 POWER_MASKS | MASK_PPC_GPOPT},
349 {"630", PROCESSOR_PPC630,
350 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
351 POWER_MASKS | MASK_PPC_GPOPT},
352 {"740", PROCESSOR_PPC750,
353 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
354 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
355 {"750", PROCESSOR_PPC750,
356 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
357 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
358 {"7400", PROCESSOR_PPC7400,
359 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
360 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
361 {"7450", PROCESSOR_PPC7450,
362 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
363 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
364 {"801", PROCESSOR_MPCCORE,
365 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
366 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
367 {"821", PROCESSOR_MPCCORE,
368 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
369 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
370 {"823", PROCESSOR_MPCCORE,
371 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
372 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
373 {"860", PROCESSOR_MPCCORE,
374 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
377 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
379 /* Save current -mmultiple/-mno-multiple status. */
380 int multiple = TARGET_MULTIPLE;
381 /* Save current -mstring/-mno-string status. */
382 int string = TARGET_STRING;
384 /* Identify the processor type. */
385 rs6000_select[0].string = default_cpu;
386 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
388 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
390 ptr = &rs6000_select[i];
391 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
393 for (j = 0; j < ptt_size; j++)
394 if (! strcmp (ptr->string, processor_target_table[j].name))
397 rs6000_cpu = processor_target_table[j].processor;
401 target_flags |= processor_target_table[j].target_enable;
402 target_flags &= ~processor_target_table[j].target_disable;
408 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
412 /* If we are optimizing big endian systems for space, use the store
413 multiple instructions. */
414 if (BYTES_BIG_ENDIAN && optimize_size)
415 target_flags |= MASK_MULTIPLE;
417 /* If -mmultiple or -mno-multiple was explicitly used, don't
418 override with the processor default */
419 if (TARGET_MULTIPLE_SET)
420 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
422 /* If -mstring or -mno-string was explicitly used, don't override
423 with the processor default. */
424 if (TARGET_STRING_SET)
425 target_flags = (target_flags & ~MASK_STRING) | string;
427 /* Don't allow -mmultiple or -mstring on little endian systems
428 unless the cpu is a 750, because the hardware doesn't support the
429 instructions used in little endian mode, and causes an alignment
430 trap. The 750 does not cause an alignment trap (except when the
431 target is unaligned). */
433 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
437 target_flags &= ~MASK_MULTIPLE;
438 if (TARGET_MULTIPLE_SET)
439 warning ("-mmultiple is not supported on little endian systems");
444 target_flags &= ~MASK_STRING;
445 if (TARGET_STRING_SET)
446 warning ("-mstring is not supported on little endian systems");
450 if (flag_pic && DEFAULT_ABI == ABI_AIX)
452 warning ("-f%s ignored (all code is position independent)",
453 (flag_pic > 1) ? "PIC" : "pic");
457 #ifdef XCOFF_DEBUGGING_INFO
458 if (flag_function_sections && (write_symbols != NO_DEBUG)
459 && DEFAULT_ABI == ABI_AIX)
461 warning ("-ffunction-sections disabled on AIX when debugging");
462 flag_function_sections = 0;
465 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
467 warning ("-fdata-sections not supported on AIX");
468 flag_data_sections = 0;
472 /* Set debug flags */
473 if (rs6000_debug_name)
475 if (! strcmp (rs6000_debug_name, "all"))
476 rs6000_debug_stack = rs6000_debug_arg = 1;
477 else if (! strcmp (rs6000_debug_name, "stack"))
478 rs6000_debug_stack = 1;
479 else if (! strcmp (rs6000_debug_name, "arg"))
480 rs6000_debug_arg = 1;
482 error ("unknown -mdebug-%s switch", rs6000_debug_name);
485 /* Set size of long double */
486 rs6000_long_double_type_size = 64;
487 if (rs6000_long_double_size_string)
490 int size = strtol (rs6000_long_double_size_string, &tail, 10);
491 if (*tail != '\0' || (size != 64 && size != 128))
492 error ("Unknown switch -mlong-double-%s",
493 rs6000_long_double_size_string);
495 rs6000_long_double_type_size = size;
498 /* Handle -mabi= options. */
499 rs6000_parse_abi_options ();
501 #ifdef TARGET_REGNAMES
502 /* If the user desires alternate register names, copy in the
503 alternate names now. */
505 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
508 #ifdef SUBTARGET_OVERRIDE_OPTIONS
509 SUBTARGET_OVERRIDE_OPTIONS;
511 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
512 SUBSUBTARGET_OVERRIDE_OPTIONS;
515 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
516 If -maix-struct-return or -msvr4-struct-return was explicitly
517 used, don't override with the ABI default. */
518 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
520 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
521 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
523 target_flags |= MASK_AIX_STRUCT_RET;
526 /* Register global variables with the garbage collector. */
527 rs6000_add_gc_roots ();
529 /* Allocate an alias set for register saves & restores from stack. */
530 rs6000_sr_alias_set = new_alias_set ();
533 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
535 /* Arrange to save and restore machine status around nested functions. */
536 init_machine_status = rs6000_init_machine_status;
537 free_machine_status = rs6000_free_machine_status;
540 /* Handle -mabi= options. */
542 rs6000_parse_abi_options ()
544 if (rs6000_abi_string == 0)
546 else if (! strcmp (rs6000_abi_string, "altivec"))
547 rs6000_altivec_abi = 1;
549 error ("unknown ABI specified: '%s'", rs6000_abi_string);
553 optimization_options (level, size)
554 int level ATTRIBUTE_UNUSED;
555 int size ATTRIBUTE_UNUSED;
559 /* Do anything needed at the start of the asm file. */
562 rs6000_file_start (file, default_cpu)
564 const char *default_cpu;
568 const char *start = buffer;
569 struct rs6000_cpu_select *ptr;
571 if (flag_verbose_asm)
573 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
574 rs6000_select[0].string = default_cpu;
576 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
578 ptr = &rs6000_select[i];
579 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
581 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
587 switch (rs6000_sdata)
589 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
590 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
591 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
592 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
595 if (rs6000_sdata && g_switch_value)
597 fprintf (file, "%s -G %d", start, g_switch_value);
608 /* Create a CONST_DOUBLE from a string. */
611 rs6000_float_const (string, mode)
613 enum machine_mode mode;
615 REAL_VALUE_TYPE value;
616 value = REAL_VALUE_ATOF (string, mode);
617 return immed_real_const_1 (value, mode);
620 /* Return non-zero if this function is known to have a null epilogue. */
625 if (reload_completed)
627 rs6000_stack_t *info = rs6000_stack_info ();
629 if (info->first_gp_reg_save == 32
630 && info->first_fp_reg_save == 64
631 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
634 && info->vrsave_mask == 0
642 /* Returns 1 always. */
645 any_operand (op, mode)
646 rtx op ATTRIBUTE_UNUSED;
647 enum machine_mode mode ATTRIBUTE_UNUSED;
652 /* Returns 1 if op is the count register. */
654 count_register_operand (op, mode)
656 enum machine_mode mode ATTRIBUTE_UNUSED;
658 if (GET_CODE (op) != REG)
661 if (REGNO (op) == COUNT_REGISTER_REGNUM)
664 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
671 xer_operand (op, mode)
673 enum machine_mode mode ATTRIBUTE_UNUSED;
675 if (GET_CODE (op) != REG)
678 if (XER_REGNO_P (REGNO (op)))
684 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
685 by such constants completes more quickly. */
688 s8bit_cint_operand (op, mode)
690 enum machine_mode mode ATTRIBUTE_UNUSED;
692 return ( GET_CODE (op) == CONST_INT
693 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
696 /* Return 1 if OP is a constant that can fit in a D field. */
699 short_cint_operand (op, mode)
701 enum machine_mode mode ATTRIBUTE_UNUSED;
703 return (GET_CODE (op) == CONST_INT
704 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
707 /* Similar for an unsigned D field. */
710 u_short_cint_operand (op, mode)
712 enum machine_mode mode ATTRIBUTE_UNUSED;
714 return (GET_CODE (op) == CONST_INT
715 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
718 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
721 non_short_cint_operand (op, mode)
723 enum machine_mode mode ATTRIBUTE_UNUSED;
725 return (GET_CODE (op) == CONST_INT
726 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
729 /* Returns 1 if OP is a CONST_INT that is a positive value
730 and an exact power of 2. */
733 exact_log2_cint_operand (op, mode)
735 enum machine_mode mode ATTRIBUTE_UNUSED;
737 return (GET_CODE (op) == CONST_INT
739 && exact_log2 (INTVAL (op)) >= 0);
742 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
746 gpc_reg_operand (op, mode)
748 enum machine_mode mode;
750 return (register_operand (op, mode)
751 && (GET_CODE (op) != REG
752 || (REGNO (op) >= ARG_POINTER_REGNUM
753 && !XER_REGNO_P (REGNO (op)))
754 || REGNO (op) < MQ_REGNO));
757 /* Returns 1 if OP is either a pseudo-register or a register denoting a
761 cc_reg_operand (op, mode)
763 enum machine_mode mode;
765 return (register_operand (op, mode)
766 && (GET_CODE (op) != REG
767 || REGNO (op) >= FIRST_PSEUDO_REGISTER
768 || CR_REGNO_P (REGNO (op))));
771 /* Returns 1 if OP is either a pseudo-register or a register denoting a
772 CR field that isn't CR0. */
775 cc_reg_not_cr0_operand (op, mode)
777 enum machine_mode mode;
779 return (register_operand (op, mode)
780 && (GET_CODE (op) != REG
781 || REGNO (op) >= FIRST_PSEUDO_REGISTER
782 || CR_REGNO_NOT_CR0_P (REGNO (op))));
785 /* Returns 1 if OP is either a constant integer valid for a D-field or
786 a non-special register. If a register, it must be in the proper
787 mode unless MODE is VOIDmode. */
790 reg_or_short_operand (op, mode)
792 enum machine_mode mode;
794 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
797 /* Similar, except check if the negation of the constant would be
798 valid for a D-field. */
801 reg_or_neg_short_operand (op, mode)
803 enum machine_mode mode;
805 if (GET_CODE (op) == CONST_INT)
806 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
808 return gpc_reg_operand (op, mode);
811 /* Return 1 if the operand is either a register or an integer whose
812 high-order 16 bits are zero. */
815 reg_or_u_short_operand (op, mode)
817 enum machine_mode mode;
819 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
822 /* Return 1 is the operand is either a non-special register or ANY
826 reg_or_cint_operand (op, mode)
828 enum machine_mode mode;
830 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
833 /* Return 1 is the operand is either a non-special register or ANY
834 32-bit signed constant integer. */
837 reg_or_arith_cint_operand (op, mode)
839 enum machine_mode mode;
841 return (gpc_reg_operand (op, mode)
842 || (GET_CODE (op) == CONST_INT
843 #if HOST_BITS_PER_WIDE_INT != 32
844 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
845 < (unsigned HOST_WIDE_INT) 0x100000000ll)
850 /* Return 1 is the operand is either a non-special register or a 32-bit
851 signed constant integer valid for 64-bit addition. */
854 reg_or_add_cint64_operand (op, mode)
856 enum machine_mode mode;
858 return (gpc_reg_operand (op, mode)
859 || (GET_CODE (op) == CONST_INT
860 && INTVAL (op) < 0x7fff8000
861 #if HOST_BITS_PER_WIDE_INT != 32
862 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
868 /* Return 1 is the operand is either a non-special register or a 32-bit
869 signed constant integer valid for 64-bit subtraction. */
872 reg_or_sub_cint64_operand (op, mode)
874 enum machine_mode mode;
876 return (gpc_reg_operand (op, mode)
877 || (GET_CODE (op) == CONST_INT
878 && (- INTVAL (op)) < 0x7fff8000
879 #if HOST_BITS_PER_WIDE_INT != 32
880 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
886 /* Return 1 is the operand is either a non-special register or ANY
887 32-bit unsigned constant integer. */
890 reg_or_logical_cint_operand (op, mode)
892 enum machine_mode mode;
894 if (GET_CODE (op) == CONST_INT)
896 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
898 if (GET_MODE_BITSIZE (mode) <= 32)
905 return ((INTVAL (op) & GET_MODE_MASK (mode)
906 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
908 else if (GET_CODE (op) == CONST_DOUBLE)
910 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
914 return CONST_DOUBLE_HIGH (op) == 0;
917 return gpc_reg_operand (op, mode);
920 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
923 got_operand (op, mode)
925 enum machine_mode mode ATTRIBUTE_UNUSED;
927 return (GET_CODE (op) == SYMBOL_REF
928 || GET_CODE (op) == CONST
929 || GET_CODE (op) == LABEL_REF);
932 /* Return 1 if the operand is a simple references that can be loaded via
933 the GOT (labels involving addition aren't allowed). */
936 got_no_const_operand (op, mode)
938 enum machine_mode mode ATTRIBUTE_UNUSED;
940 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
943 /* Return the number of instructions it takes to form a constant in an
947 num_insns_constant_wide (value)
950 /* signed constant loadable with {cal|addi} */
951 if (CONST_OK_FOR_LETTER_P (value, 'I'))
954 /* constant loadable with {cau|addis} */
955 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
958 #if HOST_BITS_PER_WIDE_INT == 64
959 else if (TARGET_POWERPC64)
961 HOST_WIDE_INT low = value & 0xffffffff;
962 HOST_WIDE_INT high = value >> 32;
964 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
966 if (high == 0 && (low & 0x80000000) == 0)
969 else if (high == -1 && (low & 0x80000000) != 0)
973 return num_insns_constant_wide (high) + 1;
976 return (num_insns_constant_wide (high)
977 + num_insns_constant_wide (low) + 1);
986 num_insns_constant (op, mode)
988 enum machine_mode mode;
990 if (GET_CODE (op) == CONST_INT)
992 #if HOST_BITS_PER_WIDE_INT == 64
993 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
994 && mask64_operand (op, mode))
998 return num_insns_constant_wide (INTVAL (op));
1001 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1006 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1007 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1008 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1011 else if (GET_CODE (op) == CONST_DOUBLE)
1017 int endian = (WORDS_BIG_ENDIAN == 0);
1019 if (mode == VOIDmode || mode == DImode)
1021 high = CONST_DOUBLE_HIGH (op);
1022 low = CONST_DOUBLE_LOW (op);
1026 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1027 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1029 low = l[1 - endian];
1033 return (num_insns_constant_wide (low)
1034 + num_insns_constant_wide (high));
1038 if (high == 0 && (low & 0x80000000) == 0)
1039 return num_insns_constant_wide (low);
1041 else if (high == -1 && (low & 0x80000000) != 0)
1042 return num_insns_constant_wide (low);
1044 else if (mask64_operand (op, mode))
1048 return num_insns_constant_wide (high) + 1;
1051 return (num_insns_constant_wide (high)
1052 + num_insns_constant_wide (low) + 1);
1060 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1061 register with one instruction per word. We only do this if we can
1062 safely read CONST_DOUBLE_{LOW,HIGH}. */
1065 easy_fp_constant (op, mode)
1067 enum machine_mode mode;
1069 if (GET_CODE (op) != CONST_DOUBLE
1070 || GET_MODE (op) != mode
1071 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1074 /* Consider all constants with -msoft-float to be easy. */
1075 if (TARGET_SOFT_FLOAT && mode != DImode)
1078 /* If we are using V.4 style PIC, consider all constants to be hard. */
1079 if (flag_pic && DEFAULT_ABI == ABI_V4)
1082 #ifdef TARGET_RELOCATABLE
1083 /* Similarly if we are using -mrelocatable, consider all constants
1085 if (TARGET_RELOCATABLE)
1094 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1095 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1097 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1098 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1101 else if (mode == SFmode)
1106 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1107 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1109 return num_insns_constant_wide (l) == 1;
1112 else if (mode == DImode)
1113 return ((TARGET_POWERPC64
1114 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1115 || (num_insns_constant (op, DImode) <= 2));
1117 else if (mode == SImode)
1123 /* Return 1 if the operand is 0.0. */
1125 zero_fp_constant (op, mode)
1127 enum machine_mode mode;
1129 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1132 /* Return 1 if the operand is in volatile memory. Note that during
1133 the RTL generation phase, memory_operand does not return TRUE for
1134 volatile memory references. So this function allows us to
1135 recognize volatile references where its safe. */
1138 volatile_mem_operand (op, mode)
1140 enum machine_mode mode;
1142 if (GET_CODE (op) != MEM)
1145 if (!MEM_VOLATILE_P (op))
1148 if (mode != GET_MODE (op))
1151 if (reload_completed)
1152 return memory_operand (op, mode);
1154 if (reload_in_progress)
1155 return strict_memory_address_p (mode, XEXP (op, 0));
1157 return memory_address_p (mode, XEXP (op, 0));
1160 /* Return 1 if the operand is an offsettable memory operand. */
1163 offsettable_mem_operand (op, mode)
1165 enum machine_mode mode;
1167 return ((GET_CODE (op) == MEM)
1168 && offsettable_address_p (reload_completed || reload_in_progress,
1169 mode, XEXP (op, 0)));
1172 /* Return 1 if the operand is either an easy FP constant (see above) or
1176 mem_or_easy_const_operand (op, mode)
1178 enum machine_mode mode;
1180 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1183 /* Return 1 if the operand is either a non-special register or an item
1184 that can be used as the operand of a `mode' add insn. */
1187 add_operand (op, mode)
1189 enum machine_mode mode;
1191 if (GET_CODE (op) == CONST_INT)
1192 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1193 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1195 return gpc_reg_operand (op, mode);
1198 /* Return 1 if OP is a constant but not a valid add_operand. */
1201 non_add_cint_operand (op, mode)
1203 enum machine_mode mode ATTRIBUTE_UNUSED;
1205 return (GET_CODE (op) == CONST_INT
1206 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1207 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1210 /* Return 1 if the operand is a non-special register or a constant that
1211 can be used as the operand of an OR or XOR insn on the RS/6000. */
1214 logical_operand (op, mode)
1216 enum machine_mode mode;
1218 HOST_WIDE_INT opl, oph;
1220 if (gpc_reg_operand (op, mode))
1223 if (GET_CODE (op) == CONST_INT)
1225 opl = INTVAL (op) & GET_MODE_MASK (mode);
1227 #if HOST_BITS_PER_WIDE_INT <= 32
1228 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1232 else if (GET_CODE (op) == CONST_DOUBLE)
1234 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1237 opl = CONST_DOUBLE_LOW (op);
1238 oph = CONST_DOUBLE_HIGH (op);
1245 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1246 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1249 /* Return 1 if C is a constant that is not a logical operand (as
1250 above), but could be split into one. */
1253 non_logical_cint_operand (op, mode)
1255 enum machine_mode mode;
1257 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1258 && ! logical_operand (op, mode)
1259 && reg_or_logical_cint_operand (op, mode));
1262 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1263 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1264 Reject all ones and all zeros, since these should have been optimized
1265 away and confuse the making of MB and ME. */
1268 mask_operand (op, mode)
1270 enum machine_mode mode ATTRIBUTE_UNUSED;
1272 HOST_WIDE_INT c, lsb;
1274 if (GET_CODE (op) != CONST_INT)
1279 /* We don't change the number of transitions by inverting,
1280 so make sure we start with the LS bit zero. */
1284 /* Reject all zeros or all ones. */
1288 /* Find the first transition. */
1291 /* Invert to look for a second transition. */
1294 /* Erase first transition. */
1297 /* Find the second transition (if any). */
1300 /* Match if all the bits above are 1's (or c is zero). */
1304 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1305 It is if there are no more than one 1->0 or 0->1 transitions.
1306 Reject all ones and all zeros, since these should have been optimized
1307 away and confuse the making of MB and ME. */
1310 mask64_operand (op, mode)
1312 enum machine_mode mode;
1314 if (GET_CODE (op) == CONST_INT)
1316 HOST_WIDE_INT c, lsb;
1318 /* We don't change the number of transitions by inverting,
1319 so make sure we start with the LS bit zero. */
1324 /* Reject all zeros or all ones. */
1328 /* Find the transition, and check that all bits above are 1's. */
1332 else if (GET_CODE (op) == CONST_DOUBLE
1333 && (mode == VOIDmode || mode == DImode))
1335 HOST_WIDE_INT low, high, lsb;
1337 if (HOST_BITS_PER_WIDE_INT < 64)
1338 high = CONST_DOUBLE_HIGH (op);
1340 low = CONST_DOUBLE_LOW (op);
1343 if (HOST_BITS_PER_WIDE_INT < 64)
1350 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1354 return high == -lsb;
1358 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1364 /* Return 1 if the operand is either a non-special register or a constant
1365 that can be used as the operand of a PowerPC64 logical AND insn. */
1368 and64_operand (op, mode)
1370 enum machine_mode mode;
1372 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1373 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1375 return (logical_operand (op, mode) || mask64_operand (op, mode));
1378 /* Return 1 if the operand is either a non-special register or a
1379 constant that can be used as the operand of an RS/6000 logical AND insn. */
1382 and_operand (op, mode)
1384 enum machine_mode mode;
1386 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1387 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1389 return (logical_operand (op, mode) || mask_operand (op, mode));
1392 /* Return 1 if the operand is a general register or memory operand. */
1395 reg_or_mem_operand (op, mode)
1397 enum machine_mode mode;
1399 return (gpc_reg_operand (op, mode)
1400 || memory_operand (op, mode)
1401 || volatile_mem_operand (op, mode));
1404 /* Return 1 if the operand is a general register or memory operand without
1405 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1409 lwa_operand (op, mode)
1411 enum machine_mode mode;
1415 if (reload_completed && GET_CODE (inner) == SUBREG)
1416 inner = SUBREG_REG (inner);
1418 return gpc_reg_operand (inner, mode)
1419 || (memory_operand (inner, mode)
1420 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1421 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1422 && (GET_CODE (XEXP (inner, 0)) != PLUS
1423 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1424 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1427 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1428 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1432 call_operand (op, mode)
1434 enum machine_mode mode;
1436 if (mode != VOIDmode && GET_MODE (op) != mode)
1439 return (GET_CODE (op) == SYMBOL_REF
1440 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1443 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1444 this file and the function is not weakly defined. */
1447 current_file_function_operand (op, mode)
1449 enum machine_mode mode ATTRIBUTE_UNUSED;
1451 return (GET_CODE (op) == SYMBOL_REF
1452 && (SYMBOL_REF_FLAG (op)
1453 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1454 && ! DECL_WEAK (current_function_decl))));
1457 /* Return 1 if this operand is a valid input for a move insn. */
1460 input_operand (op, mode)
1462 enum machine_mode mode;
1464 /* Memory is always valid. */
1465 if (memory_operand (op, mode))
1468 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1469 if (GET_CODE (op) == CONSTANT_P_RTX)
1472 /* For floating-point, easy constants are valid. */
1473 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1475 && easy_fp_constant (op, mode))
1478 /* Allow any integer constant. */
1479 if (GET_MODE_CLASS (mode) == MODE_INT
1480 && (GET_CODE (op) == CONST_INT
1481 || GET_CODE (op) == CONST_DOUBLE))
1484 /* For floating-point or multi-word mode, the only remaining valid type
1486 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1487 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1488 return register_operand (op, mode);
1490 /* The only cases left are integral modes one word or smaller (we
1491 do not get called for MODE_CC values). These can be in any
1493 if (register_operand (op, mode))
1496 /* A SYMBOL_REF referring to the TOC is valid. */
1497 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1500 /* A constant pool expression (relative to the TOC) is valid */
1501 if (TOC_RELATIVE_EXPR_P (op))
1504 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1506 if (DEFAULT_ABI == ABI_V4
1507 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1508 && small_data_operand (op, Pmode))
1514 /* Return 1 for an operand in small memory on V.4/eabi. */
1517 small_data_operand (op, mode)
1518 rtx op ATTRIBUTE_UNUSED;
1519 enum machine_mode mode ATTRIBUTE_UNUSED;
1524 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1527 if (DEFAULT_ABI != ABI_V4)
1530 if (GET_CODE (op) == SYMBOL_REF)
1533 else if (GET_CODE (op) != CONST
1534 || GET_CODE (XEXP (op, 0)) != PLUS
1535 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1536 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1541 rtx sum = XEXP (op, 0);
1542 HOST_WIDE_INT summand;
1544 /* We have to be careful here, because it is the referenced address
1545 that must be 32k from _SDA_BASE_, not just the symbol. */
1546 summand = INTVAL (XEXP (sum, 1));
1547 if (summand < 0 || summand > g_switch_value)
1550 sym_ref = XEXP (sum, 0);
1553 if (*XSTR (sym_ref, 0) != '@')
1564 constant_pool_expr_1 (op, have_sym, have_toc)
1569 switch (GET_CODE(op))
1572 if (CONSTANT_POOL_ADDRESS_P (op))
1574 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1582 else if (! strcmp (XSTR (op, 0), toc_label_name))
1591 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1592 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1594 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1603 constant_pool_expr_p (op)
1608 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1612 toc_relative_expr_p (op)
1617 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1620 /* Try machine-dependent ways of modifying an illegitimate address
1621 to be legitimate. If we find one, return the new, valid address.
1622 This is used from only one place: `memory_address' in explow.c.
1624 OLDX is the address as it was before break_out_memory_refs was
1625 called. In some cases it is useful to look at this to decide what
1628 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1630 It is always safe for this function to do nothing. It exists to
1631 recognize opportunities to optimize the output.
1633 On RS/6000, first check for the sum of a register with a constant
1634 integer that is out of range. If so, generate code to add the
1635 constant with the low-order 16 bits masked to the register and force
1636 this result into another register (this can be done with `cau').
1637 Then generate an address of REG+(CONST&0xffff), allowing for the
1638 possibility of bit 16 being a one.
1640 Then check for the sum of a register and something not constant, try to
1641 load the other things into a register and return the sum. */
1643 rs6000_legitimize_address (x, oldx, mode)
1645 rtx oldx ATTRIBUTE_UNUSED;
1646 enum machine_mode mode;
1648 if (GET_CODE (x) == PLUS
1649 && GET_CODE (XEXP (x, 0)) == REG
1650 && GET_CODE (XEXP (x, 1)) == CONST_INT
1651 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1653 HOST_WIDE_INT high_int, low_int;
1655 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1656 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1657 if (low_int & 0x8000)
1658 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1659 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1660 GEN_INT (high_int)), 0);
1661 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1663 else if (GET_CODE (x) == PLUS
1664 && GET_CODE (XEXP (x, 0)) == REG
1665 && GET_CODE (XEXP (x, 1)) != CONST_INT
1666 && GET_MODE_NUNITS (mode) == 1
1667 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1668 && (TARGET_POWERPC64 || mode != DImode)
1671 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1672 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1674 else if (ALTIVEC_VECTOR_MODE (mode))
1678 /* Make sure both operands are registers. */
1679 if (GET_CODE (x) == PLUS)
1680 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1681 force_reg (Pmode, XEXP (x, 1)));
1683 reg = force_reg (Pmode, x);
1686 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1687 && GET_CODE (x) != CONST_INT
1688 && GET_CODE (x) != CONST_DOUBLE
1690 && GET_MODE_NUNITS (mode) == 1
1691 && (GET_MODE_BITSIZE (mode) <= 32
1692 || (TARGET_HARD_FLOAT && mode == DFmode)))
1694 rtx reg = gen_reg_rtx (Pmode);
1695 emit_insn (gen_elf_high (reg, (x)));
1696 return gen_rtx_LO_SUM (Pmode, reg, (x));
1698 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1700 && GET_CODE (x) != CONST_INT
1701 && GET_CODE (x) != CONST_DOUBLE
1703 && (TARGET_HARD_FLOAT || mode != DFmode)
1707 rtx reg = gen_reg_rtx (Pmode);
1708 emit_insn (gen_macho_high (reg, (x)));
1709 return gen_rtx_LO_SUM (Pmode, reg, (x));
1712 && CONSTANT_POOL_EXPR_P (x)
1713 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1715 return create_TOC_reference (x);
1721 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1722 that is a valid memory address for an instruction.
1723 The MODE argument is the machine mode for the MEM expression
1724 that wants to use this address.
1726 On the RS/6000, there are four valid address: a SYMBOL_REF that
1727 refers to a constant pool entry of an address (or the sum of it
1728 plus a constant), a short (16-bit signed) constant plus a register,
1729 the sum of two registers, or a register indirect, possibly with an
1730 auto-increment. For DFmode and DImode with an constant plus register,
1731 we must ensure that both words are addressable or PowerPC64 with offset
1734 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1735 32-bit DImode, TImode), indexed addressing cannot be used because
1736 adjacent memory cells are accessed by adding word-sized offsets
1737 during assembly output. */
1739 rs6000_legitimate_address (mode, x, reg_ok_strict)
1740 enum machine_mode mode;
1744 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1746 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1748 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1750 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1752 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1754 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1756 && GET_CODE (x) == PLUS
1757 && GET_CODE (XEXP (x, 0)) == REG
1758 && XEXP (x, 0) == virtual_stack_vars_rtx
1759 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1761 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1764 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1765 && (TARGET_POWERPC64 || mode != DImode)
1766 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1768 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1773 /* Try to output insns to set TARGET equal to the constant C if it can
1774 be done in less than N insns. Do all computations in MODE.
1775 Returns the place where the output has been placed if it can be
1776 done and the insns have been emitted. If it would take more than N
1777 insns, zero is returned and no insns and emitted. */
1780 rs6000_emit_set_const (dest, mode, source, n)
1782 enum machine_mode mode;
1783 int n ATTRIBUTE_UNUSED;
1785 HOST_WIDE_INT c0, c1;
1787 if (mode == QImode || mode == HImode || mode == SImode)
1790 dest = gen_reg_rtx (mode);
1791 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1795 if (GET_CODE (source) == CONST_INT)
1797 c0 = INTVAL (source);
1800 else if (GET_CODE (source) == CONST_DOUBLE)
1802 #if HOST_BITS_PER_WIDE_INT >= 64
1803 c0 = CONST_DOUBLE_LOW (source);
1806 c0 = CONST_DOUBLE_LOW (source);
1807 c1 = CONST_DOUBLE_HIGH (source);
1813 return rs6000_emit_set_long_const (dest, c0, c1);
1816 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1817 fall back to a straight forward decomposition. We do this to avoid
1818 exponential run times encountered when looking for longer sequences
1819 with rs6000_emit_set_const. */
1821 rs6000_emit_set_long_const (dest, c1, c2)
1823 HOST_WIDE_INT c1, c2;
1825 if (!TARGET_POWERPC64)
1827 rtx operand1, operand2;
1829 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
1831 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
1833 emit_move_insn (operand1, GEN_INT (c1));
1834 emit_move_insn (operand2, GEN_INT (c2));
1838 HOST_WIDE_INT d1, d2, d3, d4;
1840 /* Decompose the entire word */
1841 #if HOST_BITS_PER_WIDE_INT >= 64
1842 if (c2 != -(c1 < 0))
1844 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1846 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1847 c1 = (c1 - d2) >> 32;
1848 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1850 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1854 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1856 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1860 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
1862 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1867 /* Construct the high word */
1870 emit_move_insn (dest, GEN_INT (d4));
1872 emit_move_insn (dest,
1873 gen_rtx_PLUS (DImode, dest, GEN_INT (d3)));
1876 emit_move_insn (dest, GEN_INT (d3));
1878 /* Shift it into place */
1879 if (d3 != 0 || d4 != 0)
1880 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
1882 /* Add in the low bits. */
1884 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d2)));
1886 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d1)));
1892 /* Emit a move from SOURCE to DEST in mode MODE. */
1894 rs6000_emit_move (dest, source, mode)
1897 enum machine_mode mode;
1901 operands[1] = source;
1903 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
1904 if (GET_CODE (operands[1]) == CONST_DOUBLE
1905 && ! FLOAT_MODE_P (mode)
1906 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1908 /* FIXME. This should never happen. */
1909 /* Since it seems that it does, do the safe thing and convert
1912 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
1914 if (GET_CODE (operands[1]) == CONST_DOUBLE
1915 && ! FLOAT_MODE_P (mode)
1916 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
1917 && CONST_DOUBLE_LOW (operands[1]) >= 0)
1918 || (CONST_DOUBLE_HIGH (operands[1]) == -1
1919 && CONST_DOUBLE_LOW (operands[1]) < 0)))
1922 /* Check if GCC is setting up a block move that will end up using FP
1923 registers as temporaries. We must make sure this is acceptable. */
1924 if (GET_CODE (operands[0]) == MEM
1925 && GET_CODE (operands[1]) == MEM
1927 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
1928 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
1929 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
1930 ? 32 : MEM_ALIGN (operands[0])))
1931 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
1933 : MEM_ALIGN (operands[1]))))
1934 && ! MEM_VOLATILE_P (operands [0])
1935 && ! MEM_VOLATILE_P (operands [1]))
1937 emit_move_insn (adjust_address (operands[0], SImode, 0),
1938 adjust_address (operands[1], SImode, 0));
1939 emit_move_insn (adjust_address (operands[0], SImode, 4),
1940 adjust_address (operands[1], SImode, 4));
1944 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
1945 operands[1] = force_reg (mode, operands[1]);
1947 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
1948 && GET_CODE (operands[0]) == MEM)
1952 if (reload_in_progress || reload_completed)
1953 regnum = true_regnum (operands[1]);
1954 else if (GET_CODE (operands[1]) == REG)
1955 regnum = REGNO (operands[1]);
1959 /* If operands[1] is a register, on POWER it may have
1960 double-precision data in it, so truncate it to single
1962 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
1965 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
1966 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
1967 operands[1] = newreg;
1971 /* Handle the case where reload calls us with an invalid address;
1972 and the case of CONSTANT_P_RTX. */
1973 if (! general_operand (operands[1], mode)
1974 || ! nonimmediate_operand (operands[0], mode)
1975 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
1977 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1981 /* FIXME: In the long term, this switch statement should go away
1982 and be replaced by a sequence of tests based on things like
1988 if (CONSTANT_P (operands[1])
1989 && GET_CODE (operands[1]) != CONST_INT)
1990 operands[1] = force_const_mem (mode, operands[1]);
1996 if (CONSTANT_P (operands[1])
1997 && ! easy_fp_constant (operands[1], mode))
1998 operands[1] = force_const_mem (mode, operands[1]);
2005 /* fixme: aldyh -- allow vector constants when they are implemented. */
2006 if (CONSTANT_P (operands[1]))
2007 operands[1] = force_const_mem (mode, operands[1]);
2012 /* Use default pattern for address of ELF small data */
2015 && DEFAULT_ABI == ABI_V4
2016 && (GET_CODE (operands[1]) == SYMBOL_REF
2017 || GET_CODE (operands[1]) == CONST)
2018 && small_data_operand (operands[1], mode))
2020 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2024 if (DEFAULT_ABI == ABI_V4
2025 && mode == Pmode && mode == SImode
2026 && flag_pic == 1 && got_operand (operands[1], mode))
2028 emit_insn (gen_movsi_got (operands[0], operands[1]));
2032 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2033 && TARGET_NO_TOC && ! flag_pic
2035 && CONSTANT_P (operands[1])
2036 && GET_CODE (operands[1]) != HIGH
2037 && GET_CODE (operands[1]) != CONST_INT)
2039 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2041 /* If this is a function address on -mcall-aixdesc,
2042 convert it to the address of the descriptor. */
2043 if (DEFAULT_ABI == ABI_AIX
2044 && GET_CODE (operands[1]) == SYMBOL_REF
2045 && XSTR (operands[1], 0)[0] == '.')
2047 const char *name = XSTR (operands[1], 0);
2049 while (*name == '.')
2051 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2052 CONSTANT_POOL_ADDRESS_P (new_ref)
2053 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2054 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2055 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2056 operands[1] = new_ref;
2059 if (DEFAULT_ABI == ABI_DARWIN)
2061 emit_insn (gen_macho_high (target, operands[1]));
2062 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2066 emit_insn (gen_elf_high (target, operands[1]));
2067 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2071 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2072 and we have put it in the TOC, we just need to make a TOC-relative
2075 && GET_CODE (operands[1]) == SYMBOL_REF
2076 && CONSTANT_POOL_EXPR_P (operands[1])
2077 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2078 get_pool_mode (operands[1])))
2080 operands[1] = create_TOC_reference (operands[1]);
2082 else if (mode == Pmode
2083 && CONSTANT_P (operands[1])
2084 && ((GET_CODE (operands[1]) != CONST_INT
2085 && ! easy_fp_constant (operands[1], mode))
2086 || (GET_CODE (operands[1]) == CONST_INT
2087 && num_insns_constant (operands[1], mode) > 2)
2088 || (GET_CODE (operands[0]) == REG
2089 && FP_REGNO_P (REGNO (operands[0]))))
2090 && GET_CODE (operands[1]) != HIGH
2091 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2092 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2094 /* Emit a USE operation so that the constant isn't deleted if
2095 expensive optimizations are turned on because nobody
2096 references it. This should only be done for operands that
2097 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2098 This should not be done for operands that contain LABEL_REFs.
2099 For now, we just handle the obvious case. */
2100 if (GET_CODE (operands[1]) != LABEL_REF)
2101 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2104 /* Darwin uses a special PIC legitimizer. */
2105 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2108 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2110 if (operands[0] != operands[1])
2111 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2116 /* If we are to limit the number of things we put in the TOC and
2117 this is a symbol plus a constant we can add in one insn,
2118 just put the symbol in the TOC and add the constant. Don't do
2119 this if reload is in progress. */
2120 if (GET_CODE (operands[1]) == CONST
2121 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2122 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2123 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2124 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2125 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2126 && ! side_effects_p (operands[0]))
2129 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2130 rtx other = XEXP (XEXP (operands[1], 0), 1);
2132 sym = force_reg (mode, sym);
2134 emit_insn (gen_addsi3 (operands[0], sym, other));
2136 emit_insn (gen_adddi3 (operands[0], sym, other));
2140 operands[1] = force_const_mem (mode, operands[1]);
2143 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2144 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2145 get_pool_constant (XEXP (operands[1], 0)),
2146 get_pool_mode (XEXP (operands[1], 0))))
2149 = gen_rtx_MEM (mode,
2150 create_TOC_reference (XEXP (operands[1], 0)));
2151 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2152 RTX_UNCHANGING_P (operands[1]) = 1;
2158 if (GET_CODE (operands[0]) == MEM
2159 && GET_CODE (XEXP (operands[0], 0)) != REG
2160 && ! reload_in_progress)
2162 = replace_equiv_address (operands[0],
2163 copy_addr_to_reg (XEXP (operands[0], 0)));
2165 if (GET_CODE (operands[1]) == MEM
2166 && GET_CODE (XEXP (operands[1], 0)) != REG
2167 && ! reload_in_progress)
2169 = replace_equiv_address (operands[1],
2170 copy_addr_to_reg (XEXP (operands[1], 0)));
2177 /* Above, we may have called force_const_mem which may have returned
2178 an invalid address. If we can, fix this up; otherwise, reload will
2179 have to deal with it. */
2180 if (GET_CODE (operands[1]) == MEM
2181 && ! memory_address_p (mode, XEXP (operands[1], 0))
2182 && ! reload_in_progress)
2183 operands[1] = adjust_address (operands[1], mode, 0);
2185 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2189 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2190 for a call to a function whose data type is FNTYPE.
2191 For a library call, FNTYPE is 0.
2193 For incoming args we set the number of arguments in the prototype large
2194 so we never return a PARALLEL. */
2197 init_cumulative_args (cum, fntype, libname, incoming)
2198 CUMULATIVE_ARGS *cum;
2200 rtx libname ATTRIBUTE_UNUSED;
2203 static CUMULATIVE_ARGS zero_cumulative;
2205 *cum = zero_cumulative;
2207 cum->fregno = FP_ARG_MIN_REG;
2208 cum->vregno = ALTIVEC_ARG_MIN_REG;
2209 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2210 cum->call_cookie = CALL_NORMAL;
2211 cum->sysv_gregno = GP_ARG_MIN_REG;
2214 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2216 else if (cum->prototype)
2217 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2218 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2219 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2222 cum->nargs_prototype = 0;
2224 cum->orig_nargs = cum->nargs_prototype;
2226 /* Check for longcall's */
2227 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2228 cum->call_cookie = CALL_LONG;
2230 if (TARGET_DEBUG_ARG)
2232 fprintf (stderr, "\ninit_cumulative_args:");
2235 tree ret_type = TREE_TYPE (fntype);
2236 fprintf (stderr, " ret code = %s,",
2237 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2240 if (cum->call_cookie & CALL_LONG)
2241 fprintf (stderr, " longcall,");
2243 fprintf (stderr, " proto = %d, nargs = %d\n",
2244 cum->prototype, cum->nargs_prototype);
2248 /* If defined, a C expression which determines whether, and in which
2249 direction, to pad out an argument with extra space. The value
2250 should be of type `enum direction': either `upward' to pad above
2251 the argument, `downward' to pad below, or `none' to inhibit
2254 For the AIX ABI structs are always stored left shifted in their
2258 function_arg_padding (mode, type)
2259 enum machine_mode mode;
2262 if (type != 0 && AGGREGATE_TYPE_P (type))
2265 /* This is the default definition. */
2266 return (! BYTES_BIG_ENDIAN
2269 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2270 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2271 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2272 ? downward : upward));
2275 /* If defined, a C expression that gives the alignment boundary, in bits,
2276 of an argument with the specified mode and type. If it is not defined,
2277 PARM_BOUNDARY is used for all arguments.
2279 V.4 wants long longs to be double word aligned. */
2282 function_arg_boundary (mode, type)
2283 enum machine_mode mode;
2284 tree type ATTRIBUTE_UNUSED;
2286 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2288 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2291 return PARM_BOUNDARY;
2294 /* Update the data in CUM to advance over an argument
2295 of mode MODE and data type TYPE.
2296 (TYPE is null for libcalls where that information may not be available.) */
2299 function_arg_advance (cum, mode, type, named)
2300 CUMULATIVE_ARGS *cum;
2301 enum machine_mode mode;
2305 cum->nargs_prototype--;
2307 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2309 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2312 cum->words += RS6000_ARG_SIZE (mode, type);
2314 else if (DEFAULT_ABI == ABI_V4)
2316 if (TARGET_HARD_FLOAT
2317 && (mode == SFmode || mode == DFmode))
2319 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2324 cum->words += cum->words & 1;
2325 cum->words += RS6000_ARG_SIZE (mode, type);
2331 int gregno = cum->sysv_gregno;
2333 /* Aggregates and IEEE quad get passed by reference. */
2334 if ((type && AGGREGATE_TYPE_P (type))
2338 n_words = RS6000_ARG_SIZE (mode, type);
2340 /* Long long is put in odd registers. */
2341 if (n_words == 2 && (gregno & 1) == 0)
2344 /* Long long is not split between registers and stack. */
2345 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2347 /* Long long is aligned on the stack. */
2349 cum->words += cum->words & 1;
2350 cum->words += n_words;
2353 /* Note: continuing to accumulate gregno past when we've started
2354 spilling to the stack indicates the fact that we've started
2355 spilling to the stack to expand_builtin_saveregs. */
2356 cum->sysv_gregno = gregno + n_words;
2359 if (TARGET_DEBUG_ARG)
2361 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2362 cum->words, cum->fregno);
2363 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2364 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2365 fprintf (stderr, "mode = %4s, named = %d\n",
2366 GET_MODE_NAME (mode), named);
2371 int align = (TARGET_32BIT && (cum->words & 1) != 0
2372 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2374 cum->words += align + RS6000_ARG_SIZE (mode, type);
2376 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2379 if (TARGET_DEBUG_ARG)
2381 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2382 cum->words, cum->fregno);
2383 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2384 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2385 fprintf (stderr, "named = %d, align = %d\n", named, align);
2390 /* Determine where to put an argument to a function.
2391 Value is zero to push the argument on the stack,
2392 or a hard register in which to store the argument.
2394 MODE is the argument's machine mode.
2395 TYPE is the data type of the argument (as a tree).
2396 This is null for libcalls where that information may
2398 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2399 the preceding args and about the function being called.
2400 NAMED is nonzero if this argument is a named parameter
2401 (otherwise it is an extra parameter matching an ellipsis).
2403 On RS/6000 the first eight words of non-FP are normally in registers
2404 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2405 Under V.4, the first 8 FP args are in registers.
2407 If this is floating-point and no prototype is specified, we use
2408 both an FP and integer register (or possibly FP reg and stack). Library
2409 functions (when TYPE is zero) always have the proper types for args,
2410 so we can pass the FP value just in one register. emit_library_function
2411 doesn't support PARALLEL anyway. */
2414 function_arg (cum, mode, type, named)
2415 CUMULATIVE_ARGS *cum;
2416 enum machine_mode mode;
2420 enum rs6000_abi abi = DEFAULT_ABI;
2422 /* Return a marker to indicate whether CR1 needs to set or clear the
2423 bit that V.4 uses to say fp args were passed in registers.
2424 Assume that we don't need the marker for software floating point,
2425 or compiler generated library calls. */
2426 if (mode == VOIDmode)
2429 && TARGET_HARD_FLOAT
2430 && cum->nargs_prototype < 0
2431 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2433 return GEN_INT (cum->call_cookie
2434 | ((cum->fregno == FP_ARG_MIN_REG)
2435 ? CALL_V4_SET_FP_ARGS
2436 : CALL_V4_CLEAR_FP_ARGS));
2439 return GEN_INT (cum->call_cookie);
2442 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2444 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2445 return gen_rtx_REG (mode, cum->vregno);
2449 else if (abi == ABI_V4)
2451 if (TARGET_HARD_FLOAT
2452 && (mode == SFmode || mode == DFmode))
2454 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2455 return gen_rtx_REG (mode, cum->fregno);
2462 int gregno = cum->sysv_gregno;
2464 /* Aggregates and IEEE quad get passed by reference. */
2465 if ((type && AGGREGATE_TYPE_P (type))
2469 n_words = RS6000_ARG_SIZE (mode, type);
2471 /* Long long is put in odd registers. */
2472 if (n_words == 2 && (gregno & 1) == 0)
2475 /* Long long is not split between registers and stack. */
2476 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2477 return gen_rtx_REG (mode, gregno);
2484 int align = (TARGET_32BIT && (cum->words & 1) != 0
2485 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2486 int align_words = cum->words + align;
2488 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2491 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2494 || ((cum->nargs_prototype > 0)
2495 /* IBM AIX extended its linkage convention definition always
2496 to require FP args after register save area hole on the
2498 && (DEFAULT_ABI != ABI_AIX
2500 || (align_words < GP_ARG_NUM_REG))))
2501 return gen_rtx_REG (mode, cum->fregno);
2503 return gen_rtx_PARALLEL (mode,
2505 gen_rtx_EXPR_LIST (VOIDmode,
2506 ((align_words >= GP_ARG_NUM_REG)
2509 + RS6000_ARG_SIZE (mode, type)
2511 /* If this is partially on the stack, then
2512 we only include the portion actually
2513 in registers here. */
2514 ? gen_rtx_REG (SImode,
2515 GP_ARG_MIN_REG + align_words)
2516 : gen_rtx_REG (mode,
2517 GP_ARG_MIN_REG + align_words))),
2519 gen_rtx_EXPR_LIST (VOIDmode,
2520 gen_rtx_REG (mode, cum->fregno),
2523 else if (align_words < GP_ARG_NUM_REG)
2524 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2530 /* For an arg passed partly in registers and partly in memory,
2531 this is the number of registers used.
2532 For args passed entirely in registers or entirely in memory, zero. */
2535 function_arg_partial_nregs (cum, mode, type, named)
2536 CUMULATIVE_ARGS *cum;
2537 enum machine_mode mode;
2539 int named ATTRIBUTE_UNUSED;
2541 if (DEFAULT_ABI == ABI_V4)
2544 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2545 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2547 if (cum->nargs_prototype >= 0)
2551 if (cum->words < GP_ARG_NUM_REG
2552 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2554 int ret = GP_ARG_NUM_REG - cum->words;
2555 if (ret && TARGET_DEBUG_ARG)
2556 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2564 /* A C expression that indicates when an argument must be passed by
2565 reference. If nonzero for an argument, a copy of that argument is
2566 made in memory and a pointer to the argument is passed instead of
2567 the argument itself. The pointer is passed in whatever way is
2568 appropriate for passing a pointer to that type.
2570 Under V.4, structures and unions are passed by reference. */
2573 function_arg_pass_by_reference (cum, mode, type, named)
2574 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2575 enum machine_mode mode ATTRIBUTE_UNUSED;
2577 int named ATTRIBUTE_UNUSED;
2579 if (DEFAULT_ABI == ABI_V4
2580 && ((type && AGGREGATE_TYPE_P (type))
2583 if (TARGET_DEBUG_ARG)
2584 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2592 /* Perform any needed actions needed for a function that is receiving a
2593 variable number of arguments.
2597 MODE and TYPE are the mode and type of the current parameter.
2599 PRETEND_SIZE is a variable that should be set to the amount of stack
2600 that must be pushed by the prolog to pretend that our caller pushed
2603 Normally, this macro will push all remaining incoming registers on the
2604 stack and set PRETEND_SIZE to the length of the registers pushed. */
2607 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2608 CUMULATIVE_ARGS *cum;
2609 enum machine_mode mode;
2615 CUMULATIVE_ARGS next_cum;
2616 int reg_size = TARGET_32BIT ? 4 : 8;
2617 rtx save_area = NULL_RTX, mem;
2618 int first_reg_offset, set;
2622 fntype = TREE_TYPE (current_function_decl);
2623 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2624 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2625 != void_type_node));
2627 /* For varargs, we do not want to skip the dummy va_dcl argument.
2628 For stdargs, we do want to skip the last named argument. */
2631 function_arg_advance (&next_cum, mode, type, 1);
2633 if (DEFAULT_ABI == ABI_V4)
2635 /* Indicate to allocate space on the stack for varargs save area. */
2636 /* ??? Does this really have to be located at a magic spot on the
2637 stack, or can we allocate this with assign_stack_local instead. */
2638 cfun->machine->sysv_varargs_p = 1;
2640 save_area = plus_constant (virtual_stack_vars_rtx,
2641 - RS6000_VARARGS_SIZE);
2643 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2647 first_reg_offset = next_cum.words;
2648 save_area = virtual_incoming_args_rtx;
2649 cfun->machine->sysv_varargs_p = 0;
2651 if (MUST_PASS_IN_STACK (mode, type))
2652 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2655 set = get_varargs_alias_set ();
2656 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2658 mem = gen_rtx_MEM (BLKmode,
2659 plus_constant (save_area,
2660 first_reg_offset * reg_size)),
2661 set_mem_alias_set (mem, set);
2662 set_mem_align (mem, BITS_PER_WORD);
2665 (GP_ARG_MIN_REG + first_reg_offset, mem,
2666 GP_ARG_NUM_REG - first_reg_offset,
2667 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2669 /* ??? Does ABI_V4 need this at all? */
2670 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2673 /* Save FP registers if needed. */
2674 if (DEFAULT_ABI == ABI_V4
2675 && TARGET_HARD_FLOAT && ! no_rtl
2676 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2678 int fregno = next_cum.fregno;
2679 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2680 rtx lab = gen_label_rtx ();
2681 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2683 emit_jump_insn (gen_rtx_SET (VOIDmode,
2685 gen_rtx_IF_THEN_ELSE (VOIDmode,
2686 gen_rtx_NE (VOIDmode, cr1,
2688 gen_rtx_LABEL_REF (VOIDmode, lab),
2691 while (fregno <= FP_ARG_V4_MAX_REG)
2693 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2694 set_mem_alias_set (mem, set);
2695 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2704 /* Create the va_list data type. */
2707 rs6000_build_va_list ()
2709 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2711 /* For AIX, prefer 'char *' because that's what the system
2712 header files like. */
2713 if (DEFAULT_ABI != ABI_V4)
2714 return build_pointer_type (char_type_node);
2716 record = make_lang_type (RECORD_TYPE);
2717 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2719 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2720 unsigned_char_type_node);
2721 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2722 unsigned_char_type_node);
2723 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2725 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2728 DECL_FIELD_CONTEXT (f_gpr) = record;
2729 DECL_FIELD_CONTEXT (f_fpr) = record;
2730 DECL_FIELD_CONTEXT (f_ovf) = record;
2731 DECL_FIELD_CONTEXT (f_sav) = record;
2733 TREE_CHAIN (record) = type_decl;
2734 TYPE_NAME (record) = type_decl;
2735 TYPE_FIELDS (record) = f_gpr;
2736 TREE_CHAIN (f_gpr) = f_fpr;
2737 TREE_CHAIN (f_fpr) = f_ovf;
2738 TREE_CHAIN (f_ovf) = f_sav;
2740 layout_type (record);
2742 /* The correct type is an array type of one element. */
2743 return build_array_type (record, build_index_type (size_zero_node));
2746 /* Implement va_start. */
2749 rs6000_va_start (stdarg_p, valist, nextarg)
2754 HOST_WIDE_INT words, n_gpr, n_fpr;
2755 tree f_gpr, f_fpr, f_ovf, f_sav;
2756 tree gpr, fpr, ovf, sav, t;
2758 /* Only SVR4 needs something special. */
2759 if (DEFAULT_ABI != ABI_V4)
2761 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2765 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2766 f_fpr = TREE_CHAIN (f_gpr);
2767 f_ovf = TREE_CHAIN (f_fpr);
2768 f_sav = TREE_CHAIN (f_ovf);
2770 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2771 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2772 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2773 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2774 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2776 /* Count number of gp and fp argument registers used. */
2777 words = current_function_args_info.words;
2778 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2779 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2781 if (TARGET_DEBUG_ARG)
2783 fputs ("va_start: words = ", stderr);
2784 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2785 fputs (", n_gpr = ", stderr);
2786 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2787 fputs (", n_fpr = ", stderr);
2788 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2789 putc ('\n', stderr);
2792 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2793 TREE_SIDE_EFFECTS (t) = 1;
2794 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2796 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2797 TREE_SIDE_EFFECTS (t) = 1;
2798 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2800 /* Find the overflow area. */
2801 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
2803 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
2804 build_int_2 (words * UNITS_PER_WORD, 0));
2805 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2806 TREE_SIDE_EFFECTS (t) = 1;
2807 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2809 /* Find the register save area. */
2810 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
2811 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
2812 build_int_2 (-RS6000_VARARGS_SIZE, -1));
2813 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
2814 TREE_SIDE_EFFECTS (t) = 1;
2815 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2818 /* Implement va_arg. */
2821 rs6000_va_arg (valist, type)
2824 tree f_gpr, f_fpr, f_ovf, f_sav;
2825 tree gpr, fpr, ovf, sav, reg, t, u;
2826 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
2827 rtx lab_false, lab_over, addr_rtx, r;
2829 /* For AIX, the rule is that structures are passed left-aligned in
2830 their stack slot. However, GCC does not presently do this:
2831 structures which are the same size as integer types are passed
2832 right-aligned, as if they were in fact integers. This only
2833 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
2834 if (DEFAULT_ABI != ABI_V4)
2836 HOST_WIDE_INT align, rounded_size;
2837 enum machine_mode mode;
2840 /* Compute the rounded size of the type. */
2841 align = PARM_BOUNDARY / BITS_PER_UNIT;
2842 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
2847 mode = TYPE_MODE (type);
2848 if (mode != BLKmode)
2851 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2852 if (rounded_size > align)
2855 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
2856 build_int_2 (rounded_size - adj, 0));
2859 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2860 addr_rtx = copy_to_reg (addr_rtx);
2862 /* Compute new value for AP. */
2863 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
2864 build (PLUS_EXPR, TREE_TYPE (valist), valist,
2865 build_int_2 (rounded_size, 0)));
2866 TREE_SIDE_EFFECTS (t) = 1;
2867 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2872 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2873 f_fpr = TREE_CHAIN (f_gpr);
2874 f_ovf = TREE_CHAIN (f_fpr);
2875 f_sav = TREE_CHAIN (f_ovf);
2877 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2878 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2879 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2880 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2881 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2883 size = int_size_in_bytes (type);
2884 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2886 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
2888 /* Aggregates and long doubles are passed by reference. */
2894 size = rsize = UNITS_PER_WORD;
2896 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
2898 /* FP args go in FP registers, if present. */
2907 /* Otherwise into GP registers. */
2915 /* Pull the value out of the saved registers ... */
2917 lab_false = gen_label_rtx ();
2918 lab_over = gen_label_rtx ();
2919 addr_rtx = gen_reg_rtx (Pmode);
2921 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
2922 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
2925 /* Long long is aligned in the registers. */
2928 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
2929 build_int_2 (n_reg - 1, 0));
2930 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
2931 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
2932 TREE_SIDE_EFFECTS (u) = 1;
2933 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
2937 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
2941 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
2942 TREE_SIDE_EFFECTS (u) = 1;
2944 u = build1 (CONVERT_EXPR, integer_type_node, u);
2945 TREE_SIDE_EFFECTS (u) = 1;
2947 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
2948 TREE_SIDE_EFFECTS (u) = 1;
2950 t = build (PLUS_EXPR, ptr_type_node, t, u);
2951 TREE_SIDE_EFFECTS (t) = 1;
2953 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
2955 emit_move_insn (addr_rtx, r);
2957 emit_jump_insn (gen_jump (lab_over));
2959 emit_label (lab_false);
2961 /* ... otherwise out of the overflow area. */
2963 /* Make sure we don't find reg 7 for the next int arg. */
2966 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
2967 TREE_SIDE_EFFECTS (t) = 1;
2968 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2971 /* Care for on-stack alignment if needed. */
2976 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
2977 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
2981 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
2983 emit_move_insn (addr_rtx, r);
2985 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
2986 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2987 TREE_SIDE_EFFECTS (t) = 1;
2988 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2990 emit_label (lab_over);
2994 r = gen_rtx_MEM (Pmode, addr_rtx);
2995 set_mem_alias_set (r, get_varargs_alias_set ());
2996 emit_move_insn (addr_rtx, r);
3004 #define def_builtin(MASK, NAME, TYPE, CODE) \
3006 if ((MASK) & target_flags) \
3007 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3010 struct builtin_description
3012 const unsigned int mask;
3013 const enum insn_code icode;
3014 const char *const name;
3015 const enum rs6000_builtins code;
3017 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc) */
3018 static const struct builtin_description bdesc_3arg[] =
3020 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3021 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3022 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3023 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3024 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3025 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3026 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3027 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3028 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3029 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3030 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3031 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3032 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3033 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3034 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3035 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3036 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3037 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3038 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3041 /* Simple binary operations: VECc = foo (VECa, VECb). */
3042 static const struct builtin_description bdesc_2arg[] =
3044 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3045 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3046 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3047 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3048 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3049 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3050 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3051 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3052 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3053 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3054 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3055 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3056 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3057 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3058 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3059 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3060 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3061 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3062 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3063 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3064 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3065 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3066 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3067 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3068 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3069 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3070 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3071 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3072 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3073 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3074 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3075 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3076 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3077 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3078 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3079 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3080 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3081 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3082 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3083 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3084 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3085 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3086 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3087 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3088 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3089 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3090 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3091 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3092 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3093 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3094 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3095 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3096 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3097 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3098 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3099 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3100 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3101 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3102 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3103 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3104 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3105 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3106 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3107 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3108 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3109 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3110 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3111 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3112 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3113 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3114 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3115 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3116 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3117 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3118 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3119 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3120 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3121 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3122 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3123 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3124 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3125 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3126 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3127 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3128 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3129 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3130 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3131 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3132 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3133 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3134 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3135 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3136 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3137 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3138 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3139 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3140 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3141 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3142 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3143 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3144 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3145 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3146 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3147 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3148 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3149 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3150 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3151 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3152 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3153 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3154 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3155 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3156 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3158 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3160 static const struct builtin_description bdesc_1arg[] =
3162 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3163 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3164 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3165 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3166 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3167 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3168 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3169 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3170 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3171 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3172 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3176 altivec_expand_unop_builtin (icode, arglist, target)
3177 enum insn_code icode;
3182 tree arg0 = TREE_VALUE (arglist);
3183 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3184 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3185 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3188 || GET_MODE (target) != tmode
3189 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3190 target = gen_reg_rtx (tmode);
3192 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3193 op0 = copy_to_mode_reg (mode0, op0);
3195 pat = GEN_FCN (icode) (target, op0);
3203 altivec_expand_binop_builtin (icode, arglist, target)
3204 enum insn_code icode;
3209 tree arg0 = TREE_VALUE (arglist);
3210 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3211 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3212 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3213 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3214 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3215 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3218 || GET_MODE (target) != tmode
3219 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3220 target = gen_reg_rtx (tmode);
3222 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3223 op0 = copy_to_mode_reg (mode0, op0);
3224 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3225 op1 = copy_to_mode_reg (mode1, op1);
3227 pat = GEN_FCN (icode) (target, op0, op1);
3235 altivec_expand_ternop_builtin (icode, arglist, target)
3236 enum insn_code icode;
3241 tree arg0 = TREE_VALUE (arglist);
3242 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3243 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3244 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3245 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3246 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3247 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3248 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3249 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3250 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3253 || GET_MODE (target) != tmode
3254 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3255 target = gen_reg_rtx (tmode);
3257 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3258 op0 = copy_to_mode_reg (mode0, op0);
3259 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3260 op1 = copy_to_mode_reg (mode1, op1);
3261 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3262 op2 = copy_to_mode_reg (mode2, op2);
3264 pat = GEN_FCN (icode) (target, op0, op1, op2);
3272 altivec_expand_builtin (exp, target)
3276 struct builtin_description *d;
3278 enum insn_code icode;
3279 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3280 tree arglist = TREE_OPERAND (exp, 1);
3283 enum machine_mode tmode, mode0, mode1;
3284 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3288 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3289 icode = CODE_FOR_altivec_lvx_16qi;
3290 arg0 = TREE_VALUE (arglist);
3291 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3292 tmode = insn_data[icode].operand[0].mode;
3293 mode0 = insn_data[icode].operand[1].mode;
3296 || GET_MODE (target) != tmode
3297 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3298 target = gen_reg_rtx (tmode);
3300 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3301 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3303 pat = GEN_FCN (icode) (target, op0);
3308 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3309 icode = CODE_FOR_altivec_lvx_8hi;
3310 arg0 = TREE_VALUE (arglist);
3311 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3312 tmode = insn_data[icode].operand[0].mode;
3313 mode0 = insn_data[icode].operand[1].mode;
3316 || GET_MODE (target) != tmode
3317 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3318 target = gen_reg_rtx (tmode);
3320 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3321 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3323 pat = GEN_FCN (icode) (target, op0);
3328 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3329 icode = CODE_FOR_altivec_lvx_4si;
3330 arg0 = TREE_VALUE (arglist);
3331 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3332 tmode = insn_data[icode].operand[0].mode;
3333 mode0 = insn_data[icode].operand[1].mode;
3336 || GET_MODE (target) != tmode
3337 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3338 target = gen_reg_rtx (tmode);
3340 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3341 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3343 pat = GEN_FCN (icode) (target, op0);
3348 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3349 icode = CODE_FOR_altivec_lvx_4sf;
3350 arg0 = TREE_VALUE (arglist);
3351 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3352 tmode = insn_data[icode].operand[0].mode;
3353 mode0 = insn_data[icode].operand[1].mode;
3356 || GET_MODE (target) != tmode
3357 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3358 target = gen_reg_rtx (tmode);
3360 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3361 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3363 pat = GEN_FCN (icode) (target, op0);
3369 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3370 icode = CODE_FOR_altivec_stvx_16qi;
3371 arg0 = TREE_VALUE (arglist);
3372 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3373 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3374 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3375 mode0 = insn_data[icode].operand[0].mode;
3376 mode1 = insn_data[icode].operand[1].mode;
3378 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3379 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3380 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3381 op1 = copy_to_mode_reg (mode1, op1);
3383 pat = GEN_FCN (icode) (op0, op1);
3388 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3389 icode = CODE_FOR_altivec_stvx_8hi;
3390 arg0 = TREE_VALUE (arglist);
3391 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3392 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3393 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3394 mode0 = insn_data[icode].operand[0].mode;
3395 mode1 = insn_data[icode].operand[1].mode;
3397 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3398 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3399 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3400 op1 = copy_to_mode_reg (mode1, op1);
3402 pat = GEN_FCN (icode) (op0, op1);
3407 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3408 icode = CODE_FOR_altivec_stvx_4si;
3409 arg0 = TREE_VALUE (arglist);
3410 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3411 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3412 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3413 mode0 = insn_data[icode].operand[0].mode;
3414 mode1 = insn_data[icode].operand[1].mode;
3416 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3417 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3418 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3419 op1 = copy_to_mode_reg (mode1, op1);
3421 pat = GEN_FCN (icode) (op0, op1);
3426 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3427 icode = CODE_FOR_altivec_stvx_4sf;
3428 arg0 = TREE_VALUE (arglist);
3429 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3430 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3431 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3432 mode0 = insn_data[icode].operand[0].mode;
3433 mode1 = insn_data[icode].operand[1].mode;
3435 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3436 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3437 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3438 op1 = copy_to_mode_reg (mode1, op1);
3440 pat = GEN_FCN (icode) (op0, op1);
3446 /* Handle simple unary operations. */
3447 d = (struct builtin_description *) bdesc_1arg;
3448 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3449 if (d->code == fcode)
3450 return altivec_expand_unop_builtin (d->icode, arglist, target);
3452 /* Handle simple binary operations. */
3453 d = (struct builtin_description *) bdesc_2arg;
3454 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3455 if (d->code == fcode)
3456 return altivec_expand_binop_builtin (d->icode, arglist, target);
3458 /* Handle simple ternary operations. */
3459 d = (struct builtin_description *) bdesc_3arg;
3460 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3461 if (d->code == fcode)
3462 return altivec_expand_ternop_builtin (d->icode, arglist, target);
3468 /* Expand an expression EXP that calls a built-in function,
3469 with result going to TARGET if that's convenient
3470 (and in mode MODE if that's convenient).
3471 SUBTARGET may be used as the target for computing one of EXP's operands.
3472 IGNORE is nonzero if the value is to be ignored. */
3475 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3478 rtx subtarget ATTRIBUTE_UNUSED;
3479 enum machine_mode mode ATTRIBUTE_UNUSED;
3480 int ignore ATTRIBUTE_UNUSED;
3483 return altivec_expand_builtin (exp, target);
3489 rs6000_init_builtins ()
3492 altivec_init_builtins ();
3496 altivec_init_builtins (void)
3498 struct builtin_description * d;
3501 tree endlink = void_list_node;
3503 tree pint_type_node = build_pointer_type (integer_type_node);
3504 tree pshort_type_node = build_pointer_type (short_integer_type_node);
3505 tree pchar_type_node = build_pointer_type (char_type_node);
3506 tree pfloat_type_node = build_pointer_type (float_type_node);
3507 tree v4sf_ftype_v4sf_v4sf_v16qi
3508 = build_function_type (V4SF_type_node,
3509 tree_cons (NULL_TREE, V4SF_type_node,
3510 tree_cons (NULL_TREE, V4SF_type_node,
3511 tree_cons (NULL_TREE,
3514 tree v4si_ftype_v4si_v4si_v16qi
3515 = build_function_type (V4SI_type_node,
3516 tree_cons (NULL_TREE, V4SI_type_node,
3517 tree_cons (NULL_TREE, V4SI_type_node,
3518 tree_cons (NULL_TREE,
3521 tree v8hi_ftype_v8hi_v8hi_v16qi
3522 = build_function_type (V8HI_type_node,
3523 tree_cons (NULL_TREE, V8HI_type_node,
3524 tree_cons (NULL_TREE, V8HI_type_node,
3525 tree_cons (NULL_TREE,
3528 tree v16qi_ftype_v16qi_v16qi_v16qi
3529 = build_function_type (V16QI_type_node,
3530 tree_cons (NULL_TREE, V16QI_type_node,
3531 tree_cons (NULL_TREE, V16QI_type_node,
3532 tree_cons (NULL_TREE,
3536 /* V4SI foo (char) */
3537 tree v4si_ftype_char
3538 = build_function_type (V4SI_type_node,
3539 tree_cons (NULL_TREE, char_type_node, endlink));
3541 /* V8HI foo (char) */
3542 tree v8hi_ftype_char
3543 = build_function_type (V8HI_type_node,
3544 tree_cons (NULL_TREE, char_type_node, endlink));
3546 /* V16QI foo (char) */
3547 tree v16qi_ftype_char
3548 = build_function_type (V16QI_type_node,
3549 tree_cons (NULL_TREE, char_type_node, endlink));
3550 /* V4SF foo (V4SF) */
3551 tree v4sf_ftype_v4sf
3552 = build_function_type (V4SF_type_node,
3553 tree_cons (NULL_TREE, V4SF_type_node, endlink));
3555 /* V4SI foo (int *). */
3556 tree v4si_ftype_pint
3557 = build_function_type (V4SI_type_node,
3558 tree_cons (NULL_TREE, pint_type_node, endlink));
3559 /* V8HI foo (short *). */
3560 tree v8hi_ftype_pshort
3561 = build_function_type (V8HI_type_node,
3562 tree_cons (NULL_TREE, pshort_type_node, endlink));
3563 /* V16QI foo (char *). */
3564 tree v16qi_ftype_pchar
3565 = build_function_type (V16QI_type_node,
3566 tree_cons (NULL_TREE, pchar_type_node, endlink));
3567 /* V4SF foo (float *). */
3568 tree v4sf_ftype_pfloat
3569 = build_function_type (V4SF_type_node,
3570 tree_cons (NULL_TREE, pfloat_type_node, endlink));
3572 /* void foo (int *, V4SI). */
3573 tree void_ftype_pint_v4si
3574 = build_function_type (void_type_node,
3575 tree_cons (NULL_TREE, pint_type_node,
3576 tree_cons (NULL_TREE, V4SI_type_node,
3578 /* void foo (short *, V8HI). */
3579 tree void_ftype_pshort_v8hi
3580 = build_function_type (void_type_node,
3581 tree_cons (NULL_TREE, pshort_type_node,
3582 tree_cons (NULL_TREE, V8HI_type_node,
3584 /* void foo (char *, V16QI). */
3585 tree void_ftype_pchar_v16qi
3586 = build_function_type (void_type_node,
3587 tree_cons (NULL_TREE, pchar_type_node,
3588 tree_cons (NULL_TREE, V16QI_type_node,
3590 /* void foo (float *, V4SF). */
3591 tree void_ftype_pfloat_v4sf
3592 = build_function_type (void_type_node,
3593 tree_cons (NULL_TREE, pfloat_type_node,
3594 tree_cons (NULL_TREE, V4SF_type_node,
3597 tree v4si_ftype_v4si_v4si
3598 = build_function_type (V4SI_type_node,
3599 tree_cons (NULL_TREE, V4SI_type_node,
3600 tree_cons (NULL_TREE, V4SI_type_node,
3602 /* These are really for the unsigned 5 bit literals */
3603 tree v4sf_ftype_v4si_char
3604 = build_function_type (V4SF_type_node,
3605 tree_cons (NULL_TREE, V4SI_type_node,
3606 tree_cons (NULL_TREE, char_type_node,
3608 tree v4si_ftype_v4sf_char
3609 = build_function_type (V4SI_type_node,
3610 tree_cons (NULL_TREE, V4SF_type_node,
3611 tree_cons (NULL_TREE, char_type_node,
3613 tree v4si_ftype_v4si_char
3614 = build_function_type (V4SI_type_node,
3615 tree_cons (NULL_TREE, V4SI_type_node,
3616 tree_cons (NULL_TREE, char_type_node,
3618 tree v8hi_ftype_v8hi_char
3619 = build_function_type (V8HI_type_node,
3620 tree_cons (NULL_TREE, V8HI_type_node,
3621 tree_cons (NULL_TREE, char_type_node,
3623 tree v16qi_ftype_v16qi_char
3624 = build_function_type (V16QI_type_node,
3625 tree_cons (NULL_TREE, V16QI_type_node,
3626 tree_cons (NULL_TREE, char_type_node,
3629 tree v4sf_ftype_v4sf_v4sf
3630 = build_function_type (V4SF_type_node,
3631 tree_cons (NULL_TREE, V4SF_type_node,
3632 tree_cons (NULL_TREE, V4SF_type_node,
3634 tree v4sf_ftype_v4sf_v4sf_v4si
3635 = build_function_type (V4SF_type_node,
3636 tree_cons (NULL_TREE, V4SF_type_node,
3637 tree_cons (NULL_TREE, V4SF_type_node,
3638 tree_cons (NULL_TREE,
3641 tree v4sf_ftype_v4sf_v4sf_v4sf
3642 = build_function_type (V4SF_type_node,
3643 tree_cons (NULL_TREE, V4SF_type_node,
3644 tree_cons (NULL_TREE, V4SF_type_node,
3645 tree_cons (NULL_TREE,
3648 tree v4si_ftype_v4si_v4si_v4si
3649 = build_function_type (V4SI_type_node,
3650 tree_cons (NULL_TREE, V4SI_type_node,
3651 tree_cons (NULL_TREE, V4SI_type_node,
3652 tree_cons (NULL_TREE,
3656 tree v8hi_ftype_v8hi_v8hi
3657 = build_function_type (V8HI_type_node,
3658 tree_cons (NULL_TREE, V8HI_type_node,
3659 tree_cons (NULL_TREE, V8HI_type_node,
3661 tree v8hi_ftype_v8hi_v8hi_v8hi
3662 = build_function_type (V8HI_type_node,
3663 tree_cons (NULL_TREE, V8HI_type_node,
3664 tree_cons (NULL_TREE, V8HI_type_node,
3665 tree_cons (NULL_TREE,
3668 tree v4si_ftype_v8hi_v8hi_v4si
3669 = build_function_type (V4SI_type_node,
3670 tree_cons (NULL_TREE, V8HI_type_node,
3671 tree_cons (NULL_TREE, V8HI_type_node,
3672 tree_cons (NULL_TREE,
3675 tree v4si_ftype_v16qi_v16qi_v4si
3676 = build_function_type (V4SI_type_node,
3677 tree_cons (NULL_TREE, V16QI_type_node,
3678 tree_cons (NULL_TREE, V16QI_type_node,
3679 tree_cons (NULL_TREE,
3683 tree v16qi_ftype_v16qi_v16qi
3684 = build_function_type (V16QI_type_node,
3685 tree_cons (NULL_TREE, V16QI_type_node,
3686 tree_cons (NULL_TREE, V16QI_type_node,
3689 tree v4si_ftype_v4sf_v4sf
3690 = build_function_type (V4SI_type_node,
3691 tree_cons (NULL_TREE, V4SF_type_node,
3692 tree_cons (NULL_TREE, V4SF_type_node,
3695 tree v8hi_ftype_v16qi_v16qi
3696 = build_function_type (V8HI_type_node,
3697 tree_cons (NULL_TREE, V16QI_type_node,
3698 tree_cons (NULL_TREE, V16QI_type_node,
3701 tree v4si_ftype_v8hi_v8hi
3702 = build_function_type (V4SI_type_node,
3703 tree_cons (NULL_TREE, V8HI_type_node,
3704 tree_cons (NULL_TREE, V8HI_type_node,
3707 tree v8hi_ftype_v4si_v4si
3708 = build_function_type (V8HI_type_node,
3709 tree_cons (NULL_TREE, V4SI_type_node,
3710 tree_cons (NULL_TREE, V4SI_type_node,
3713 tree v16qi_ftype_v8hi_v8hi
3714 = build_function_type (V16QI_type_node,
3715 tree_cons (NULL_TREE, V8HI_type_node,
3716 tree_cons (NULL_TREE, V8HI_type_node,
3719 tree v4si_ftype_v16qi_v4si
3720 = build_function_type (V4SI_type_node,
3721 tree_cons (NULL_TREE, V16QI_type_node,
3722 tree_cons (NULL_TREE, V4SI_type_node,
3725 tree v4si_ftype_v8hi_v4si
3726 = build_function_type (V4SI_type_node,
3727 tree_cons (NULL_TREE, V8HI_type_node,
3728 tree_cons (NULL_TREE, V4SI_type_node,
3731 tree int_ftype_v4si_v4si
3732 = build_function_type (integer_type_node,
3733 tree_cons (NULL_TREE, V4SI_type_node,
3734 tree_cons (NULL_TREE, V4SI_type_node,
3737 tree int_ftype_v4sf_v4sf
3738 = build_function_type (integer_type_node,
3739 tree_cons (NULL_TREE, V4SF_type_node,
3740 tree_cons (NULL_TREE, V4SF_type_node,
3743 tree int_ftype_v16qi_v16qi
3744 = build_function_type (integer_type_node,
3745 tree_cons (NULL_TREE, V16QI_type_node,
3746 tree_cons (NULL_TREE, V16QI_type_node,
3749 tree int_ftype_v8hi_v8hi
3750 = build_function_type (integer_type_node,
3751 tree_cons (NULL_TREE, V8HI_type_node,
3752 tree_cons (NULL_TREE, V8HI_type_node,
3755 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
3756 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
3757 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
3758 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
3759 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
3760 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
3761 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
3762 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
3764 /* Add the simple ternary operators. */
3765 d = (struct builtin_description *) bdesc_3arg;
3766 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3769 enum machine_mode mode0, mode1, mode2, mode3;
3775 mode0 = insn_data[d->icode].operand[0].mode;
3776 mode1 = insn_data[d->icode].operand[1].mode;
3777 mode2 = insn_data[d->icode].operand[2].mode;
3778 mode3 = insn_data[d->icode].operand[3].mode;
3780 /* When all four are of the same mode. */
3781 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
3786 type = v4si_ftype_v4si_v4si_v4si;
3789 type = v4sf_ftype_v4sf_v4sf_v4sf;
3792 type = v8hi_ftype_v8hi_v8hi_v8hi;
3795 type = v16qi_ftype_v16qi_v16qi_v16qi;
3801 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
3806 type = v4si_ftype_v4si_v4si_v16qi;
3809 type = v4sf_ftype_v4sf_v4sf_v16qi;
3812 type = v8hi_ftype_v8hi_v8hi_v16qi;
3815 type = v16qi_ftype_v16qi_v16qi_v16qi;
3821 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
3822 && mode3 == V4SImode)
3823 type = v4si_ftype_v16qi_v16qi_v4si;
3824 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
3825 && mode3 == V4SImode)
3826 type = v4si_ftype_v8hi_v8hi_v4si;
3827 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
3828 && mode3 == V4SImode)
3829 type = v4sf_ftype_v4sf_v4sf_v4si;
3833 def_builtin (d->mask, d->name, type, d->code);
3836 /* Add the simple binary operators. */
3837 d = (struct builtin_description *) bdesc_2arg;
3838 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3840 enum machine_mode mode0, mode1, mode2;
3846 mode0 = insn_data[d->icode].operand[0].mode;
3847 mode1 = insn_data[d->icode].operand[1].mode;
3848 mode2 = insn_data[d->icode].operand[2].mode;
3850 /* When all three operands are of the same mode. */
3851 if (mode0 == mode1 && mode1 == mode2)
3856 type = v4sf_ftype_v4sf_v4sf;
3859 type = v4si_ftype_v4si_v4si;
3862 type = v16qi_ftype_v16qi_v16qi;
3865 type = v8hi_ftype_v8hi_v8hi;
3872 /* A few other combos we really don't want to do manually. */
3874 /* vint, vfloat, vfloat. */
3875 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
3876 type = v4si_ftype_v4sf_v4sf;
3878 /* vshort, vchar, vchar. */
3879 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
3880 type = v8hi_ftype_v16qi_v16qi;
3882 /* vint, vshort, vshort. */
3883 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
3884 type = v4si_ftype_v8hi_v8hi;
3886 /* vshort, vint, vint. */
3887 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
3888 type = v8hi_ftype_v4si_v4si;
3890 /* vchar, vshort, vshort. */
3891 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
3892 type = v16qi_ftype_v8hi_v8hi;
3894 /* vint, vchar, vint. */
3895 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
3896 type = v4si_ftype_v16qi_v4si;
3898 /* vint, vshort, vint. */
3899 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
3900 type = v4si_ftype_v8hi_v4si;
3902 /* vint, vint, 5 bit literal. */
3903 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
3904 type = v4si_ftype_v4si_char;
3906 /* vshort, vshort, 5 bit literal. */
3907 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
3908 type = v8hi_ftype_v8hi_char;
3910 /* vchar, vchar, 5 bit literal. */
3911 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
3912 type = v16qi_ftype_v16qi_char;
3914 /* vfloat, vint, 5 bit literal. */
3915 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
3916 type = v4sf_ftype_v4si_char;
3918 /* vint, vfloat, 5 bit literal. */
3919 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
3920 type = v4si_ftype_v4sf_char;
3924 else if (mode0 == SImode)
3929 type = int_ftype_v4si_v4si;
3932 type = int_ftype_v4sf_v4sf;
3935 type = int_ftype_v16qi_v16qi;
3938 type = int_ftype_v8hi_v8hi;
3948 def_builtin (d->mask, d->name, type, d->code);
3950 /* Add the simple unary operators. */
3951 d = (struct builtin_description *) bdesc_1arg;
3952 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3954 enum machine_mode mode0, mode1;
3960 mode0 = insn_data[d->icode].operand[0].mode;
3961 mode1 = insn_data[d->icode].operand[1].mode;
3963 if (mode0 == V4SImode && mode1 == QImode)
3964 type = v4si_ftype_char;
3965 else if (mode0 == V8HImode && mode1 == QImode)
3966 type = v8hi_ftype_char;
3967 else if (mode0 == V16QImode && mode1 == QImode)
3968 type = v16qi_ftype_char;
3969 else if (mode0 == V4SFmode && mode1 == V4SFmode)
3970 type = v4sf_ftype_v4sf;
3974 def_builtin (d->mask, d->name, type, d->code);
3979 /* Generate a memory reference for expand_block_move, copying volatile,
3980 and other bits from an original memory reference. */
3983 expand_block_move_mem (mode, addr, orig_mem)
3984 enum machine_mode mode;
3988 rtx mem = gen_rtx_MEM (mode, addr);
3990 MEM_COPY_ATTRIBUTES (mem, orig_mem);
3994 /* Expand a block move operation, and return 1 if successful. Return 0
3995 if we should let the compiler generate normal code.
3997 operands[0] is the destination
3998 operands[1] is the source
3999 operands[2] is the length
4000 operands[3] is the alignment */
4002 #define MAX_MOVE_REG 4
4005 expand_block_move (operands)
4008 rtx orig_dest = operands[0];
4009 rtx orig_src = operands[1];
4010 rtx bytes_rtx = operands[2];
4011 rtx align_rtx = operands[3];
4012 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4023 rtx stores[MAX_MOVE_REG];
4026 /* If this is not a fixed size move, just call memcpy */
4030 /* If this is not a fixed size alignment, abort */
4031 if (GET_CODE (align_rtx) != CONST_INT)
4033 align = INTVAL (align_rtx);
4035 /* Anything to move? */
4036 bytes = INTVAL (bytes_rtx);
4040 /* Don't support real large moves. If string instructions are not used,
4041 then don't generate more than 8 loads. */
4047 else if (! STRICT_ALIGNMENT)
4049 if (TARGET_POWERPC64 && align >= 4)
4058 else if (bytes > 8*align)
4061 /* Move the address into scratch registers. */
4062 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4063 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4065 if (TARGET_STRING) /* string instructions are available */
4067 for ( ; bytes > 0; bytes -= move_bytes)
4069 if (bytes > 24 /* move up to 32 bytes at a time */
4077 && ! fixed_regs[12])
4079 move_bytes = (bytes > 32) ? 32 : bytes;
4080 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4083 expand_block_move_mem (BLKmode,
4086 GEN_INT ((move_bytes == 32)
4090 else if (bytes > 16 /* move up to 24 bytes at a time */
4096 && ! fixed_regs[10])
4098 move_bytes = (bytes > 24) ? 24 : bytes;
4099 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4102 expand_block_move_mem (BLKmode,
4105 GEN_INT (move_bytes),
4108 else if (bytes > 8 /* move up to 16 bytes at a time */
4114 move_bytes = (bytes > 16) ? 16 : bytes;
4115 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4118 expand_block_move_mem (BLKmode,
4121 GEN_INT (move_bytes),
4124 else if (bytes >= 8 && TARGET_POWERPC64
4125 /* 64-bit loads and stores require word-aligned
4127 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4130 tmp_reg = gen_reg_rtx (DImode);
4131 emit_move_insn (tmp_reg,
4132 expand_block_move_mem (DImode,
4133 src_reg, orig_src));
4134 emit_move_insn (expand_block_move_mem (DImode,
4135 dest_reg, orig_dest),
4138 else if (bytes > 4 && !TARGET_POWERPC64)
4139 { /* move up to 8 bytes at a time */
4140 move_bytes = (bytes > 8) ? 8 : bytes;
4141 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4144 expand_block_move_mem (BLKmode,
4147 GEN_INT (move_bytes),
4150 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4151 { /* move 4 bytes */
4153 tmp_reg = gen_reg_rtx (SImode);
4154 emit_move_insn (tmp_reg,
4155 expand_block_move_mem (SImode,
4156 src_reg, orig_src));
4157 emit_move_insn (expand_block_move_mem (SImode,
4158 dest_reg, orig_dest),
4161 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4162 { /* move 2 bytes */
4164 tmp_reg = gen_reg_rtx (HImode);
4165 emit_move_insn (tmp_reg,
4166 expand_block_move_mem (HImode,
4167 src_reg, orig_src));
4168 emit_move_insn (expand_block_move_mem (HImode,
4169 dest_reg, orig_dest),
4172 else if (bytes == 1) /* move 1 byte */
4175 tmp_reg = gen_reg_rtx (QImode);
4176 emit_move_insn (tmp_reg,
4177 expand_block_move_mem (QImode,
4178 src_reg, orig_src));
4179 emit_move_insn (expand_block_move_mem (QImode,
4180 dest_reg, orig_dest),
4184 { /* move up to 4 bytes at a time */
4185 move_bytes = (bytes > 4) ? 4 : bytes;
4186 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
4189 expand_block_move_mem (BLKmode,
4192 GEN_INT (move_bytes),
4196 if (bytes > move_bytes)
4198 if (! TARGET_POWERPC64)
4200 emit_insn (gen_addsi3 (src_reg, src_reg,
4201 GEN_INT (move_bytes)));
4202 emit_insn (gen_addsi3 (dest_reg, dest_reg,
4203 GEN_INT (move_bytes)));
4207 emit_insn (gen_adddi3 (src_reg, src_reg,
4208 GEN_INT (move_bytes)));
4209 emit_insn (gen_adddi3 (dest_reg, dest_reg,
4210 GEN_INT (move_bytes)));
4216 else /* string instructions not available */
4218 num_reg = offset = 0;
4219 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
4221 /* Calculate the correct offset for src/dest */
4225 dest_addr = dest_reg;
4229 src_addr = plus_constant (src_reg, offset);
4230 dest_addr = plus_constant (dest_reg, offset);
4233 /* Generate the appropriate load and store, saving the stores
4235 if (bytes >= 8 && TARGET_POWERPC64
4236 /* 64-bit loads and stores require word-aligned
4238 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4241 tmp_reg = gen_reg_rtx (DImode);
4242 emit_insn (gen_movdi (tmp_reg,
4243 expand_block_move_mem (DImode,
4246 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
4251 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4254 tmp_reg = gen_reg_rtx (SImode);
4255 emit_insn (gen_movsi (tmp_reg,
4256 expand_block_move_mem (SImode,
4259 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
4264 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4267 tmp_reg = gen_reg_rtx (HImode);
4268 emit_insn (gen_movhi (tmp_reg,
4269 expand_block_move_mem (HImode,
4272 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
4280 tmp_reg = gen_reg_rtx (QImode);
4281 emit_insn (gen_movqi (tmp_reg,
4282 expand_block_move_mem (QImode,
4285 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
4291 if (num_reg >= MAX_MOVE_REG)
4293 for (i = 0; i < num_reg; i++)
4294 emit_insn (stores[i]);
4299 for (i = 0; i < num_reg; i++)
4300 emit_insn (stores[i]);
4307 /* Return 1 if OP is a load multiple operation. It is known to be a
4308 PARALLEL and the first section will be tested. */
4311 load_multiple_operation (op, mode)
4313 enum machine_mode mode ATTRIBUTE_UNUSED;
4315 int count = XVECLEN (op, 0);
4316 unsigned int dest_regno;
4320 /* Perform a quick check so we don't blow up below. */
4322 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4323 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4324 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4327 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4328 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4330 for (i = 1; i < count; i++)
4332 rtx elt = XVECEXP (op, 0, i);
4334 if (GET_CODE (elt) != SET
4335 || GET_CODE (SET_DEST (elt)) != REG
4336 || GET_MODE (SET_DEST (elt)) != SImode
4337 || REGNO (SET_DEST (elt)) != dest_regno + i
4338 || GET_CODE (SET_SRC (elt)) != MEM
4339 || GET_MODE (SET_SRC (elt)) != SImode
4340 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4341 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4342 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4343 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
4350 /* Similar, but tests for store multiple. Here, the second vector element
4351 is a CLOBBER. It will be tested later. */
4354 store_multiple_operation (op, mode)
4356 enum machine_mode mode ATTRIBUTE_UNUSED;
4358 int count = XVECLEN (op, 0) - 1;
4359 unsigned int src_regno;
4363 /* Perform a quick check so we don't blow up below. */
4365 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4366 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4367 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4370 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4371 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4373 for (i = 1; i < count; i++)
4375 rtx elt = XVECEXP (op, 0, i + 1);
4377 if (GET_CODE (elt) != SET
4378 || GET_CODE (SET_SRC (elt)) != REG
4379 || GET_MODE (SET_SRC (elt)) != SImode
4380 || REGNO (SET_SRC (elt)) != src_regno + i
4381 || GET_CODE (SET_DEST (elt)) != MEM
4382 || GET_MODE (SET_DEST (elt)) != SImode
4383 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4384 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4385 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4386 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
4393 /* Return 1 for a parallel vrsave operation. */
4396 vrsave_operation (op, mode)
4398 enum machine_mode mode ATTRIBUTE_UNUSED;
4400 int count = XVECLEN (op, 0);
4401 unsigned int dest_regno, src_regno;
4405 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4406 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4407 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4410 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4411 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4413 if (dest_regno != VRSAVE_REGNO
4414 && src_regno != VRSAVE_REGNO)
4417 for (i = 1; i < count; i++)
4419 rtx elt = XVECEXP (op, 0, i);
4421 if (GET_CODE (elt) != CLOBBER)
4428 /* Return 1 for an PARALLEL suitable for mtcrf. */
4431 mtcrf_operation (op, mode)
4433 enum machine_mode mode ATTRIBUTE_UNUSED;
4435 int count = XVECLEN (op, 0);
4439 /* Perform a quick check so we don't blow up below. */
4441 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4442 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
4443 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
4445 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
4447 if (GET_CODE (src_reg) != REG
4448 || GET_MODE (src_reg) != SImode
4449 || ! INT_REGNO_P (REGNO (src_reg)))
4452 for (i = 0; i < count; i++)
4454 rtx exp = XVECEXP (op, 0, i);
4458 if (GET_CODE (exp) != SET
4459 || GET_CODE (SET_DEST (exp)) != REG
4460 || GET_MODE (SET_DEST (exp)) != CCmode
4461 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
4463 unspec = SET_SRC (exp);
4464 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
4466 if (GET_CODE (unspec) != UNSPEC
4467 || XINT (unspec, 1) != 20
4468 || XVECLEN (unspec, 0) != 2
4469 || XVECEXP (unspec, 0, 0) != src_reg
4470 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
4471 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
4477 /* Return 1 for an PARALLEL suitable for lmw. */
4480 lmw_operation (op, mode)
4482 enum machine_mode mode ATTRIBUTE_UNUSED;
4484 int count = XVECLEN (op, 0);
4485 unsigned int dest_regno;
4487 unsigned int base_regno;
4488 HOST_WIDE_INT offset;
4491 /* Perform a quick check so we don't blow up below. */
4493 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4494 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4495 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4498 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4499 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4502 || count != 32 - (int) dest_regno)
4505 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
4508 base_regno = REGNO (src_addr);
4509 if (base_regno == 0)
4512 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
4514 offset = INTVAL (XEXP (src_addr, 1));
4515 base_regno = REGNO (XEXP (src_addr, 0));
4520 for (i = 0; i < count; i++)
4522 rtx elt = XVECEXP (op, 0, i);
4525 HOST_WIDE_INT newoffset;
4527 if (GET_CODE (elt) != SET
4528 || GET_CODE (SET_DEST (elt)) != REG
4529 || GET_MODE (SET_DEST (elt)) != SImode
4530 || REGNO (SET_DEST (elt)) != dest_regno + i
4531 || GET_CODE (SET_SRC (elt)) != MEM
4532 || GET_MODE (SET_SRC (elt)) != SImode)
4534 newaddr = XEXP (SET_SRC (elt), 0);
4535 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4540 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4542 addr_reg = XEXP (newaddr, 0);
4543 newoffset = INTVAL (XEXP (newaddr, 1));
4547 if (REGNO (addr_reg) != base_regno
4548 || newoffset != offset + 4 * i)
4555 /* Return 1 for an PARALLEL suitable for stmw. */
4558 stmw_operation (op, mode)
4560 enum machine_mode mode ATTRIBUTE_UNUSED;
4562 int count = XVECLEN (op, 0);
4563 unsigned int src_regno;
4565 unsigned int base_regno;
4566 HOST_WIDE_INT offset;
4569 /* Perform a quick check so we don't blow up below. */
4571 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4572 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4573 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4576 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4577 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4580 || count != 32 - (int) src_regno)
4583 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
4586 base_regno = REGNO (dest_addr);
4587 if (base_regno == 0)
4590 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
4592 offset = INTVAL (XEXP (dest_addr, 1));
4593 base_regno = REGNO (XEXP (dest_addr, 0));
4598 for (i = 0; i < count; i++)
4600 rtx elt = XVECEXP (op, 0, i);
4603 HOST_WIDE_INT newoffset;
4605 if (GET_CODE (elt) != SET
4606 || GET_CODE (SET_SRC (elt)) != REG
4607 || GET_MODE (SET_SRC (elt)) != SImode
4608 || REGNO (SET_SRC (elt)) != src_regno + i
4609 || GET_CODE (SET_DEST (elt)) != MEM
4610 || GET_MODE (SET_DEST (elt)) != SImode)
4612 newaddr = XEXP (SET_DEST (elt), 0);
4613 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4618 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4620 addr_reg = XEXP (newaddr, 0);
4621 newoffset = INTVAL (XEXP (newaddr, 1));
4625 if (REGNO (addr_reg) != base_regno
4626 || newoffset != offset + 4 * i)
4633 /* A validation routine: say whether CODE, a condition code, and MODE
4634 match. The other alternatives either don't make sense or should
4635 never be generated. */
4638 validate_condition_mode (code, mode)
4640 enum machine_mode mode;
4642 if (GET_RTX_CLASS (code) != '<'
4643 || GET_MODE_CLASS (mode) != MODE_CC)
4646 /* These don't make sense. */
4647 if ((code == GT || code == LT || code == GE || code == LE)
4648 && mode == CCUNSmode)
4651 if ((code == GTU || code == LTU || code == GEU || code == LEU)
4652 && mode != CCUNSmode)
4655 if (mode != CCFPmode
4656 && (code == ORDERED || code == UNORDERED
4657 || code == UNEQ || code == LTGT
4658 || code == UNGT || code == UNLT
4659 || code == UNGE || code == UNLE))
4662 /* These should never be generated except for
4663 flag_unsafe_math_optimizations. */
4664 if (mode == CCFPmode
4665 && ! flag_unsafe_math_optimizations
4666 && (code == LE || code == GE
4667 || code == UNEQ || code == LTGT
4668 || code == UNGT || code == UNLT))
4671 /* These are invalid; the information is not there. */
4672 if (mode == CCEQmode
4673 && code != EQ && code != NE)
4677 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
4678 We only check the opcode against the mode of the CC value here. */
4681 branch_comparison_operator (op, mode)
4683 enum machine_mode mode ATTRIBUTE_UNUSED;
4685 enum rtx_code code = GET_CODE (op);
4686 enum machine_mode cc_mode;
4688 if (GET_RTX_CLASS (code) != '<')
4691 cc_mode = GET_MODE (XEXP (op, 0));
4692 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4695 validate_condition_mode (code, cc_mode);
4700 /* Return 1 if OP is a comparison operation that is valid for a branch
4701 insn and which is true if the corresponding bit in the CC register
4705 branch_positive_comparison_operator (op, mode)
4707 enum machine_mode mode;
4711 if (! branch_comparison_operator (op, mode))
4714 code = GET_CODE (op);
4715 return (code == EQ || code == LT || code == GT
4716 || code == LTU || code == GTU
4717 || code == UNORDERED);
4720 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
4721 We check the opcode against the mode of the CC value and disallow EQ or
4722 NE comparisons for integers. */
4725 scc_comparison_operator (op, mode)
4727 enum machine_mode mode;
4729 enum rtx_code code = GET_CODE (op);
4730 enum machine_mode cc_mode;
4732 if (GET_MODE (op) != mode && mode != VOIDmode)
4735 if (GET_RTX_CLASS (code) != '<')
4738 cc_mode = GET_MODE (XEXP (op, 0));
4739 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4742 validate_condition_mode (code, cc_mode);
4744 if (code == NE && cc_mode != CCFPmode)
4751 trap_comparison_operator (op, mode)
4753 enum machine_mode mode;
4755 if (mode != VOIDmode && mode != GET_MODE (op))
4757 return GET_RTX_CLASS (GET_CODE (op)) == '<';
4761 boolean_operator (op, mode)
4763 enum machine_mode mode ATTRIBUTE_UNUSED;
4765 enum rtx_code code = GET_CODE (op);
4766 return (code == AND || code == IOR || code == XOR);
4770 boolean_or_operator (op, mode)
4772 enum machine_mode mode ATTRIBUTE_UNUSED;
4774 enum rtx_code code = GET_CODE (op);
4775 return (code == IOR || code == XOR);
4779 min_max_operator (op, mode)
4781 enum machine_mode mode ATTRIBUTE_UNUSED;
4783 enum rtx_code code = GET_CODE (op);
4784 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
4787 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
4788 mask required to convert the result of a rotate insn into a shift
4789 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
4792 includes_lshift_p (shiftop, andop)
4796 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4798 shift_mask <<= INTVAL (shiftop);
4800 return (INTVAL (andop) & ~shift_mask) == 0;
4803 /* Similar, but for right shift. */
4806 includes_rshift_p (shiftop, andop)
4810 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4812 shift_mask >>= INTVAL (shiftop);
4814 return (INTVAL (andop) & ~shift_mask) == 0;
4817 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
4818 to perform a left shift. It must have exactly SHIFTOP least
4819 signifigant 0's, then one or more 1's, then zero or more 0's. */
4822 includes_rldic_lshift_p (shiftop, andop)
4826 if (GET_CODE (andop) == CONST_INT)
4828 HOST_WIDE_INT c, lsb, shift_mask;
4831 if (c == 0 || c == ~0)
4835 shift_mask <<= INTVAL (shiftop);
4837 /* Find the least signifigant one bit. */
4840 /* It must coincide with the LSB of the shift mask. */
4841 if (-lsb != shift_mask)
4844 /* Invert to look for the next transition (if any). */
4847 /* Remove the low group of ones (originally low group of zeros). */
4850 /* Again find the lsb, and check we have all 1's above. */
4854 else if (GET_CODE (andop) == CONST_DOUBLE
4855 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4857 HOST_WIDE_INT low, high, lsb;
4858 HOST_WIDE_INT shift_mask_low, shift_mask_high;
4860 low = CONST_DOUBLE_LOW (andop);
4861 if (HOST_BITS_PER_WIDE_INT < 64)
4862 high = CONST_DOUBLE_HIGH (andop);
4864 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
4865 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
4868 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4870 shift_mask_high = ~0;
4871 if (INTVAL (shiftop) > 32)
4872 shift_mask_high <<= INTVAL (shiftop) - 32;
4876 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
4883 return high == -lsb;
4886 shift_mask_low = ~0;
4887 shift_mask_low <<= INTVAL (shiftop);
4891 if (-lsb != shift_mask_low)
4894 if (HOST_BITS_PER_WIDE_INT < 64)
4899 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4902 return high == -lsb;
4906 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
4912 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
4913 to perform a left shift. It must have SHIFTOP or more least
4914 signifigant 0's, with the remainder of the word 1's. */
4917 includes_rldicr_lshift_p (shiftop, andop)
4921 if (GET_CODE (andop) == CONST_INT)
4923 HOST_WIDE_INT c, lsb, shift_mask;
4926 shift_mask <<= INTVAL (shiftop);
4929 /* Find the least signifigant one bit. */
4932 /* It must be covered by the shift mask.
4933 This test also rejects c == 0. */
4934 if ((lsb & shift_mask) == 0)
4937 /* Check we have all 1's above the transition, and reject all 1's. */
4938 return c == -lsb && lsb != 1;
4940 else if (GET_CODE (andop) == CONST_DOUBLE
4941 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4943 HOST_WIDE_INT low, lsb, shift_mask_low;
4945 low = CONST_DOUBLE_LOW (andop);
4947 if (HOST_BITS_PER_WIDE_INT < 64)
4949 HOST_WIDE_INT high, shift_mask_high;
4951 high = CONST_DOUBLE_HIGH (andop);
4955 shift_mask_high = ~0;
4956 if (INTVAL (shiftop) > 32)
4957 shift_mask_high <<= INTVAL (shiftop) - 32;
4961 if ((lsb & shift_mask_high) == 0)
4964 return high == -lsb;
4970 shift_mask_low = ~0;
4971 shift_mask_low <<= INTVAL (shiftop);
4975 if ((lsb & shift_mask_low) == 0)
4978 return low == -lsb && lsb != 1;
4984 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
4985 for lfq and stfq insns.
4987 Note reg1 and reg2 *must* be hard registers. To be sure we will
4988 abort if we are passed pseudo registers. */
4991 registers_ok_for_quad_peep (reg1, reg2)
4994 /* We might have been passed a SUBREG. */
4995 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
4998 return (REGNO (reg1) == REGNO (reg2) - 1);
5001 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5002 addr1 and addr2 must be in consecutive memory locations
5003 (addr2 == addr1 + 8). */
5006 addrs_ok_for_quad_peep (addr1, addr2)
5013 /* Extract an offset (if used) from the first addr. */
5014 if (GET_CODE (addr1) == PLUS)
5016 /* If not a REG, return zero. */
5017 if (GET_CODE (XEXP (addr1, 0)) != REG)
5021 reg1 = REGNO (XEXP (addr1, 0));
5022 /* The offset must be constant! */
5023 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5025 offset1 = INTVAL (XEXP (addr1, 1));
5028 else if (GET_CODE (addr1) != REG)
5032 reg1 = REGNO (addr1);
5033 /* This was a simple (mem (reg)) expression. Offset is 0. */
5037 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5038 if (GET_CODE (addr2) != PLUS)
5041 if (GET_CODE (XEXP (addr2, 0)) != REG
5042 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5045 if (reg1 != REGNO (XEXP (addr2, 0)))
5048 /* The offset for the second addr must be 8 more than the first addr. */
5049 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5052 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5057 /* Return the register class of a scratch register needed to copy IN into
5058 or out of a register in CLASS in MODE. If it can be done directly,
5059 NO_REGS is returned. */
5062 secondary_reload_class (class, mode, in)
5063 enum reg_class class;
5064 enum machine_mode mode ATTRIBUTE_UNUSED;
5069 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5071 /* We cannot copy a symbolic operand directly into anything
5072 other than BASE_REGS for TARGET_ELF. So indicate that a
5073 register from BASE_REGS is needed as an intermediate
5076 On Darwin, pic addresses require a load from memory, which
5077 needs a base register. */
5078 if (class != BASE_REGS
5079 && (GET_CODE (in) == SYMBOL_REF
5080 || GET_CODE (in) == HIGH
5081 || GET_CODE (in) == LABEL_REF
5082 || GET_CODE (in) == CONST))
5086 if (GET_CODE (in) == REG)
5089 if (regno >= FIRST_PSEUDO_REGISTER)
5091 regno = true_regnum (in);
5092 if (regno >= FIRST_PSEUDO_REGISTER)
5096 else if (GET_CODE (in) == SUBREG)
5098 regno = true_regnum (in);
5099 if (regno >= FIRST_PSEUDO_REGISTER)
5105 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5107 if (class == GENERAL_REGS || class == BASE_REGS
5108 || (regno >= 0 && INT_REGNO_P (regno)))
5111 /* Constants, memory, and FP registers can go into FP registers. */
5112 if ((regno == -1 || FP_REGNO_P (regno))
5113 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5116 /* Memory, and AltiVec registers can go into AltiVec registers. */
5117 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5118 && class == ALTIVEC_REGS)
5121 /* We can copy among the CR registers. */
5122 if ((class == CR_REGS || class == CR0_REGS)
5123 && regno >= 0 && CR_REGNO_P (regno))
5126 /* Otherwise, we need GENERAL_REGS. */
5127 return GENERAL_REGS;
5130 /* Given a comparison operation, return the bit number in CCR to test. We
5131 know this is a valid comparison.
5133 SCC_P is 1 if this is for an scc. That means that %D will have been
5134 used instead of %C, so the bits will be in different places.
5136 Return -1 if OP isn't a valid comparison for some reason. */
5143 enum rtx_code code = GET_CODE (op);
5144 enum machine_mode cc_mode;
5149 if (GET_RTX_CLASS (code) != '<')
5154 if (GET_CODE (reg) != REG
5155 || ! CR_REGNO_P (REGNO (reg)))
5158 cc_mode = GET_MODE (reg);
5159 cc_regnum = REGNO (reg);
5160 base_bit = 4 * (cc_regnum - CR0_REGNO);
5162 validate_condition_mode (code, cc_mode);
5167 return scc_p ? base_bit + 3 : base_bit + 2;
5169 return base_bit + 2;
5170 case GT: case GTU: case UNLE:
5171 return base_bit + 1;
5172 case LT: case LTU: case UNGE:
5174 case ORDERED: case UNORDERED:
5175 return base_bit + 3;
5178 /* If scc, we will have done a cror to put the bit in the
5179 unordered position. So test that bit. For integer, this is ! LT
5180 unless this is an scc insn. */
5181 return scc_p ? base_bit + 3 : base_bit;
5184 return scc_p ? base_bit + 3 : base_bit + 1;
5191 /* Return the GOT register. */
5194 rs6000_got_register (value)
5195 rtx value ATTRIBUTE_UNUSED;
5197 /* The second flow pass currently (June 1999) can't update
5198 regs_ever_live without disturbing other parts of the compiler, so
5199 update it here to make the prolog/epilogue code happy. */
5200 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5201 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5203 current_function_uses_pic_offset_table = 1;
5205 return pic_offset_table_rtx;
5208 /* Functions to init, mark and free struct machine_function.
5209 These will be called, via pointer variables,
5210 from push_function_context and pop_function_context. */
5213 rs6000_init_machine_status (p)
5216 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5220 rs6000_free_machine_status (p)
5223 if (p->machine == NULL)
5231 /* Print an operand. Recognize special options, documented below. */
5234 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5235 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5237 #define SMALL_DATA_RELOC "sda21"
5238 #define SMALL_DATA_REG 0
5242 print_operand (file, x, code)
5250 /* These macros test for integers and extract the low-order bits. */
5252 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
5253 && GET_MODE (X) == VOIDmode)
5255 #define INT_LOWPART(X) \
5256 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
5261 /* Write out an instruction after the call which may be replaced
5262 with glue code by the loader. This depends on the AIX version. */
5263 asm_fprintf (file, RS6000_CALL_GLUE);
5266 /* %a is output_address. */
5269 /* If X is a constant integer whose low-order 5 bits are zero,
5270 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
5271 in the AIX assembler where "sri" with a zero shift count
5272 write a trash instruction. */
5273 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
5280 /* If constant, low-order 16 bits of constant, unsigned.
5281 Otherwise, write normally. */
5283 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
5285 print_operand (file, x, 0);
5289 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
5290 for 64-bit mask direction. */
5291 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
5294 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
5298 /* There used to be a comment for 'C' reading "This is an
5299 optional cror needed for certain floating-point
5300 comparisons. Otherwise write nothing." */
5302 /* Similar, except that this is for an scc, so we must be able to
5303 encode the test in a single bit that is one. We do the above
5304 for any LE, GE, GEU, or LEU and invert the bit for NE. */
5305 if (GET_CODE (x) == LE || GET_CODE (x) == GE
5306 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
5308 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5310 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
5312 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
5315 else if (GET_CODE (x) == NE)
5317 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5319 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
5320 base_bit + 2, base_bit + 2);
5325 /* X is a CR register. Print the number of the EQ bit of the CR */
5326 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5327 output_operand_lossage ("invalid %%E value");
5329 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
5333 /* X is a CR register. Print the shift count needed to move it
5334 to the high-order four bits. */
5335 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5336 output_operand_lossage ("invalid %%f value");
5338 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
5342 /* Similar, but print the count for the rotate in the opposite
5344 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5345 output_operand_lossage ("invalid %%F value");
5347 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
5351 /* X is a constant integer. If it is negative, print "m",
5352 otherwise print "z". This is to make a aze or ame insn. */
5353 if (GET_CODE (x) != CONST_INT)
5354 output_operand_lossage ("invalid %%G value");
5355 else if (INTVAL (x) >= 0)
5362 /* If constant, output low-order five bits. Otherwise, write
5365 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
5367 print_operand (file, x, 0);
5371 /* If constant, output low-order six bits. Otherwise, write
5374 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
5376 print_operand (file, x, 0);
5380 /* Print `i' if this is a constant, else nothing. */
5386 /* Write the bit number in CCR for jump. */
5389 output_operand_lossage ("invalid %%j code");
5391 fprintf (file, "%d", i);
5395 /* Similar, but add one for shift count in rlinm for scc and pass
5396 scc flag to `ccr_bit'. */
5399 output_operand_lossage ("invalid %%J code");
5401 /* If we want bit 31, write a shift count of zero, not 32. */
5402 fprintf (file, "%d", i == 31 ? 0 : i + 1);
5406 /* X must be a constant. Write the 1's complement of the
5409 output_operand_lossage ("invalid %%k value");
5411 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
5415 /* X must be a symbolic constant on ELF. Write an
5416 expression suitable for an 'addi' that adds in the low 16
5418 if (GET_CODE (x) != CONST)
5420 print_operand_address (file, x);
5425 if (GET_CODE (XEXP (x, 0)) != PLUS
5426 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
5427 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
5428 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
5429 output_operand_lossage ("invalid %%K value");
5430 print_operand_address (file, XEXP (XEXP (x, 0), 0));
5432 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
5436 /* %l is output_asm_label. */
5439 /* Write second word of DImode or DFmode reference. Works on register
5440 or non-indexed memory only. */
5441 if (GET_CODE (x) == REG)
5442 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
5443 else if (GET_CODE (x) == MEM)
5445 /* Handle possible auto-increment. Since it is pre-increment and
5446 we have already done it, we can just use an offset of word. */
5447 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5448 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5449 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
5452 output_address (XEXP (adjust_address_nv (x, SImode,
5456 if (small_data_operand (x, GET_MODE (x)))
5457 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5458 reg_names[SMALL_DATA_REG]);
5463 /* MB value for a mask operand. */
5464 if (! mask_operand (x, VOIDmode))
5465 output_operand_lossage ("invalid %%m value");
5467 val = INT_LOWPART (x);
5469 /* If the high bit is set and the low bit is not, the value is zero.
5470 If the high bit is zero, the value is the first 1 bit we find from
5472 if ((val & 0x80000000) && ((val & 1) == 0))
5477 else if ((val & 0x80000000) == 0)
5479 for (i = 1; i < 32; i++)
5480 if ((val <<= 1) & 0x80000000)
5482 fprintf (file, "%d", i);
5486 /* Otherwise, look for the first 0 bit from the right. The result is its
5487 number plus 1. We know the low-order bit is one. */
5488 for (i = 0; i < 32; i++)
5489 if (((val >>= 1) & 1) == 0)
5492 /* If we ended in ...01, i would be 0. The correct value is 31, so
5494 fprintf (file, "%d", 31 - i);
5498 /* ME value for a mask operand. */
5499 if (! mask_operand (x, VOIDmode))
5500 output_operand_lossage ("invalid %%M value");
5502 val = INT_LOWPART (x);
5504 /* If the low bit is set and the high bit is not, the value is 31.
5505 If the low bit is zero, the value is the first 1 bit we find from
5507 if ((val & 1) && ((val & 0x80000000) == 0))
5512 else if ((val & 1) == 0)
5514 for (i = 0; i < 32; i++)
5515 if ((val >>= 1) & 1)
5518 /* If we had ....10, i would be 0. The result should be
5519 30, so we need 30 - i. */
5520 fprintf (file, "%d", 30 - i);
5524 /* Otherwise, look for the first 0 bit from the left. The result is its
5525 number minus 1. We know the high-order bit is one. */
5526 for (i = 0; i < 32; i++)
5527 if (((val <<= 1) & 0x80000000) == 0)
5530 fprintf (file, "%d", i);
5533 /* %n outputs the negative of its operand. */
5536 /* Write the number of elements in the vector times 4. */
5537 if (GET_CODE (x) != PARALLEL)
5538 output_operand_lossage ("invalid %%N value");
5540 fprintf (file, "%d", XVECLEN (x, 0) * 4);
5544 /* Similar, but subtract 1 first. */
5545 if (GET_CODE (x) != PARALLEL)
5546 output_operand_lossage ("invalid %%O value");
5548 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
5552 /* X is a CONST_INT that is a power of two. Output the logarithm. */
5554 || INT_LOWPART (x) < 0
5555 || (i = exact_log2 (INT_LOWPART (x))) < 0)
5556 output_operand_lossage ("invalid %%p value");
5558 fprintf (file, "%d", i);
5562 /* The operand must be an indirect memory reference. The result
5563 is the register number. */
5564 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
5565 || REGNO (XEXP (x, 0)) >= 32)
5566 output_operand_lossage ("invalid %%P value");
5568 fprintf (file, "%d", REGNO (XEXP (x, 0)));
5572 /* This outputs the logical code corresponding to a boolean
5573 expression. The expression may have one or both operands
5574 negated (if one, only the first one). For condition register
5575 logical operations, it will also treat the negated
5576 CR codes as NOTs, but not handle NOTs of them. */
5578 const char *const *t = 0;
5580 enum rtx_code code = GET_CODE (x);
5581 static const char * const tbl[3][3] = {
5582 { "and", "andc", "nor" },
5583 { "or", "orc", "nand" },
5584 { "xor", "eqv", "xor" } };
5588 else if (code == IOR)
5590 else if (code == XOR)
5593 output_operand_lossage ("invalid %%q value");
5595 if (GET_CODE (XEXP (x, 0)) != NOT)
5599 if (GET_CODE (XEXP (x, 1)) == NOT)
5610 /* X is a CR register. Print the mask for `mtcrf'. */
5611 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5612 output_operand_lossage ("invalid %%R value");
5614 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
5618 /* Low 5 bits of 32 - value */
5620 output_operand_lossage ("invalid %%s value");
5622 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
5626 /* PowerPC64 mask position. All 0's and all 1's are excluded.
5627 CONST_INT 32-bit mask is considered sign-extended so any
5628 transition must occur within the CONST_INT, not on the boundary. */
5629 if (! mask64_operand (x, VOIDmode))
5630 output_operand_lossage ("invalid %%S value");
5632 val = INT_LOWPART (x);
5634 if (val & 1) /* Clear Left */
5636 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5637 if (!((val >>= 1) & 1))
5640 #if HOST_BITS_PER_WIDE_INT == 32
5641 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5643 val = CONST_DOUBLE_HIGH (x);
5648 for (i = 32; i < 64; i++)
5649 if (!((val >>= 1) & 1))
5653 /* i = index of last set bit from right
5654 mask begins at 63 - i from left */
5656 output_operand_lossage ("%%S computed all 1's mask");
5658 fprintf (file, "%d", 63 - i);
5661 else /* Clear Right */
5663 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5664 if ((val >>= 1) & 1)
5667 #if HOST_BITS_PER_WIDE_INT == 32
5668 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5670 val = CONST_DOUBLE_HIGH (x);
5672 if (val == (HOST_WIDE_INT) -1)
5675 for (i = 32; i < 64; i++)
5676 if ((val >>= 1) & 1)
5680 /* i = index of last clear bit from right
5681 mask ends at 62 - i from left */
5683 output_operand_lossage ("%%S computed all 0's mask");
5685 fprintf (file, "%d", 62 - i);
5690 /* Print the symbolic name of a branch target register. */
5691 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
5692 && REGNO (x) != COUNT_REGISTER_REGNUM))
5693 output_operand_lossage ("invalid %%T value");
5694 else if (REGNO (x) == LINK_REGISTER_REGNUM)
5695 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
5697 fputs ("ctr", file);
5701 /* High-order 16 bits of constant for use in unsigned operand. */
5703 output_operand_lossage ("invalid %%u value");
5705 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5706 (INT_LOWPART (x) >> 16) & 0xffff);
5710 /* High-order 16 bits of constant for use in signed operand. */
5712 output_operand_lossage ("invalid %%v value");
5714 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5715 (INT_LOWPART (x) >> 16) & 0xffff);
5719 /* Print `u' if this has an auto-increment or auto-decrement. */
5720 if (GET_CODE (x) == MEM
5721 && (GET_CODE (XEXP (x, 0)) == PRE_INC
5722 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
5727 /* Print the trap code for this operand. */
5728 switch (GET_CODE (x))
5731 fputs ("eq", file); /* 4 */
5734 fputs ("ne", file); /* 24 */
5737 fputs ("lt", file); /* 16 */
5740 fputs ("le", file); /* 20 */
5743 fputs ("gt", file); /* 8 */
5746 fputs ("ge", file); /* 12 */
5749 fputs ("llt", file); /* 2 */
5752 fputs ("lle", file); /* 6 */
5755 fputs ("lgt", file); /* 1 */
5758 fputs ("lge", file); /* 5 */
5766 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
5769 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5770 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
5772 print_operand (file, x, 0);
5776 /* MB value for a PowerPC64 rldic operand. */
5777 val = (GET_CODE (x) == CONST_INT
5778 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
5783 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5784 if ((val <<= 1) < 0)
5787 #if HOST_BITS_PER_WIDE_INT == 32
5788 if (GET_CODE (x) == CONST_INT && i >= 0)
5789 i += 32; /* zero-extend high-part was all 0's */
5790 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5792 val = CONST_DOUBLE_LOW (x);
5799 for ( ; i < 64; i++)
5800 if ((val <<= 1) < 0)
5805 fprintf (file, "%d", i + 1);
5809 if (GET_CODE (x) == MEM
5810 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
5815 /* Like 'L', for third word of TImode */
5816 if (GET_CODE (x) == REG)
5817 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
5818 else if (GET_CODE (x) == MEM)
5820 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5821 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5822 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
5824 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
5825 if (small_data_operand (x, GET_MODE (x)))
5826 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5827 reg_names[SMALL_DATA_REG]);
5832 /* X is a SYMBOL_REF. Write out the name preceded by a
5833 period and without any trailing data in brackets. Used for function
5834 names. If we are configured for System V (or the embedded ABI) on
5835 the PowerPC, do not emit the period, since those systems do not use
5836 TOCs and the like. */
5837 if (GET_CODE (x) != SYMBOL_REF)
5840 if (XSTR (x, 0)[0] != '.')
5842 switch (DEFAULT_ABI)
5852 case ABI_AIX_NODESC:
5858 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
5860 assemble_name (file, XSTR (x, 0));
5865 /* Like 'L', for last word of TImode. */
5866 if (GET_CODE (x) == REG)
5867 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
5868 else if (GET_CODE (x) == MEM)
5870 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5871 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5872 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
5874 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
5875 if (small_data_operand (x, GET_MODE (x)))
5876 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5877 reg_names[SMALL_DATA_REG]);
5881 /* Print AltiVec memory operand. */
5886 if (GET_CODE (x) != MEM)
5891 if (GET_CODE (tmp) == REG)
5892 fprintf (file, "0, %s", reg_names[REGNO (tmp)]);
5893 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
5895 if (REGNO (XEXP (tmp, 0)) == 0)
5896 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
5897 reg_names[ REGNO (XEXP (tmp, 0)) ]);
5899 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
5900 reg_names[ REGNO (XEXP (tmp, 1)) ]);
5908 if (GET_CODE (x) == REG)
5909 fprintf (file, "%s", reg_names[REGNO (x)]);
5910 else if (GET_CODE (x) == MEM)
5912 /* We need to handle PRE_INC and PRE_DEC here, since we need to
5913 know the width from the mode. */
5914 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
5915 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
5916 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5917 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
5918 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
5919 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5921 output_address (XEXP (x, 0));
5924 output_addr_const (file, x);
5928 output_operand_lossage ("invalid %%xn code");
5932 /* Print the address of an operand. */
5935 print_operand_address (file, x)
5939 if (GET_CODE (x) == REG)
5940 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
5941 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
5942 || GET_CODE (x) == LABEL_REF)
5944 output_addr_const (file, x);
5945 if (small_data_operand (x, GET_MODE (x)))
5946 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5947 reg_names[SMALL_DATA_REG]);
5948 else if (TARGET_TOC)
5951 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
5953 if (REGNO (XEXP (x, 0)) == 0)
5954 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
5955 reg_names[ REGNO (XEXP (x, 0)) ]);
5957 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
5958 reg_names[ REGNO (XEXP (x, 1)) ]);
5960 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
5962 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
5963 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5966 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
5967 && CONSTANT_P (XEXP (x, 1)))
5969 output_addr_const (file, XEXP (x, 1));
5970 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5974 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
5975 && CONSTANT_P (XEXP (x, 1)))
5977 fprintf (file, "lo16(");
5978 output_addr_const (file, XEXP (x, 1));
5979 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5982 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
5984 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
5986 rtx contains_minus = XEXP (x, 1);
5990 /* Find the (minus (sym) (toc)) buried in X, and temporarily
5991 turn it into (sym) for output_addr_const. */
5992 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
5993 contains_minus = XEXP (contains_minus, 0);
5995 minus = XEXP (contains_minus, 0);
5996 symref = XEXP (minus, 0);
5997 XEXP (contains_minus, 0) = symref;
6002 name = XSTR (symref, 0);
6003 newname = alloca (strlen (name) + sizeof ("@toc"));
6004 strcpy (newname, name);
6005 strcat (newname, "@toc");
6006 XSTR (symref, 0) = newname;
6008 output_addr_const (file, XEXP (x, 1));
6010 XSTR (symref, 0) = name;
6011 XEXP (contains_minus, 0) = minus;
6014 output_addr_const (file, XEXP (x, 1));
6016 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6023 rs6000_reverse_condition (mode, code)
6024 enum machine_mode mode;
6027 /* Reversal of FP compares takes care -- an ordered compare
6028 becomes an unordered compare and vice versa. */
6029 if (mode == CCFPmode)
6030 return reverse_condition_maybe_unordered (code);
6032 return reverse_condition (code);
6035 /* Generate a compare for CODE. Return a brand-new rtx that
6036 represents the result of the compare. */
6039 rs6000_generate_compare (code)
6042 enum machine_mode comp_mode;
6045 if (rs6000_compare_fp_p)
6046 comp_mode = CCFPmode;
6047 else if (code == GTU || code == LTU
6048 || code == GEU || code == LEU)
6049 comp_mode = CCUNSmode;
6053 /* First, the compare. */
6054 compare_result = gen_reg_rtx (comp_mode);
6055 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6056 gen_rtx_COMPARE (comp_mode,
6058 rs6000_compare_op1)));
6060 /* Some kinds of FP comparisons need an OR operation;
6061 except for flag_unsafe_math_optimizations we don't bother. */
6062 if (rs6000_compare_fp_p
6063 && ! flag_unsafe_math_optimizations
6064 && (code == LE || code == GE
6065 || code == UNEQ || code == LTGT
6066 || code == UNGT || code == UNLT))
6068 enum rtx_code or1, or2;
6069 rtx or1_rtx, or2_rtx, compare2_rtx;
6070 rtx or_result = gen_reg_rtx (CCEQmode);
6074 case LE: or1 = LT; or2 = EQ; break;
6075 case GE: or1 = GT; or2 = EQ; break;
6076 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6077 case LTGT: or1 = LT; or2 = GT; break;
6078 case UNGT: or1 = UNORDERED; or2 = GT; break;
6079 case UNLT: or1 = UNORDERED; or2 = LT; break;
6082 validate_condition_mode (or1, comp_mode);
6083 validate_condition_mode (or2, comp_mode);
6084 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6085 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6086 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6087 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6089 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6091 compare_result = or_result;
6095 validate_condition_mode (code, GET_MODE (compare_result));
6097 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6101 /* Emit the RTL for an sCOND pattern. */
6104 rs6000_emit_sCOND (code, result)
6109 enum machine_mode op_mode;
6111 condition_rtx = rs6000_generate_compare (code);
6113 op_mode = GET_MODE (rs6000_compare_op0);
6114 if (op_mode == VOIDmode)
6115 op_mode = GET_MODE (rs6000_compare_op1);
6117 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6119 PUT_MODE (condition_rtx, DImode);
6120 convert_move (result, condition_rtx, 0);
6124 PUT_MODE (condition_rtx, SImode);
6125 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6129 /* Emit a branch of kind CODE to location LOC. */
6132 rs6000_emit_cbranch (code, loc)
6136 rtx condition_rtx, loc_ref;
6138 condition_rtx = rs6000_generate_compare (code);
6139 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6140 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6141 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6145 /* Return the string to output a conditional branch to LABEL, which is
6146 the operand number of the label, or -1 if the branch is really a
6149 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
6150 condition code register and its mode specifies what kind of
6153 REVERSED is non-zero if we should reverse the sense of the comparison.
6155 INSN is the insn. */
6158 output_cbranch (op, label, reversed, insn)
6164 static char string[64];
6165 enum rtx_code code = GET_CODE (op);
6166 rtx cc_reg = XEXP (op, 0);
6167 enum machine_mode mode = GET_MODE (cc_reg);
6168 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
6169 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
6170 int really_reversed = reversed ^ need_longbranch;
6176 validate_condition_mode (code, mode);
6178 /* Work out which way this really branches. We could use
6179 reverse_condition_maybe_unordered here always but this
6180 makes the resulting assembler clearer. */
6181 if (really_reversed)
6182 code = rs6000_reverse_condition (mode, code);
6186 /* Not all of these are actually distinct opcodes, but
6187 we distinguish them for clarity of the resulting assembler. */
6189 ccode = "ne"; break;
6191 ccode = "eq"; break;
6193 ccode = "ge"; break;
6194 case GT: case GTU: case UNGT:
6195 ccode = "gt"; break;
6197 ccode = "le"; break;
6198 case LT: case LTU: case UNLT:
6199 ccode = "lt"; break;
6200 case UNORDERED: ccode = "un"; break;
6201 case ORDERED: ccode = "nu"; break;
6202 case UNGE: ccode = "nl"; break;
6203 case UNLE: ccode = "ng"; break;
6208 /* Maybe we have a guess as to how likely the branch is.
6209 The old mnemonics don't have a way to specify this information. */
6210 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
6211 if (note != NULL_RTX)
6213 /* PROB is the difference from 50%. */
6214 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
6216 /* For branches that are very close to 50%, assume not-taken. */
6217 if (abs (prob) > REG_BR_PROB_BASE / 20
6218 && ((prob > 0) ^ need_longbranch))
6227 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
6229 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
6231 /* We need to escape any '%' characters in the reg_names string.
6232 Assume they'd only be the first character... */
6233 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
6235 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
6239 /* If the branch distance was too far, we may have to use an
6240 unconditional branch to go the distance. */
6241 if (need_longbranch)
6242 s += sprintf (s, ",$+8\n\tb %s", label);
6244 s += sprintf (s, ",%s", label);
6250 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
6251 operands of the last comparison is nonzero/true, FALSE_COND if it
6252 is zero/false. Return 0 if the hardware has no such operation. */
6255 rs6000_emit_cmove (dest, op, true_cond, false_cond)
6261 enum rtx_code code = GET_CODE (op);
6262 rtx op0 = rs6000_compare_op0;
6263 rtx op1 = rs6000_compare_op1;
6265 enum machine_mode mode = GET_MODE (op0);
6268 /* First, work out if the hardware can do this at all, or
6269 if it's too slow... */
6270 /* If the comparison is an integer one, since we only have fsel
6271 it'll be cheaper to use a branch. */
6272 if (! rs6000_compare_fp_p)
6275 /* Eliminate half of the comparisons by switching operands, this
6276 makes the remaining code simpler. */
6277 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
6278 || code == LTGT || code == LT)
6280 code = reverse_condition_maybe_unordered (code);
6282 true_cond = false_cond;
6286 /* UNEQ and LTGT take four instructions for a comparison with zero,
6287 it'll probably be faster to use a branch here too. */
6291 if (GET_CODE (op1) == CONST_DOUBLE)
6292 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
6294 /* We're going to try to implement comparions by performing
6295 a subtract, then comparing against zero. Unfortunately,
6296 Inf - Inf is NaN which is not zero, and so if we don't
6297 know that the the operand is finite and the comparison
6298 would treat EQ different to UNORDERED, we can't do it. */
6299 if (! flag_unsafe_math_optimizations
6300 && code != GT && code != UNGE
6301 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
6302 /* Constructs of the form (a OP b ? a : b) are safe. */
6303 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
6304 || (! rtx_equal_p (op0, true_cond)
6305 && ! rtx_equal_p (op1, true_cond))))
6307 /* At this point we know we can use fsel. */
6309 /* Reduce the comparison to a comparison against zero. */
6310 temp = gen_reg_rtx (mode);
6311 emit_insn (gen_rtx_SET (VOIDmode, temp,
6312 gen_rtx_MINUS (mode, op0, op1)));
6314 op1 = CONST0_RTX (mode);
6316 /* If we don't care about NaNs we can reduce some of the comparisons
6317 down to faster ones. */
6318 if (flag_unsafe_math_optimizations)
6324 true_cond = false_cond;
6337 /* Now, reduce everything down to a GE. */
6344 temp = gen_reg_rtx (mode);
6345 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6350 temp = gen_reg_rtx (mode);
6351 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
6356 temp = gen_reg_rtx (mode);
6357 emit_insn (gen_rtx_SET (VOIDmode, temp,
6359 gen_rtx_ABS (mode, op0))));
6364 temp = gen_reg_rtx (mode);
6365 emit_insn (gen_rtx_SET (VOIDmode, temp,
6366 gen_rtx_IF_THEN_ELSE (mode,
6367 gen_rtx_GE (VOIDmode,
6369 true_cond, false_cond)));
6371 true_cond = false_cond;
6373 temp = gen_reg_rtx (mode);
6374 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6379 temp = gen_reg_rtx (mode);
6380 emit_insn (gen_rtx_SET (VOIDmode, temp,
6381 gen_rtx_IF_THEN_ELSE (mode,
6382 gen_rtx_GE (VOIDmode,
6384 true_cond, false_cond)));
6386 false_cond = true_cond;
6388 temp = gen_reg_rtx (mode);
6389 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6397 emit_insn (gen_rtx_SET (VOIDmode, dest,
6398 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
6399 gen_rtx_GE (VOIDmode,
6401 true_cond, false_cond)));
6406 rs6000_emit_minmax (dest, code, op0, op1)
6412 enum machine_mode mode = GET_MODE (op0);
6414 if (code == SMAX || code == UMAX)
6415 target = emit_conditional_move (dest, GE, op0, op1, mode,
6418 target = emit_conditional_move (dest, GE, op0, op1, mode,
6420 if (target == NULL_RTX)
6423 emit_move_insn (dest, target);
6426 /* This page contains routines that are used to determine what the
6427 function prologue and epilogue code will do and write them out. */
6429 /* Return the first fixed-point register that is required to be
6430 saved. 32 if none. */
6433 first_reg_to_save ()
6437 /* Find lowest numbered live register. */
6438 for (first_reg = 13; first_reg <= 31; first_reg++)
6439 if (regs_ever_live[first_reg]
6440 && (! call_used_regs[first_reg]
6441 || (first_reg == PIC_OFFSET_TABLE_REGNUM
6442 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
6443 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
6448 /* AIX must save/restore every register that contains a parameter
6449 before/after the .__mcount call plus an additional register
6450 for the static chain, if needed; use registers from 30 down to 22
6452 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
6454 int last_parm_reg, profile_first_reg;
6456 /* Figure out last used parameter register. The proper thing
6457 to do is to walk incoming args of the function. A function
6458 might have live parameter registers even if it has no
6460 for (last_parm_reg = 10;
6461 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
6465 /* Calculate first reg for saving parameter registers
6467 Skip reg 31 which may contain the frame pointer. */
6468 profile_first_reg = (33 - last_parm_reg
6469 - (current_function_needs_context ? 1 : 0));
6471 /* Need to skip another reg to account for R31 being PICBASE
6472 (when flag_pic is set) or R30 being used as the frame
6473 pointer (when flag_pic is not set). */
6474 --profile_first_reg;
6476 /* Do not save frame pointer if no parameters needs to be saved. */
6477 if (profile_first_reg == 31)
6478 profile_first_reg = 32;
6480 if (first_reg > profile_first_reg)
6481 first_reg = profile_first_reg;
6484 /* SVR4 may need one register to preserve the static chain. */
6485 else if (current_function_needs_context)
6487 /* Skip reg 31 which may contain the frame pointer. */
6494 if (flag_pic && current_function_uses_pic_offset_table &&
6495 (first_reg > PIC_OFFSET_TABLE_REGNUM))
6496 return PIC_OFFSET_TABLE_REGNUM;
6502 /* Similar, for FP regs. */
6505 first_fp_reg_to_save ()
6509 /* Find lowest numbered live register. */
6510 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6511 if (regs_ever_live[first_reg])
6517 /* Similar, for AltiVec regs. */
6520 first_altivec_reg_to_save ()
6524 /* Stack frame remains as is unless we are in AltiVec ABI. */
6525 if (! TARGET_ALTIVEC_ABI)
6526 return LAST_ALTIVEC_REGNO + 1;
6528 /* Find lowest numbered live register. */
6529 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6530 if (regs_ever_live[i])
6536 /* Return a 32-bit mask of the AltiVec registers we need to set in
6537 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
6538 the 32-bit word is 0. */
6541 compute_vrsave_mask ()
6543 unsigned int i, mask = 0;
6545 /* First, find out if we use _any_ altivec registers. */
6546 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6547 if (regs_ever_live[i])
6548 mask |= ALTIVEC_REG_BIT (i);
6553 /* Next, add all registers that are call-clobbered. We do this
6554 because post-reload register optimizers such as regrename_optimize
6555 may choose to use them. They never change the register class
6556 chosen by reload, so cannot create new uses of altivec registers
6557 if there were none before, so the early exit above is safe. */
6558 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
6559 altivec registers not saved in the mask, which might well make the
6560 adjustments below more effective in eliding the save/restore of
6561 VRSAVE in small functions. */
6562 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6563 if (call_used_regs[i])
6564 mask |= ALTIVEC_REG_BIT (i);
6566 /* Next, remove the argument registers from the set. These must
6567 be in the VRSAVE mask set by the caller, so we don't need to add
6568 them in again. More importantly, the mask we compute here is
6569 used to generate CLOBBERs in the set_vrsave insn, and we do not
6570 wish the argument registers to die. */
6571 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
6572 mask &= ~ALTIVEC_REG_BIT (i);
6574 /* Similarly, remove the return value from the set. */
6577 diddle_return_value (is_altivec_return_reg, &yes);
6579 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
6586 is_altivec_return_reg (reg, xyes)
6590 bool *yes = (bool *) xyes;
6591 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
6596 /* Calculate the stack information for the current function. This is
6597 complicated by having two separate calling sequences, the AIX calling
6598 sequence and the V.4 calling sequence.
6600 AIX (and Darwin/Mac OS X) stack frames look like:
6602 SP----> +---------------------------------------+
6603 | back chain to caller | 0 0
6604 +---------------------------------------+
6605 | saved CR | 4 8 (8-11)
6606 +---------------------------------------+
6608 +---------------------------------------+
6609 | reserved for compilers | 12 24
6610 +---------------------------------------+
6611 | reserved for binders | 16 32
6612 +---------------------------------------+
6613 | saved TOC pointer | 20 40
6614 +---------------------------------------+
6615 | Parameter save area (P) | 24 48
6616 +---------------------------------------+
6617 | Alloca space (A) | 24+P etc.
6618 +---------------------------------------+
6619 | Local variable space (L) | 24+P+A
6620 +---------------------------------------+
6621 | Float/int conversion temporary (X) | 24+P+A+L
6622 +---------------------------------------+
6623 | Save area for AltiVec registers (W) | 24+P+A+L+X
6624 +---------------------------------------+
6625 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
6626 +---------------------------------------+
6627 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
6628 +---------------------------------------+
6629 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
6630 +---------------------------------------+
6631 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
6632 +---------------------------------------+
6633 old SP->| back chain to caller's caller |
6634 +---------------------------------------+
6636 The required alignment for AIX configurations is two words (i.e., 8
6640 V.4 stack frames look like:
6642 SP----> +---------------------------------------+
6643 | back chain to caller | 0
6644 +---------------------------------------+
6645 | caller's saved LR | 4
6646 +---------------------------------------+
6647 | Parameter save area (P) | 8
6648 +---------------------------------------+
6649 | Alloca space (A) | 8+P
6650 +---------------------------------------+
6651 | Varargs save area (V) | 8+P+A
6652 +---------------------------------------+
6653 | Local variable space (L) | 8+P+A+V
6654 +---------------------------------------+
6655 | Float/int conversion temporary (X) | 8+P+A+V+L
6656 +---------------------------------------+
6657 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
6658 +---------------------------------------+
6659 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
6660 +---------------------------------------+
6661 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
6662 +---------------------------------------+
6663 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
6664 +---------------------------------------+
6665 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
6666 +---------------------------------------+
6667 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
6668 +---------------------------------------+
6669 old SP->| back chain to caller's caller |
6670 +---------------------------------------+
6672 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
6673 given. (But note below and in sysv4.h that we require only 8 and
6674 may round up the size of our stack frame anyways. The historical
6675 reason is early versions of powerpc-linux which didn't properly
6676 align the stack at program startup. A happy side-effect is that
6677 -mno-eabi libraries can be used with -meabi programs.)
6679 The EABI configuration defaults to the V.4 layout, unless
6680 -mcall-aix is used, in which case the AIX layout is used. However,
6681 the stack alignment requirements may differ. If -mno-eabi is not
6682 given, the required stack alignment is 8 bytes; if -mno-eabi is
6683 given, the required alignment is 16 bytes. (But see V.4 comment
6686 #ifndef ABI_STACK_BOUNDARY
6687 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
6691 rs6000_stack_info ()
6693 static rs6000_stack_t info, zero_info;
6694 rs6000_stack_t *info_ptr = &info;
6695 int reg_size = TARGET_POWERPC64 ? 8 : 4;
6696 enum rs6000_abi abi;
6700 /* Zero all fields portably. */
6703 /* Select which calling sequence. */
6704 info_ptr->abi = abi = DEFAULT_ABI;
6706 /* Calculate which registers need to be saved & save area size. */
6707 info_ptr->first_gp_reg_save = first_reg_to_save ();
6708 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
6709 even if it currently looks like we won't. */
6710 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
6711 || (flag_pic == 1 && abi == ABI_V4)
6712 || (flag_pic && abi == ABI_DARWIN))
6713 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
6714 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
6716 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
6718 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
6719 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
6721 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
6722 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
6723 - info_ptr->first_altivec_reg_save);
6725 /* Does this function call anything? */
6726 info_ptr->calls_p = (! current_function_is_leaf
6727 || cfun->machine->ra_needs_full_frame);
6729 /* Determine if we need to save the link register. */
6730 if (rs6000_ra_ever_killed ()
6731 || (DEFAULT_ABI == ABI_AIX && profile_flag)
6732 #ifdef TARGET_RELOCATABLE
6733 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
6735 || (info_ptr->first_fp_reg_save != 64
6736 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
6737 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
6738 || (abi == ABI_V4 && current_function_calls_alloca)
6739 || (DEFAULT_ABI == ABI_DARWIN
6741 && current_function_uses_pic_offset_table)
6742 || info_ptr->calls_p)
6744 info_ptr->lr_save_p = 1;
6745 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
6748 /* Determine if we need to save the condition code registers. */
6749 if (regs_ever_live[CR2_REGNO]
6750 || regs_ever_live[CR3_REGNO]
6751 || regs_ever_live[CR4_REGNO])
6753 info_ptr->cr_save_p = 1;
6755 info_ptr->cr_size = reg_size;
6758 /* If the current function calls __builtin_eh_return, then we need
6759 to allocate stack space for registers that will hold data for
6760 the exception handler. */
6761 if (current_function_calls_eh_return)
6764 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
6766 ehrd_size = i * UNITS_PER_WORD;
6771 /* Determine various sizes. */
6772 info_ptr->reg_size = reg_size;
6773 info_ptr->fixed_size = RS6000_SAVE_AREA;
6774 info_ptr->varargs_size = RS6000_VARARGS_AREA;
6775 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
6776 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
6779 if (TARGET_ALTIVEC_ABI)
6781 info_ptr->vrsave_mask = compute_vrsave_mask ();
6782 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
6786 info_ptr->vrsave_mask = 0;
6787 info_ptr->vrsave_size = 0;
6790 /* Calculate the offsets. */
6798 case ABI_AIX_NODESC:
6800 info_ptr->fp_save_offset = - info_ptr->fp_size;
6801 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6803 if (TARGET_ALTIVEC_ABI)
6805 info_ptr->vrsave_save_offset
6806 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
6808 /* Align stack so vector save area is on a quadword boundary. */
6809 if (info_ptr->altivec_size != 0)
6810 info_ptr->altivec_padding_size
6811 = 16 - (-info_ptr->vrsave_save_offset % 16);
6813 info_ptr->altivec_padding_size = 0;
6815 info_ptr->altivec_save_offset
6816 = info_ptr->vrsave_save_offset
6817 - info_ptr->altivec_padding_size
6818 - info_ptr->altivec_size;
6820 /* Adjust for AltiVec case. */
6821 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
6824 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
6825 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
6826 info_ptr->lr_save_offset = 2*reg_size;
6830 info_ptr->fp_save_offset = - info_ptr->fp_size;
6831 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6832 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
6834 if (TARGET_ALTIVEC_ABI)
6836 info_ptr->vrsave_save_offset
6837 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
6839 /* Align stack so vector save area is on a quadword boundary. */
6840 if (info_ptr->altivec_size != 0)
6841 info_ptr->altivec_padding_size
6842 = 16 - (-info_ptr->vrsave_save_offset % 16);
6844 info_ptr->altivec_padding_size = 0;
6846 info_ptr->altivec_save_offset
6847 = info_ptr->vrsave_save_offset
6848 - info_ptr->altivec_padding_size
6849 - info_ptr->altivec_size;
6851 /* Adjust for AltiVec case. */
6852 info_ptr->toc_save_offset
6853 = info_ptr->altivec_save_offset - info_ptr->toc_size;
6856 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
6857 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
6858 info_ptr->lr_save_offset = reg_size;
6862 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
6864 + info_ptr->altivec_size
6865 + info_ptr->altivec_padding_size
6866 + info_ptr->vrsave_size
6870 + info_ptr->vrsave_size
6871 + info_ptr->toc_size,
6872 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
6875 total_raw_size = (info_ptr->vars_size
6876 + info_ptr->parm_size
6877 + info_ptr->save_size
6878 + info_ptr->varargs_size
6879 + info_ptr->fixed_size);
6881 info_ptr->total_size =
6882 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
6884 /* Determine if we need to allocate any stack frame:
6886 For AIX we need to push the stack if a frame pointer is needed
6887 (because the stack might be dynamically adjusted), if we are
6888 debugging, if we make calls, or if the sum of fp_save, gp_save,
6889 and local variables are more than the space needed to save all
6890 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
6891 + 18*8 = 288 (GPR13 reserved).
6893 For V.4 we don't have the stack cushion that AIX uses, but assume
6894 that the debugger can handle stackless frames. */
6896 if (info_ptr->calls_p)
6897 info_ptr->push_p = 1;
6899 else if (abi == ABI_V4)
6900 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
6901 || info_ptr->calls_p);
6904 info_ptr->push_p = (frame_pointer_needed
6905 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
6906 || ((total_raw_size - info_ptr->fixed_size)
6907 > (TARGET_32BIT ? 220 : 288)));
6909 /* Zero offsets if we're not saving those registers. */
6910 if (info_ptr->fp_size == 0)
6911 info_ptr->fp_save_offset = 0;
6913 if (info_ptr->gp_size == 0)
6914 info_ptr->gp_save_offset = 0;
6916 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
6917 info_ptr->altivec_save_offset = 0;
6919 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
6920 info_ptr->vrsave_save_offset = 0;
6922 if (! info_ptr->lr_save_p)
6923 info_ptr->lr_save_offset = 0;
6925 if (! info_ptr->cr_save_p)
6926 info_ptr->cr_save_offset = 0;
6928 if (! info_ptr->toc_save_p)
6929 info_ptr->toc_save_offset = 0;
6935 debug_stack_info (info)
6936 rs6000_stack_t *info;
6938 const char *abi_string;
6941 info = rs6000_stack_info ();
6943 fprintf (stderr, "\nStack information for function %s:\n",
6944 ((current_function_decl && DECL_NAME (current_function_decl))
6945 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
6950 default: abi_string = "Unknown"; break;
6951 case ABI_NONE: abi_string = "NONE"; break;
6953 case ABI_AIX_NODESC: abi_string = "AIX"; break;
6954 case ABI_DARWIN: abi_string = "Darwin"; break;
6955 case ABI_V4: abi_string = "V.4"; break;
6958 fprintf (stderr, "\tABI = %5s\n", abi_string);
6960 if (TARGET_ALTIVEC_ABI)
6961 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
6963 if (info->first_gp_reg_save != 32)
6964 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
6966 if (info->first_fp_reg_save != 64)
6967 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
6969 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
6970 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
6971 info->first_altivec_reg_save);
6973 if (info->lr_save_p)
6974 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
6976 if (info->cr_save_p)
6977 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
6979 if (info->toc_save_p)
6980 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
6982 if (info->vrsave_mask)
6983 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
6986 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
6989 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
6991 if (info->gp_save_offset)
6992 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
6994 if (info->fp_save_offset)
6995 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
6997 if (info->altivec_save_offset)
6998 fprintf (stderr, "\taltivec_save_offset = %5d\n",
6999 info->altivec_save_offset);
7001 if (info->vrsave_save_offset)
7002 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7003 info->vrsave_save_offset);
7005 if (info->lr_save_offset)
7006 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7008 if (info->cr_save_offset)
7009 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7011 if (info->toc_save_offset)
7012 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7014 if (info->varargs_save_offset)
7015 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7017 if (info->total_size)
7018 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7020 if (info->varargs_size)
7021 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7023 if (info->vars_size)
7024 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7026 if (info->parm_size)
7027 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7029 if (info->fixed_size)
7030 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7033 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7036 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7038 if (info->altivec_size)
7039 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7041 if (info->vrsave_size)
7042 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7044 if (info->altivec_padding_size)
7045 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7046 info->altivec_padding_size);
7049 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7052 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7055 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7057 if (info->save_size)
7058 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7060 if (info->reg_size != 4)
7061 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7063 fprintf (stderr, "\n");
7067 rs6000_return_addr (count, frame)
7071 /* Currently we don't optimize very well between prolog and body
7072 code and for PIC code the code can be actually quite bad, so
7073 don't try to be too clever here. */
7076 || DEFAULT_ABI == ABI_AIX
7077 || DEFAULT_ABI == ABI_AIX_NODESC)
7079 cfun->machine->ra_needs_full_frame = 1;
7086 plus_constant (copy_to_reg
7087 (gen_rtx_MEM (Pmode,
7088 memory_address (Pmode, frame))),
7089 RETURN_ADDRESS_OFFSET)));
7092 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7096 rs6000_ra_ever_killed ()
7100 #ifdef ASM_OUTPUT_MI_THUNK
7101 if (current_function_is_thunk)
7104 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7105 || cfun->machine->ra_needs_full_frame)
7106 return regs_ever_live[LINK_REGISTER_REGNUM];
7108 push_topmost_sequence ();
7110 pop_topmost_sequence ();
7112 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7116 /* Add a REG_MAYBE_DEAD note to the insn. */
7118 rs6000_maybe_dead (insn)
7121 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7126 /* Emit instructions needed to load the TOC register.
7127 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7128 a constant pool; or for SVR4 -fpic. */
7131 rs6000_emit_load_toc_table (fromprolog)
7135 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
7137 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
7139 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7141 rtx temp = (fromprolog
7142 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7143 : gen_reg_rtx (Pmode));
7144 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
7145 rs6000_maybe_dead (emit_move_insn (dest, temp));
7147 else if (flag_pic == 2)
7150 rtx tempLR = (fromprolog
7151 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7152 : gen_reg_rtx (Pmode));
7153 rtx temp0 = (fromprolog
7154 ? gen_rtx_REG (Pmode, 0)
7155 : gen_reg_rtx (Pmode));
7158 /* possibly create the toc section */
7159 if (! toc_initialized)
7162 function_section (current_function_decl);
7169 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
7170 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7172 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
7173 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7175 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
7177 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7178 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
7185 static int reload_toc_labelno = 0;
7187 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
7189 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
7190 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7192 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
7195 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7196 rs6000_maybe_dead (emit_move_insn (temp0,
7197 gen_rtx_MEM (Pmode, dest)));
7199 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
7201 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
7203 /* This is for AIX code running in non-PIC ELF. */
7206 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
7207 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7209 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
7210 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
7218 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
7220 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
7225 get_TOC_alias_set ()
7227 static int set = -1;
7229 set = new_alias_set ();
7233 /* This retuns nonzero if the current function uses the TOC. This is
7234 determined by the presence of (unspec ... 7), which is generated by
7235 the various load_toc_* patterns. */
7242 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7245 rtx pat = PATTERN (insn);
7248 if (GET_CODE (pat) == PARALLEL)
7249 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7250 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
7251 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
7258 create_TOC_reference (symbol)
7261 return gen_rtx_PLUS (Pmode,
7262 gen_rtx_REG (Pmode, TOC_REGISTER),
7263 gen_rtx_CONST (Pmode,
7264 gen_rtx_MINUS (Pmode, symbol,
7265 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
7269 /* __throw will restore its own return address to be the same as the
7270 return address of the function that the throw is being made to.
7271 This is unfortunate, because we want to check the original
7272 return address to see if we need to restore the TOC.
7273 So we have to squirrel it away here.
7274 This is used only in compiling __throw and __rethrow.
7276 Most of this code should be removed by CSE. */
7277 static rtx insn_after_throw;
7279 /* This does the saving... */
7281 rs6000_aix_emit_builtin_unwind_init ()
7284 rtx stack_top = gen_reg_rtx (Pmode);
7285 rtx opcode_addr = gen_reg_rtx (Pmode);
7287 insn_after_throw = gen_reg_rtx (SImode);
7289 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7290 emit_move_insn (stack_top, mem);
7292 mem = gen_rtx_MEM (Pmode,
7293 gen_rtx_PLUS (Pmode, stack_top,
7294 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7295 emit_move_insn (opcode_addr, mem);
7296 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
7299 /* Emit insns to _restore_ the TOC register, at runtime (specifically
7300 in _eh.o). Only used on AIX.
7302 The idea is that on AIX, function calls look like this:
7303 bl somefunction-trampoline
7307 somefunction-trampoline:
7309 ... load function address in the count register ...
7311 or like this, if the linker determines that this is not a cross-module call
7312 and so the TOC need not be restored:
7315 or like this, if the compiler could determine that this is not a
7318 now, the tricky bit here is that register 2 is saved and restored
7319 by the _linker_, so we can't readily generate debugging information
7320 for it. So we need to go back up the call chain looking at the
7321 insns at return addresses to see which calls saved the TOC register
7322 and so see where it gets restored from.
7324 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
7325 just before the actual epilogue.
7327 On the bright side, this incurs no space or time overhead unless an
7328 exception is thrown, except for the extra code in libgcc.a.
7330 The parameter STACKSIZE is a register containing (at runtime)
7331 the amount to be popped off the stack in addition to the stack frame
7332 of this routine (which will be __throw or __rethrow, and so is
7333 guaranteed to have a stack frame). */
7336 rs6000_emit_eh_toc_restore (stacksize)
7340 rtx bottom_of_stack = gen_reg_rtx (Pmode);
7341 rtx tocompare = gen_reg_rtx (SImode);
7342 rtx opcode = gen_reg_rtx (SImode);
7343 rtx opcode_addr = gen_reg_rtx (Pmode);
7345 rtx loop_start = gen_label_rtx ();
7346 rtx no_toc_restore_needed = gen_label_rtx ();
7347 rtx loop_exit = gen_label_rtx ();
7349 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7350 set_mem_alias_set (mem, rs6000_sr_alias_set);
7351 emit_move_insn (bottom_of_stack, mem);
7353 top_of_stack = expand_binop (Pmode, add_optab,
7354 bottom_of_stack, stacksize,
7355 NULL_RTX, 1, OPTAB_WIDEN);
7357 emit_move_insn (tocompare,
7358 GEN_INT (trunc_int_for_mode (TARGET_32BIT
7360 : 0xE8410028, SImode)));
7362 if (insn_after_throw == NULL_RTX)
7364 emit_move_insn (opcode, insn_after_throw);
7366 emit_note (NULL, NOTE_INSN_LOOP_BEG);
7367 emit_label (loop_start);
7369 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
7370 SImode, NULL_RTX, NULL_RTX,
7371 no_toc_restore_needed);
7373 mem = gen_rtx_MEM (Pmode,
7374 gen_rtx_PLUS (Pmode, bottom_of_stack,
7375 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
7376 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
7378 emit_label (no_toc_restore_needed);
7379 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
7380 Pmode, NULL_RTX, NULL_RTX,
7383 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
7384 set_mem_alias_set (mem, rs6000_sr_alias_set);
7385 emit_move_insn (bottom_of_stack, mem);
7387 mem = gen_rtx_MEM (Pmode,
7388 gen_rtx_PLUS (Pmode, bottom_of_stack,
7389 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7390 emit_move_insn (opcode_addr, mem);
7391 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
7393 emit_note (NULL, NOTE_INSN_LOOP_CONT);
7394 emit_jump (loop_start);
7395 emit_note (NULL, NOTE_INSN_LOOP_END);
7396 emit_label (loop_exit);
7398 #endif /* TARGET_AIX */
7400 /* This ties together stack memory (MEM with an alias set of
7401 rs6000_sr_alias_set) and the change to the stack pointer. */
7404 rs6000_emit_stack_tie ()
7406 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7408 set_mem_alias_set (mem, rs6000_sr_alias_set);
7409 emit_insn (gen_stack_tie (mem));
7412 /* Emit the correct code for allocating stack space, as insns.
7413 If COPY_R12, make sure a copy of the old frame is left in r12.
7414 The generated code may use hard register 0 as a temporary. */
7417 rs6000_emit_allocate_stack (size, copy_r12)
7422 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7423 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
7424 rtx todec = GEN_INT (-size);
7426 if (current_function_limit_stack)
7428 if (REG_P (stack_limit_rtx)
7429 && REGNO (stack_limit_rtx) > 1
7430 && REGNO (stack_limit_rtx) <= 31)
7432 emit_insn (Pmode == SImode
7433 ? gen_addsi3 (tmp_reg,
7436 : gen_adddi3 (tmp_reg,
7440 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7443 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
7445 && DEFAULT_ABI == ABI_V4)
7447 rtx toload = gen_rtx_CONST (VOIDmode,
7448 gen_rtx_PLUS (Pmode,
7452 emit_insn (gen_elf_high (tmp_reg, toload));
7453 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
7454 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7458 warning ("stack limit expression is not supported");
7461 if (copy_r12 || ! TARGET_UPDATE)
7462 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
7468 /* Need a note here so that try_split doesn't get confused. */
7469 if (get_last_insn() == NULL_RTX)
7470 emit_note (0, NOTE_INSN_DELETED);
7471 insn = emit_move_insn (tmp_reg, todec);
7472 try_split (PATTERN (insn), insn, 0);
7476 if (Pmode == SImode)
7477 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
7480 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
7485 if (Pmode == SImode)
7486 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
7488 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
7489 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
7490 gen_rtx_REG (Pmode, 12));
7493 RTX_FRAME_RELATED_P (insn) = 1;
7495 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7496 gen_rtx_SET (VOIDmode, stack_reg,
7497 gen_rtx_PLUS (Pmode, stack_reg,
7502 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
7503 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
7504 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
7505 deduce these equivalences by itself so it wasn't necessary to hold
7506 its hand so much. */
7509 rs6000_frame_related (insn, reg, val, reg2, rreg)
7518 real = copy_rtx (PATTERN (insn));
7520 real = replace_rtx (real, reg,
7521 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
7522 STACK_POINTER_REGNUM),
7525 /* We expect that 'real' is either a SET or a PARALLEL containing
7526 SETs (and possibly other stuff). In a PARALLEL, all the SETs
7527 are important so they all have to be marked RTX_FRAME_RELATED_P. */
7529 if (GET_CODE (real) == SET)
7533 temp = simplify_rtx (SET_SRC (set));
7535 SET_SRC (set) = temp;
7536 temp = simplify_rtx (SET_DEST (set));
7538 SET_DEST (set) = temp;
7539 if (GET_CODE (SET_DEST (set)) == MEM)
7541 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7543 XEXP (SET_DEST (set), 0) = temp;
7546 else if (GET_CODE (real) == PARALLEL)
7549 for (i = 0; i < XVECLEN (real, 0); i++)
7550 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
7552 rtx set = XVECEXP (real, 0, i);
7554 temp = simplify_rtx (SET_SRC (set));
7556 SET_SRC (set) = temp;
7557 temp = simplify_rtx (SET_DEST (set));
7559 SET_DEST (set) = temp;
7560 if (GET_CODE (SET_DEST (set)) == MEM)
7562 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7564 XEXP (SET_DEST (set), 0) = temp;
7566 RTX_FRAME_RELATED_P (set) = 1;
7572 if (reg2 != NULL_RTX)
7573 real = replace_rtx (real, reg2, rreg);
7575 RTX_FRAME_RELATED_P (insn) = 1;
7576 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7581 /* Returns an insn that has a vrsave set operation with the
7582 appropriate CLOBBERs. */
7585 generate_set_vrsave (reg, info)
7587 rs6000_stack_t *info;
7590 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
7592 clobs[0] = gen_set_vrsave (reg);
7596 /* CLOBBER the registers in the mask. */
7598 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7599 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
7600 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
7601 gen_rtx_REG (V4SImode, i));
7603 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
7605 for (i = 0; i < nclobs; ++i)
7606 XVECEXP (insn, 0, i) = clobs[i];
7611 /* Emit function prologue as insns. */
7614 rs6000_emit_prologue ()
7616 rs6000_stack_t *info = rs6000_stack_info ();
7617 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
7618 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7619 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7620 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
7621 rtx frame_reg_rtx = sp_reg_rtx;
7622 rtx cr_save_rtx = NULL;
7624 int saving_FPRs_inline;
7625 int using_store_multiple;
7626 HOST_WIDE_INT sp_offset = 0;
7628 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
7629 && info->first_gp_reg_save < 31);
7630 saving_FPRs_inline = (info->first_fp_reg_save == 64
7631 || FP_SAVE_INLINE (info->first_fp_reg_save));
7633 /* For V.4, update stack before we do any saving and set back pointer. */
7634 if (info->push_p && DEFAULT_ABI == ABI_V4)
7636 if (info->total_size < 32767)
7637 sp_offset = info->total_size;
7639 frame_reg_rtx = frame_ptr_rtx;
7640 rs6000_emit_allocate_stack (info->total_size,
7641 (frame_reg_rtx != sp_reg_rtx
7644 || info->first_fp_reg_save < 64
7645 || info->first_gp_reg_save < 32
7647 if (frame_reg_rtx != sp_reg_rtx)
7648 rs6000_emit_stack_tie ();
7651 /* If we use the link register, get it into r0. */
7652 if (info->lr_save_p)
7653 emit_move_insn (gen_rtx_REG (Pmode, 0),
7654 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7656 /* If we need to save CR, put it into r12. */
7657 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
7659 cr_save_rtx = gen_rtx_REG (SImode, 12);
7660 emit_insn (gen_movesi_from_cr (cr_save_rtx));
7663 /* Do any required saving of fpr's. If only one or two to save, do
7664 it ourselves. Otherwise, call function. */
7665 if (saving_FPRs_inline)
7668 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7669 if ((regs_ever_live[info->first_fp_reg_save+i]
7670 && ! call_used_regs[info->first_fp_reg_save+i]))
7673 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7674 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7675 GEN_INT (info->fp_save_offset
7678 mem = gen_rtx_MEM (DFmode, addr);
7679 set_mem_alias_set (mem, rs6000_sr_alias_set);
7681 insn = emit_move_insn (mem, reg);
7682 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7683 NULL_RTX, NULL_RTX);
7686 else if (info->first_fp_reg_save != 64)
7690 const char *alloc_rname;
7692 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
7694 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
7696 LINK_REGISTER_REGNUM));
7697 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
7698 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
7699 alloc_rname = ggc_strdup (rname);
7700 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
7701 gen_rtx_SYMBOL_REF (Pmode,
7703 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7706 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7707 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7708 GEN_INT (info->fp_save_offset
7709 + sp_offset + 8*i));
7710 mem = gen_rtx_MEM (DFmode, addr);
7711 set_mem_alias_set (mem, rs6000_sr_alias_set);
7713 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
7715 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7716 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7717 NULL_RTX, NULL_RTX);
7720 /* Save GPRs. This is done as a PARALLEL if we are using
7721 the store-multiple instructions. */
7722 if (using_store_multiple)
7726 p = rtvec_alloc (32 - info->first_gp_reg_save);
7727 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
7728 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7731 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7732 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7733 GEN_INT (info->gp_save_offset
7736 mem = gen_rtx_MEM (reg_mode, addr);
7737 set_mem_alias_set (mem, rs6000_sr_alias_set);
7739 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
7741 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7742 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7743 NULL_RTX, NULL_RTX);
7748 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7749 if ((regs_ever_live[info->first_gp_reg_save+i]
7750 && ! call_used_regs[info->first_gp_reg_save+i])
7751 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
7752 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7753 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
7756 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7757 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7758 GEN_INT (info->gp_save_offset
7761 mem = gen_rtx_MEM (reg_mode, addr);
7762 set_mem_alias_set (mem, rs6000_sr_alias_set);
7764 insn = emit_move_insn (mem, reg);
7765 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7766 NULL_RTX, NULL_RTX);
7770 /* ??? There's no need to emit actual instructions here, but it's the
7771 easiest way to get the frame unwind information emitted. */
7772 if (current_function_calls_eh_return)
7774 unsigned int i, regno;
7780 regno = EH_RETURN_DATA_REGNO (i);
7781 if (regno == INVALID_REGNUM)
7784 reg = gen_rtx_REG (reg_mode, regno);
7785 addr = plus_constant (frame_reg_rtx,
7786 info->ehrd_offset + sp_offset
7787 + reg_size * (int) i);
7788 mem = gen_rtx_MEM (reg_mode, addr);
7789 set_mem_alias_set (mem, rs6000_sr_alias_set);
7791 insn = emit_move_insn (mem, reg);
7792 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7793 NULL_RTX, NULL_RTX);
7797 /* Save lr if we used it. */
7798 if (info->lr_save_p)
7800 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7801 GEN_INT (info->lr_save_offset + sp_offset));
7802 rtx reg = gen_rtx_REG (Pmode, 0);
7803 rtx mem = gen_rtx_MEM (Pmode, addr);
7804 /* This should not be of rs6000_sr_alias_set, because of
7805 __builtin_return_address. */
7807 insn = emit_move_insn (mem, reg);
7808 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7809 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7812 /* Save CR if we use any that must be preserved. */
7813 if (info->cr_save_p)
7815 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7816 GEN_INT (info->cr_save_offset + sp_offset));
7817 rtx mem = gen_rtx_MEM (SImode, addr);
7819 set_mem_alias_set (mem, rs6000_sr_alias_set);
7821 /* If r12 was used to hold the original sp, copy cr into r0 now
7823 if (REGNO (frame_reg_rtx) == 12)
7825 cr_save_rtx = gen_rtx_REG (SImode, 0);
7826 emit_insn (gen_movesi_from_cr (cr_save_rtx));
7828 insn = emit_move_insn (mem, cr_save_rtx);
7830 /* Now, there's no way that dwarf2out_frame_debug_expr is going
7831 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
7832 OK. All we have to do is specify that _one_ condition code
7833 register is saved in this stack slot. The thrower's epilogue
7834 will then restore all the call-saved registers.
7835 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
7836 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7837 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
7840 /* Update stack and set back pointer unless this is V.4,
7841 for which it was done previously. */
7842 if (info->push_p && DEFAULT_ABI != ABI_V4)
7843 rs6000_emit_allocate_stack (info->total_size, FALSE);
7845 /* Save AltiVec registers if needed. */
7846 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
7850 /* There should be a non inline version of this, for when we
7851 are saving lots of vector registers. */
7852 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
7853 if (regs_ever_live[i] && ! call_used_regs[i])
7855 rtx addr, areg, savereg, mem;
7857 savereg = gen_rtx_REG (V4SImode, i);
7859 areg = gen_rtx_REG (Pmode, 0);
7861 (areg, GEN_INT (info->altivec_save_offset
7863 + 16 * (i - info->first_altivec_reg_save)));
7865 /* AltiVec addressing mode is [reg+reg]. */
7866 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
7867 mem = gen_rtx_MEM (V4SImode, addr);
7868 set_mem_alias_set (mem, rs6000_sr_alias_set);
7870 insn = emit_move_insn (mem, savereg);
7871 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7872 NULL_RTX, NULL_RTX);
7876 /* VRSAVE is a bit vector representing which AltiVec registers
7877 are used. The OS uses this to determine which vector
7878 registers to save on a context switch. We need to save
7879 VRSAVE on the stack frame, add whatever AltiVec registers we
7880 used in this function, and do the corresponding magic in the
7883 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
7887 /* Get VRSAVE onto a GPR. */
7888 reg = gen_rtx_REG (SImode, 12);
7889 emit_insn (gen_get_vrsave (reg));
7892 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7893 GEN_INT (info->vrsave_save_offset + sp_offset));
7894 mem = gen_rtx_MEM (SImode, addr);
7895 set_mem_alias_set (mem, rs6000_sr_alias_set);
7896 insn = emit_move_insn (mem, reg);
7897 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7898 NULL_RTX, NULL_RTX);
7900 /* Include the registers in the mask. */
7901 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
7903 insn = emit_insn (generate_set_vrsave (reg, info));
7905 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7906 NULL_RTX, NULL_RTX);
7909 /* Set frame pointer, if needed. */
7910 if (frame_pointer_needed)
7912 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
7914 RTX_FRAME_RELATED_P (insn) = 1;
7917 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
7918 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
7919 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
7920 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
7922 /* If emit_load_toc_table will use the link register, we need to save
7923 it. We use R11 for this purpose because emit_load_toc_table
7924 can use register 0. This allows us to use a plain 'blr' to return
7925 from the procedure more often. */
7926 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
7927 && ! info->lr_save_p
7928 && EXIT_BLOCK_PTR->pred != NULL);
7929 if (save_LR_around_toc_setup)
7930 emit_move_insn (gen_rtx_REG (Pmode, 11),
7931 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7933 rs6000_emit_load_toc_table (TRUE);
7935 if (save_LR_around_toc_setup)
7936 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7937 gen_rtx_REG (Pmode, 11));
7940 if (DEFAULT_ABI == ABI_DARWIN
7941 && flag_pic && current_function_uses_pic_offset_table)
7943 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
7945 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
7948 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
7949 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
7953 /* Write function prologue. */
7956 rs6000_output_function_prologue (file, size)
7958 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
7960 rs6000_stack_t *info = rs6000_stack_info ();
7962 if (TARGET_DEBUG_STACK)
7963 debug_stack_info (info);
7965 /* Write .extern for any function we will call to save and restore
7967 if (info->first_fp_reg_save < 64
7968 && !FP_SAVE_INLINE (info->first_fp_reg_save))
7969 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
7970 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
7971 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
7974 /* Write .extern for AIX common mode routines, if needed. */
7975 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
7977 fputs ("\t.extern __mulh\n", file);
7978 fputs ("\t.extern __mull\n", file);
7979 fputs ("\t.extern __divss\n", file);
7980 fputs ("\t.extern __divus\n", file);
7981 fputs ("\t.extern __quoss\n", file);
7982 fputs ("\t.extern __quous\n", file);
7983 common_mode_defined = 1;
7986 if (! HAVE_prologue)
7990 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
7991 the "toplevel" insn chain. */
7992 emit_note (0, NOTE_INSN_DELETED);
7993 rs6000_emit_prologue ();
7994 emit_note (0, NOTE_INSN_DELETED);
7996 if (TARGET_DEBUG_STACK)
7997 debug_rtx_list (get_insns (), 100);
7998 final (get_insns (), file, FALSE, FALSE);
8002 rs6000_pic_labelno++;
8005 /* Emit function epilogue as insns.
8007 At present, dwarf2out_frame_debug_expr doesn't understand
8008 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8009 anywhere in the epilogue. Most of the insns below would in any case
8010 need special notes to explain where r11 is in relation to the stack. */
8013 rs6000_emit_epilogue (sibcall)
8016 rs6000_stack_t *info;
8017 int restoring_FPRs_inline;
8018 int using_load_multiple;
8019 int using_mfcr_multiple;
8020 int use_backchain_to_restore_sp;
8022 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8023 rtx frame_reg_rtx = sp_reg_rtx;
8024 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8025 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8028 info = rs6000_stack_info ();
8029 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8030 && info->first_gp_reg_save < 31);
8031 restoring_FPRs_inline = (sibcall
8032 || current_function_calls_eh_return
8033 || info->first_fp_reg_save == 64
8034 || FP_SAVE_INLINE (info->first_fp_reg_save));
8035 use_backchain_to_restore_sp = (frame_pointer_needed
8036 || current_function_calls_alloca
8037 || info->total_size > 32767);
8038 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
8039 || rs6000_cpu == PROCESSOR_PPC603
8040 || rs6000_cpu == PROCESSOR_PPC750
8043 /* If we have a frame pointer, a call to alloca, or a large stack
8044 frame, restore the old stack pointer using the backchain. Otherwise,
8045 we know what size to update it with. */
8046 if (use_backchain_to_restore_sp)
8048 /* Under V.4, don't reset the stack pointer until after we're done
8049 loading the saved registers. */
8050 if (DEFAULT_ABI == ABI_V4)
8051 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
8053 emit_move_insn (frame_reg_rtx,
8054 gen_rtx_MEM (Pmode, sp_reg_rtx));
8057 else if (info->push_p)
8059 if (DEFAULT_ABI == ABI_V4)
8060 sp_offset = info->total_size;
8063 emit_insn (TARGET_32BIT
8064 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8065 GEN_INT (info->total_size))
8066 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8067 GEN_INT (info->total_size)));
8071 /* Get the old lr if we saved it. */
8072 if (info->lr_save_p)
8074 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8075 GEN_INT (info->lr_save_offset + sp_offset));
8076 rtx mem = gen_rtx_MEM (Pmode, addr);
8078 set_mem_alias_set (mem, rs6000_sr_alias_set);
8080 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
8083 /* Get the old cr if we saved it. */
8084 if (info->cr_save_p)
8086 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8087 GEN_INT (info->cr_save_offset + sp_offset));
8088 rtx mem = gen_rtx_MEM (SImode, addr);
8090 set_mem_alias_set (mem, rs6000_sr_alias_set);
8092 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
8095 /* Set LR here to try to overlap restores below. */
8096 if (info->lr_save_p)
8097 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8098 gen_rtx_REG (Pmode, 0));
8100 /* Load exception handler data registers, if needed. */
8101 if (current_function_calls_eh_return)
8103 unsigned int i, regno;
8109 regno = EH_RETURN_DATA_REGNO (i);
8110 if (regno == INVALID_REGNUM)
8113 addr = plus_constant (frame_reg_rtx,
8114 info->ehrd_offset + sp_offset
8115 + reg_size * (int) i);
8116 mem = gen_rtx_MEM (reg_mode, addr);
8117 set_mem_alias_set (mem, rs6000_sr_alias_set);
8119 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
8123 /* Restore GPRs. This is done as a PARALLEL if we are using
8124 the load-multiple instructions. */
8125 if (using_load_multiple)
8128 p = rtvec_alloc (32 - info->first_gp_reg_save);
8129 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8131 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8132 GEN_INT (info->gp_save_offset
8135 rtx mem = gen_rtx_MEM (reg_mode, addr);
8137 set_mem_alias_set (mem, rs6000_sr_alias_set);
8140 gen_rtx_SET (VOIDmode,
8141 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
8144 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8147 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8148 if ((regs_ever_live[info->first_gp_reg_save+i]
8149 && ! call_used_regs[info->first_gp_reg_save+i])
8150 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8151 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8152 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8154 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8155 GEN_INT (info->gp_save_offset
8158 rtx mem = gen_rtx_MEM (reg_mode, addr);
8160 set_mem_alias_set (mem, rs6000_sr_alias_set);
8162 emit_move_insn (gen_rtx_REG (reg_mode,
8163 info->first_gp_reg_save + i),
8167 /* Restore fpr's if we need to do it without calling a function. */
8168 if (restoring_FPRs_inline)
8169 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8170 if ((regs_ever_live[info->first_fp_reg_save+i]
8171 && ! call_used_regs[info->first_fp_reg_save+i]))
8174 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8175 GEN_INT (info->fp_save_offset
8178 mem = gen_rtx_MEM (DFmode, addr);
8179 set_mem_alias_set (mem, rs6000_sr_alias_set);
8181 emit_move_insn (gen_rtx_REG (DFmode,
8182 info->first_fp_reg_save + i),
8186 /* Restore AltiVec registers if needed. */
8187 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8191 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8192 if (regs_ever_live[i] && ! call_used_regs[i])
8194 rtx addr, areg, mem;
8196 areg = gen_rtx_REG (Pmode, 0);
8198 (areg, GEN_INT (info->altivec_save_offset
8200 + 16 * (i - info->first_altivec_reg_save)));
8202 /* AltiVec addressing mode is [reg+reg]. */
8203 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
8204 mem = gen_rtx_MEM (V4SImode, addr);
8205 set_mem_alias_set (mem, rs6000_sr_alias_set);
8207 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
8211 /* Restore VRSAVE if needed. */
8212 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
8216 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8217 GEN_INT (info->vrsave_save_offset + sp_offset));
8218 mem = gen_rtx_MEM (SImode, addr);
8219 set_mem_alias_set (mem, rs6000_sr_alias_set);
8220 reg = gen_rtx_REG (SImode, 12);
8221 emit_move_insn (reg, mem);
8223 emit_insn (generate_set_vrsave (reg, info));
8226 /* If we saved cr, restore it here. Just those that were used. */
8227 if (info->cr_save_p)
8229 rtx r12_rtx = gen_rtx_REG (SImode, 12);
8232 if (using_mfcr_multiple)
8234 for (i = 0; i < 8; i++)
8235 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8241 if (using_mfcr_multiple && count > 1)
8246 p = rtvec_alloc (count);
8249 for (i = 0; i < 8; i++)
8250 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8252 rtvec r = rtvec_alloc (2);
8253 RTVEC_ELT (r, 0) = r12_rtx;
8254 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
8255 RTVEC_ELT (p, ndx) =
8256 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
8257 gen_rtx_UNSPEC (CCmode, r, 20));
8260 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8265 for (i = 0; i < 8; i++)
8266 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8268 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
8274 /* If this is V.4, unwind the stack pointer after all of the loads
8275 have been done. We need to emit a block here so that sched
8276 doesn't decide to move the sp change before the register restores
8277 (which may not have any obvious dependency on the stack). This
8278 doesn't hurt performance, because there is no scheduling that can
8279 be done after this point. */
8280 if (DEFAULT_ABI == ABI_V4)
8282 if (frame_reg_rtx != sp_reg_rtx)
8283 rs6000_emit_stack_tie ();
8285 if (use_backchain_to_restore_sp)
8287 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
8289 else if (sp_offset != 0)
8291 emit_insn (Pmode == SImode
8292 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8293 GEN_INT (sp_offset))
8294 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8295 GEN_INT (sp_offset)));
8299 if (current_function_calls_eh_return)
8301 rtx sa = EH_RETURN_STACKADJ_RTX;
8302 emit_insn (Pmode == SImode
8303 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
8304 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
8310 if (! restoring_FPRs_inline)
8311 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
8313 p = rtvec_alloc (2);
8315 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8316 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8318 LINK_REGISTER_REGNUM));
8320 /* If we have to restore more than two FP registers, branch to the
8321 restore function. It will return to our caller. */
8322 if (! restoring_FPRs_inline)
8326 const char *alloc_rname;
8328 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
8329 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
8330 alloc_rname = ggc_strdup (rname);
8331 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
8332 gen_rtx_SYMBOL_REF (Pmode,
8335 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8338 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
8339 GEN_INT (info->fp_save_offset + 8*i));
8340 mem = gen_rtx_MEM (DFmode, addr);
8341 set_mem_alias_set (mem, rs6000_sr_alias_set);
8343 RTVEC_ELT (p, i+3) =
8344 gen_rtx_SET (VOIDmode,
8345 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
8350 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8354 /* Write function epilogue. */
8357 rs6000_output_function_epilogue (file, size)
8359 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8361 rs6000_stack_t *info = rs6000_stack_info ();
8363 if (! HAVE_epilogue)
8365 rtx insn = get_last_insn ();
8366 /* If the last insn was a BARRIER, we don't have to write anything except
8368 if (GET_CODE (insn) == NOTE)
8369 insn = prev_nonnote_insn (insn);
8370 if (insn == 0 || GET_CODE (insn) != BARRIER)
8372 /* This is slightly ugly, but at least we don't have two
8373 copies of the epilogue-emitting code. */
8376 /* A NOTE_INSN_DELETED is supposed to be at the start
8377 and end of the "toplevel" insn chain. */
8378 emit_note (0, NOTE_INSN_DELETED);
8379 rs6000_emit_epilogue (FALSE);
8380 emit_note (0, NOTE_INSN_DELETED);
8382 if (TARGET_DEBUG_STACK)
8383 debug_rtx_list (get_insns (), 100);
8384 final (get_insns (), file, FALSE, FALSE);
8389 /* Output a traceback table here. See /usr/include/sys/debug.h for info
8392 We don't output a traceback table if -finhibit-size-directive was
8393 used. The documentation for -finhibit-size-directive reads
8394 ``don't output a @code{.size} assembler directive, or anything
8395 else that would cause trouble if the function is split in the
8396 middle, and the two halves are placed at locations far apart in
8397 memory.'' The traceback table has this property, since it
8398 includes the offset from the start of the function to the
8399 traceback table itself.
8401 System V.4 Powerpc's (and the embedded ABI derived from it) use a
8402 different traceback table. */
8403 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
8405 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8406 const char *language_string = lang_hooks.name;
8407 int fixed_parms, float_parms, parm_info;
8410 while (*fname == '.') /* V.4 encodes . in the name */
8413 /* Need label immediately before tbtab, so we can compute its offset
8414 from the function start. */
8417 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8418 ASM_OUTPUT_LABEL (file, fname);
8420 /* The .tbtab pseudo-op can only be used for the first eight
8421 expressions, since it can't handle the possibly variable
8422 length fields that follow. However, if you omit the optional
8423 fields, the assembler outputs zeros for all optional fields
8424 anyways, giving each variable length field is minimum length
8425 (as defined in sys/debug.h). Thus we can not use the .tbtab
8426 pseudo-op at all. */
8428 /* An all-zero word flags the start of the tbtab, for debuggers
8429 that have to find it by searching forward from the entry
8430 point or from the current pc. */
8431 fputs ("\t.long 0\n", file);
8433 /* Tbtab format type. Use format type 0. */
8434 fputs ("\t.byte 0,", file);
8436 /* Language type. Unfortunately, there doesn't seem to be any
8437 official way to get this info, so we use language_string. C
8438 is 0. C++ is 9. No number defined for Obj-C, so use the
8439 value for C for now. There is no official value for Java,
8440 although IBM appears to be using 13. There is no official value
8441 for Chill, so we've chosen 44 pseudo-randomly. */
8442 if (! strcmp (language_string, "GNU C")
8443 || ! strcmp (language_string, "GNU Objective-C"))
8445 else if (! strcmp (language_string, "GNU F77"))
8447 else if (! strcmp (language_string, "GNU Ada"))
8449 else if (! strcmp (language_string, "GNU Pascal"))
8451 else if (! strcmp (language_string, "GNU C++"))
8453 else if (! strcmp (language_string, "GNU Java"))
8455 else if (! strcmp (language_string, "GNU CHILL"))
8459 fprintf (file, "%d,", i);
8461 /* 8 single bit fields: global linkage (not set for C extern linkage,
8462 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
8463 from start of procedure stored in tbtab, internal function, function
8464 has controlled storage, function has no toc, function uses fp,
8465 function logs/aborts fp operations. */
8466 /* Assume that fp operations are used if any fp reg must be saved. */
8467 fprintf (file, "%d,", (1 << 5) | ((info->first_fp_reg_save != 64) << 1));
8469 /* 6 bitfields: function is interrupt handler, name present in
8470 proc table, function calls alloca, on condition directives
8471 (controls stack walks, 3 bits), saves condition reg, saves
8473 /* The `function calls alloca' bit seems to be set whenever reg 31 is
8474 set up as a frame pointer, even when there is no alloca call. */
8475 fprintf (file, "%d,",
8476 ((1 << 6) | (frame_pointer_needed << 5)
8477 | (info->cr_save_p << 1) | (info->lr_save_p)));
8479 /* 3 bitfields: saves backchain, spare bit, number of fpr saved
8481 fprintf (file, "%d,",
8482 (info->push_p << 7) | (64 - info->first_fp_reg_save));
8484 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
8485 fprintf (file, "%d,", (32 - first_reg_to_save ()));
8488 /* Compute the parameter info from the function decl argument
8491 int next_parm_info_bit;
8493 next_parm_info_bit = 31;
8498 for (decl = DECL_ARGUMENTS (current_function_decl);
8499 decl; decl = TREE_CHAIN (decl))
8501 rtx parameter = DECL_INCOMING_RTL (decl);
8502 enum machine_mode mode = GET_MODE (parameter);
8504 if (GET_CODE (parameter) == REG)
8506 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
8514 else if (mode == DFmode)
8519 /* If only one bit will fit, don't or in this entry. */
8520 if (next_parm_info_bit > 0)
8521 parm_info |= (bits << (next_parm_info_bit - 1));
8522 next_parm_info_bit -= 2;
8526 fixed_parms += ((GET_MODE_SIZE (mode)
8527 + (UNITS_PER_WORD - 1))
8529 next_parm_info_bit -= 1;
8535 /* Number of fixed point parameters. */
8536 /* This is actually the number of words of fixed point parameters; thus
8537 an 8 byte struct counts as 2; and thus the maximum value is 8. */
8538 fprintf (file, "%d,", fixed_parms);
8540 /* 2 bitfields: number of floating point parameters (7 bits), parameters
8542 /* This is actually the number of fp registers that hold parameters;
8543 and thus the maximum value is 13. */
8544 /* Set parameters on stack bit if parameters are not in their original
8545 registers, regardless of whether they are on the stack? Xlc
8546 seems to set the bit when not optimizing. */
8547 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
8549 /* Optional fields follow. Some are variable length. */
8551 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
8553 /* There is an entry for each parameter in a register, in the order that
8554 they occur in the parameter list. Any intervening arguments on the
8555 stack are ignored. If the list overflows a long (max possible length
8556 34 bits) then completely leave off all elements that don't fit. */
8557 /* Only emit this long if there was at least one parameter. */
8558 if (fixed_parms || float_parms)
8559 fprintf (file, "\t.long %d\n", parm_info);
8561 /* Offset from start of code to tb table. */
8562 fputs ("\t.long ", file);
8563 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8565 RS6000_OUTPUT_BASENAME (file, fname);
8567 assemble_name (file, fname);
8571 RS6000_OUTPUT_BASENAME (file, fname);
8573 assemble_name (file, fname);
8577 /* Interrupt handler mask. */
8578 /* Omit this long, since we never set the interrupt handler bit
8581 /* Number of CTL (controlled storage) anchors. */
8582 /* Omit this long, since the has_ctl bit is never set above. */
8584 /* Displacement into stack of each CTL anchor. */
8585 /* Omit this list of longs, because there are no CTL anchors. */
8587 /* Length of function name. */
8588 fprintf (file, "\t.short %d\n", (int) strlen (fname));
8590 /* Function name. */
8591 assemble_string (fname, strlen (fname));
8593 /* Register for alloca automatic storage; this is always reg 31.
8594 Only emit this if the alloca bit was set above. */
8595 if (frame_pointer_needed)
8596 fputs ("\t.byte 31\n", file);
8600 /* A C compound statement that outputs the assembler code for a thunk
8601 function, used to implement C++ virtual function calls with
8602 multiple inheritance. The thunk acts as a wrapper around a virtual
8603 function, adjusting the implicit object parameter before handing
8604 control off to the real function.
8606 First, emit code to add the integer DELTA to the location that
8607 contains the incoming first argument. Assume that this argument
8608 contains a pointer, and is the one used to pass the `this' pointer
8609 in C++. This is the incoming argument *before* the function
8610 prologue, e.g. `%o0' on a sparc. The addition must preserve the
8611 values of all other incoming arguments.
8613 After the addition, emit code to jump to FUNCTION, which is a
8614 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
8615 not touch the return address. Hence returning from FUNCTION will
8616 return to whoever called the current `thunk'.
8618 The effect must be as if FUNCTION had been called directly with the
8619 adjusted first argument. This macro is responsible for emitting
8620 all of the code for a thunk function; output_function_prologue()
8621 and output_function_epilogue() are not invoked.
8623 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
8624 been extracted from it.) It might possibly be useful on some
8625 targets, but probably not.
8627 If you do not define this macro, the target-independent code in the
8628 C++ frontend will generate a less efficient heavyweight thunk that
8629 calls FUNCTION instead of jumping to it. The generic approach does
8630 not support varargs. */
8633 output_mi_thunk (file, thunk_fndecl, delta, function)
8635 tree thunk_fndecl ATTRIBUTE_UNUSED;
8639 const char *this_reg =
8640 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
8643 const char *r0 = reg_names[0];
8644 const char *toc = reg_names[2];
8645 const char *schain = reg_names[11];
8646 const char *r12 = reg_names[12];
8648 static int labelno = 0;
8650 /* Small constants that can be done by one add instruction. */
8651 if (delta >= -32768 && delta <= 32767)
8653 if (! TARGET_NEW_MNEMONICS)
8654 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
8656 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
8659 /* Large constants that can be done by one addis instruction. */
8660 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
8661 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8664 /* 32-bit constants that can be done by an add and addis instruction. */
8665 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
8667 /* Break into two pieces, propagating the sign bit from the low
8668 word to the upper word. */
8669 int delta_high = delta >> 16;
8670 int delta_low = delta & 0xffff;
8671 if ((delta_low & 0x8000) != 0)
8674 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
8677 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8680 if (! TARGET_NEW_MNEMONICS)
8681 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
8683 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
8686 /* 64-bit constants, fixme */
8690 /* Get the prefix in front of the names. */
8691 switch (DEFAULT_ABI)
8701 case ABI_AIX_NODESC:
8706 /* If the function is compiled in this module, jump to it directly.
8707 Otherwise, load up its address and jump to it. */
8709 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
8711 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
8712 && ! lookup_attribute ("longcall",
8713 TYPE_ATTRIBUTES (TREE_TYPE (function))))
8715 fprintf (file, "\tb %s", prefix);
8716 assemble_name (file, fname);
8717 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
8723 switch (DEFAULT_ABI)
8729 /* Set up a TOC entry for the function. */
8730 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
8732 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
8735 if (TARGET_MINIMAL_TOC)
8736 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
8739 fputs ("\t.tc ", file);
8740 assemble_name (file, fname);
8741 fputs ("[TC],", file);
8743 assemble_name (file, fname);
8746 if (TARGET_MINIMAL_TOC)
8747 asm_fprintf (file, (TARGET_32BIT)
8748 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
8749 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
8750 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
8751 assemble_name (file, buf);
8752 if (TARGET_ELF && TARGET_MINIMAL_TOC)
8753 fputs ("-(.LCTOC1)", file);
8754 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
8756 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
8760 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
8763 asm_fprintf (file, "\tmtctr %s\n", r0);
8765 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
8768 asm_fprintf (file, "\tbctr\n");
8771 case ABI_AIX_NODESC:
8773 fprintf (file, "\tb %s", prefix);
8774 assemble_name (file, fname);
8775 if (flag_pic) fputs ("@plt", file);
8781 fprintf (file, "\tb %s", prefix);
8782 if (flag_pic && !machopic_name_defined_p (fname))
8783 assemble_name (file, machopic_stub_name (fname));
8785 assemble_name (file, fname);
8794 /* A quick summary of the various types of 'constant-pool tables'
8797 Target Flags Name One table per
8798 AIX (none) AIX TOC object file
8799 AIX -mfull-toc AIX TOC object file
8800 AIX -mminimal-toc AIX minimal TOC translation unit
8801 SVR4/EABI (none) SVR4 SDATA object file
8802 SVR4/EABI -fpic SVR4 pic object file
8803 SVR4/EABI -fPIC SVR4 PIC translation unit
8804 SVR4/EABI -mrelocatable EABI TOC function
8805 SVR4/EABI -maix AIX TOC object file
8806 SVR4/EABI -maix -mminimal-toc
8807 AIX minimal TOC translation unit
8809 Name Reg. Set by entries contains:
8810 made by addrs? fp? sum?
8812 AIX TOC 2 crt0 as Y option option
8813 AIX minimal TOC 30 prolog gcc Y Y option
8814 SVR4 SDATA 13 crt0 gcc N Y N
8815 SVR4 pic 30 prolog ld Y not yet N
8816 SVR4 PIC 30 prolog gcc Y option option
8817 EABI TOC 30 prolog gcc Y option option
8821 /* Hash table stuff for keeping track of TOC entries. */
8823 struct toc_hash_struct
8825 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
8826 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
8828 enum machine_mode key_mode;
8832 static htab_t toc_hash_table;
8834 /* Hash functions for the hash table. */
8837 rs6000_hash_constant (k)
8840 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
8841 const char *format = GET_RTX_FORMAT (GET_CODE (k));
8842 int flen = strlen (format);
8845 if (GET_CODE (k) == LABEL_REF)
8846 return result * 1231 + X0INT (XEXP (k, 0), 3);
8848 if (GET_CODE (k) == CONST_DOUBLE)
8850 else if (GET_CODE (k) == CODE_LABEL)
8855 for (; fidx < flen; fidx++)
8856 switch (format[fidx])
8861 const char *str = XSTR (k, fidx);
8863 result = result * 613 + len;
8864 for (i = 0; i < len; i++)
8865 result = result * 613 + (unsigned) str[i];
8870 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
8874 result = result * 613 + (unsigned) XINT (k, fidx);
8877 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
8878 result = result * 613 + (unsigned) XWINT (k, fidx);
8882 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
8883 result = result * 613 + (unsigned) (XWINT (k, fidx)
8894 toc_hash_function (hash_entry)
8895 const void * hash_entry;
8897 const struct toc_hash_struct *thc =
8898 (const struct toc_hash_struct *) hash_entry;
8899 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
8902 /* Compare H1 and H2 for equivalence. */
8905 toc_hash_eq (h1, h2)
8909 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
8910 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
8912 if (((const struct toc_hash_struct *) h1)->key_mode
8913 != ((const struct toc_hash_struct *) h2)->key_mode)
8916 /* Gotcha: One of these const_doubles will be in memory.
8917 The other may be on the constant-pool chain.
8918 So rtx_equal_p will think they are different... */
8921 if (GET_CODE (r1) != GET_CODE (r2)
8922 || GET_MODE (r1) != GET_MODE (r2))
8924 if (GET_CODE (r1) == CONST_DOUBLE)
8926 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
8928 for (i = 1; i < format_len; i++)
8929 if (XWINT (r1, i) != XWINT (r2, i))
8934 else if (GET_CODE (r1) == LABEL_REF)
8935 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
8936 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
8938 return rtx_equal_p (r1, r2);
8941 /* Mark the hash table-entry HASH_ENTRY. */
8944 toc_hash_mark_entry (hash_slot, unused)
8946 void * unused ATTRIBUTE_UNUSED;
8948 const struct toc_hash_struct * hash_entry =
8949 *(const struct toc_hash_struct **) hash_slot;
8950 rtx r = hash_entry->key;
8951 ggc_set_mark (hash_entry);
8952 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
8953 if (GET_CODE (r) == LABEL_REF)
8956 ggc_set_mark (XEXP (r, 0));
8963 /* Mark all the elements of the TOC hash-table *HT. */
8966 toc_hash_mark_table (vht)
8971 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
8974 /* These are the names given by the C++ front-end to vtables, and
8975 vtable-like objects. Ideally, this logic should not be here;
8976 instead, there should be some programmatic way of inquiring as
8977 to whether or not an object is a vtable. */
8979 #define VTABLE_NAME_P(NAME) \
8980 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
8981 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
8982 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
8983 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
8986 rs6000_output_symbol_ref (file, x)
8990 /* Currently C++ toc references to vtables can be emitted before it
8991 is decided whether the vtable is public or private. If this is
8992 the case, then the linker will eventually complain that there is
8993 a reference to an unknown section. Thus, for vtables only,
8994 we emit the TOC reference to reference the symbol and not the
8996 const char *name = XSTR (x, 0);
8998 if (VTABLE_NAME_P (name))
9000 RS6000_OUTPUT_BASENAME (file, name);
9003 assemble_name (file, name);
9006 /* Output a TOC entry. We derive the entry name from what is being
9010 output_toc (file, x, labelno, mode)
9014 enum machine_mode mode;
9017 const char *name = buf;
9018 const char *real_name;
9025 /* When the linker won't eliminate them, don't output duplicate
9026 TOC entries (this happens on AIX if there is any kind of TOC,
9027 and on SVR4 under -fPIC or -mrelocatable). */
9030 struct toc_hash_struct *h;
9033 h = ggc_alloc (sizeof (*h));
9036 h->labelno = labelno;
9038 found = htab_find_slot (toc_hash_table, h, 1);
9041 else /* This is indeed a duplicate.
9042 Set this label equal to that label. */
9044 fputs ("\t.set ", file);
9045 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9046 fprintf (file, "%d,", labelno);
9047 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9048 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9054 /* If we're going to put a double constant in the TOC, make sure it's
9055 aligned properly when strict alignment is on. */
9056 if (GET_CODE (x) == CONST_DOUBLE
9058 && GET_MODE_BITSIZE (mode) >= 64
9059 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
9060 ASM_OUTPUT_ALIGN (file, 3);
9063 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
9065 /* Handle FP constants specially. Note that if we have a minimal
9066 TOC, things we put here aren't actually in the TOC, so we can allow
9068 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9073 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9074 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
9078 if (TARGET_MINIMAL_TOC)
9079 fputs (DOUBLE_INT_ASM_OP, file);
9081 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9082 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
9087 if (TARGET_MINIMAL_TOC)
9088 fputs ("\t.long ", file);
9090 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9091 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
9095 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9100 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9101 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
9105 if (TARGET_MINIMAL_TOC)
9106 fputs (DOUBLE_INT_ASM_OP, file);
9108 fprintf (file, "\t.tc FS_%lx[TC],", l);
9109 fprintf (file, "0x%lx00000000\n", l);
9114 if (TARGET_MINIMAL_TOC)
9115 fputs ("\t.long ", file);
9117 fprintf (file, "\t.tc FS_%lx[TC],", l);
9118 fprintf (file, "0x%lx\n", l);
9122 else if (GET_MODE (x) == VOIDmode
9123 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
9125 unsigned HOST_WIDE_INT low;
9128 if (GET_CODE (x) == CONST_DOUBLE)
9130 low = CONST_DOUBLE_LOW (x);
9131 high = CONST_DOUBLE_HIGH (x);
9134 #if HOST_BITS_PER_WIDE_INT == 32
9137 high = (low & 0x80000000) ? ~0 : 0;
9141 low = INTVAL (x) & 0xffffffff;
9142 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
9146 /* TOC entries are always Pmode-sized, but since this
9147 is a bigendian machine then if we're putting smaller
9148 integer constants in the TOC we have to pad them.
9149 (This is still a win over putting the constants in
9150 a separate constant pool, because then we'd have
9151 to have both a TOC entry _and_ the actual constant.)
9153 For a 32-bit target, CONST_INT values are loaded and shifted
9154 entirely within `low' and can be stored in one TOC entry. */
9156 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
9157 abort ();/* It would be easy to make this work, but it doesn't now. */
9159 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
9160 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
9161 POINTER_SIZE, &low, &high, 0);
9165 if (TARGET_MINIMAL_TOC)
9166 fputs (DOUBLE_INT_ASM_OP, file);
9168 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
9169 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
9174 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
9176 if (TARGET_MINIMAL_TOC)
9177 fputs ("\t.long ", file);
9179 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
9180 (long)high, (long)low);
9181 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
9185 if (TARGET_MINIMAL_TOC)
9186 fputs ("\t.long ", file);
9188 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
9189 fprintf (file, "0x%lx\n", (long) low);
9195 if (GET_CODE (x) == CONST)
9197 if (GET_CODE (XEXP (x, 0)) != PLUS)
9200 base = XEXP (XEXP (x, 0), 0);
9201 offset = INTVAL (XEXP (XEXP (x, 0), 1));
9204 if (GET_CODE (base) == SYMBOL_REF)
9205 name = XSTR (base, 0);
9206 else if (GET_CODE (base) == LABEL_REF)
9207 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
9208 else if (GET_CODE (base) == CODE_LABEL)
9209 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
9213 STRIP_NAME_ENCODING (real_name, name);
9214 if (TARGET_MINIMAL_TOC)
9215 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9218 fprintf (file, "\t.tc %s", real_name);
9221 fprintf (file, ".N%d", - offset);
9223 fprintf (file, ".P%d", offset);
9225 fputs ("[TC],", file);
9228 /* Currently C++ toc references to vtables can be emitted before it
9229 is decided whether the vtable is public or private. If this is
9230 the case, then the linker will eventually complain that there is
9231 a TOC reference to an unknown section. Thus, for vtables only,
9232 we emit the TOC reference to reference the symbol and not the
9234 if (VTABLE_NAME_P (name))
9236 RS6000_OUTPUT_BASENAME (file, name);
9238 fprintf (file, "%d", offset);
9239 else if (offset > 0)
9240 fprintf (file, "+%d", offset);
9243 output_addr_const (file, x);
9247 /* Output an assembler pseudo-op to write an ASCII string of N characters
9248 starting at P to FILE.
9250 On the RS/6000, we have to do this using the .byte operation and
9251 write out special characters outside the quoted string.
9252 Also, the assembler is broken; very long strings are truncated,
9253 so we must artificially break them up early. */
9256 output_ascii (file, p, n)
9262 int i, count_string;
9263 const char *for_string = "\t.byte \"";
9264 const char *for_decimal = "\t.byte ";
9265 const char *to_close = NULL;
9268 for (i = 0; i < n; i++)
9271 if (c >= ' ' && c < 0177)
9274 fputs (for_string, file);
9277 /* Write two quotes to get one. */
9285 for_decimal = "\"\n\t.byte ";
9289 if (count_string >= 512)
9291 fputs (to_close, file);
9293 for_string = "\t.byte \"";
9294 for_decimal = "\t.byte ";
9302 fputs (for_decimal, file);
9303 fprintf (file, "%d", c);
9305 for_string = "\n\t.byte \"";
9312 /* Now close the string if we have written one. Then end the line. */
9314 fputs (to_close, file);
9317 /* Generate a unique section name for FILENAME for a section type
9318 represented by SECTION_DESC. Output goes into BUF.
9320 SECTION_DESC can be any string, as long as it is different for each
9321 possible section type.
9323 We name the section in the same manner as xlc. The name begins with an
9324 underscore followed by the filename (after stripping any leading directory
9325 names) with the last period replaced by the string SECTION_DESC. If
9326 FILENAME does not contain a period, SECTION_DESC is appended to the end of
9330 rs6000_gen_section_name (buf, filename, section_desc)
9332 const char *filename;
9333 const char *section_desc;
9335 const char *q, *after_last_slash, *last_period = 0;
9339 after_last_slash = filename;
9340 for (q = filename; *q; q++)
9343 after_last_slash = q + 1;
9348 len = strlen (after_last_slash) + strlen (section_desc) + 2;
9349 *buf = (char *) permalloc (len);
9354 for (q = after_last_slash; *q; q++)
9356 if (q == last_period)
9358 strcpy (p, section_desc);
9359 p += strlen (section_desc);
9362 else if (ISALNUM (*q))
9366 if (last_period == 0)
9367 strcpy (p, section_desc);
9372 /* Emit profile function. */
9375 output_profile_hook (labelno)
9378 if (DEFAULT_ABI == ABI_AIX)
9381 const char *label_name;
9386 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9387 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
9388 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
9390 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
9393 else if (DEFAULT_ABI == ABI_DARWIN)
9395 const char *mcount_name = RS6000_MCOUNT;
9396 int caller_addr_regno = LINK_REGISTER_REGNUM;
9398 /* Be conservative and always set this, at least for now. */
9399 current_function_uses_pic_offset_table = 1;
9402 /* For PIC code, set up a stub and collect the caller's address
9403 from r0, which is where the prologue puts it. */
9406 mcount_name = machopic_stub_name (mcount_name);
9407 if (current_function_uses_pic_offset_table)
9408 caller_addr_regno = 0;
9411 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
9413 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
9417 /* Write function profiler code. */
9420 output_function_profiler (file, labelno)
9426 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9427 switch (DEFAULT_ABI)
9433 case ABI_AIX_NODESC:
9434 fprintf (file, "\tmflr %s\n", reg_names[0]);
9437 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
9438 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9439 reg_names[0], reg_names[1]);
9440 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
9441 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
9442 assemble_name (file, buf);
9443 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
9445 else if (flag_pic > 1)
9447 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9448 reg_names[0], reg_names[1]);
9449 /* Now, we need to get the address of the label. */
9450 fputs ("\tbl 1f\n\t.long ", file);
9451 assemble_name (file, buf);
9452 fputs ("-.\n1:", file);
9453 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
9454 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9455 reg_names[0], reg_names[11]);
9456 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
9457 reg_names[0], reg_names[0], reg_names[11]);
9461 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
9462 assemble_name (file, buf);
9463 fputs ("@ha\n", file);
9464 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9465 reg_names[0], reg_names[1]);
9466 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
9467 assemble_name (file, buf);
9468 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
9471 if (current_function_needs_context)
9472 asm_fprintf (file, "\tmr %s,%s\n",
9473 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
9474 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
9475 if (current_function_needs_context)
9476 asm_fprintf (file, "\tmr %s,%s\n",
9477 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
9482 /* Don't do anything, done in output_profile_hook (). */
9488 /* Adjust the cost of a scheduling dependency. Return the new cost of
9489 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
9492 rs6000_adjust_cost (insn, link, dep_insn, cost)
9495 rtx dep_insn ATTRIBUTE_UNUSED;
9498 if (! recog_memoized (insn))
9501 if (REG_NOTE_KIND (link) != 0)
9504 if (REG_NOTE_KIND (link) == 0)
9506 /* Data dependency; DEP_INSN writes a register that INSN reads
9507 some cycles later. */
9508 switch (get_attr_type (insn))
9511 /* Tell the first scheduling pass about the latency between
9512 a mtctr and bctr (and mtlr and br/blr). The first
9513 scheduling pass will not know about this latency since
9514 the mtctr instruction, which has the latency associated
9515 to it, will be generated by reload. */
9516 return TARGET_POWER ? 5 : 4;
9518 /* Leave some extra cycles between a compare and its
9519 dependent branch, to inhibit expensive mispredicts. */
9520 if ((rs6000_cpu_attr == CPU_PPC750
9521 || rs6000_cpu_attr == CPU_PPC7400
9522 || rs6000_cpu_attr == CPU_PPC7450)
9523 && recog_memoized (dep_insn)
9524 && (INSN_CODE (dep_insn) >= 0)
9525 && (get_attr_type (dep_insn) == TYPE_COMPARE
9526 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9527 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
9528 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
9533 /* Fall out to return default cost. */
9539 /* A C statement (sans semicolon) to update the integer scheduling
9540 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
9541 INSN earlier, increase the priority to execute INSN later. Do not
9542 define this macro if you do not need to adjust the scheduling
9543 priorities of insns. */
9546 rs6000_adjust_priority (insn, priority)
9547 rtx insn ATTRIBUTE_UNUSED;
9550 /* On machines (like the 750) which have asymmetric integer units,
9551 where one integer unit can do multiply and divides and the other
9552 can't, reduce the priority of multiply/divide so it is scheduled
9553 before other integer operations. */
9556 if (! INSN_P (insn))
9559 if (GET_CODE (PATTERN (insn)) == USE)
9562 switch (rs6000_cpu_attr) {
9564 switch (get_attr_type (insn))
9571 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
9572 priority, priority);
9573 if (priority >= 0 && priority < 0x01000000)
9583 /* Return how many instructions the machine can issue per cycle. */
9586 rs6000_issue_rate ()
9588 switch (rs6000_cpu_attr) {
9589 case CPU_RIOS1: /* ? */
9591 case CPU_PPC601: /* ? */
9610 /* Length in units of the trampoline for entering a nested function. */
9613 rs6000_trampoline_size ()
9617 switch (DEFAULT_ABI)
9623 ret = (TARGET_32BIT) ? 12 : 24;
9628 case ABI_AIX_NODESC:
9629 ret = (TARGET_32BIT) ? 40 : 48;
9636 /* Emit RTL insns to initialize the variable parts of a trampoline.
9637 FNADDR is an RTX for the address of the function's pure code.
9638 CXT is an RTX for the static chain value for the function. */
9641 rs6000_initialize_trampoline (addr, fnaddr, cxt)
9646 enum machine_mode pmode = Pmode;
9647 int regsize = (TARGET_32BIT) ? 4 : 8;
9648 rtx ctx_reg = force_reg (pmode, cxt);
9650 switch (DEFAULT_ABI)
9655 /* Macros to shorten the code expansions below. */
9656 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
9657 #define MEM_PLUS(addr,offset) \
9658 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
9660 /* Under AIX, just build the 3 word function descriptor */
9663 rtx fn_reg = gen_reg_rtx (pmode);
9664 rtx toc_reg = gen_reg_rtx (pmode);
9665 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
9666 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
9667 emit_move_insn (MEM_DEREF (addr), fn_reg);
9668 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
9669 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
9673 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
9676 case ABI_AIX_NODESC:
9677 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
9680 GEN_INT (rs6000_trampoline_size ()), SImode,
9690 /* Table of valid machine attributes. */
9692 const struct attribute_spec rs6000_attribute_table[] =
9694 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
9695 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
9696 { NULL, 0, 0, false, false, false, NULL }
9699 /* Handle a "longcall" attribute; arguments as in struct
9700 attribute_spec.handler. */
9703 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
9706 tree args ATTRIBUTE_UNUSED;
9707 int flags ATTRIBUTE_UNUSED;
9710 if (TREE_CODE (*node) != FUNCTION_TYPE
9711 && TREE_CODE (*node) != FIELD_DECL
9712 && TREE_CODE (*node) != TYPE_DECL)
9714 warning ("`%s' attribute only applies to functions",
9715 IDENTIFIER_POINTER (name));
9716 *no_add_attrs = true;
9722 /* Return a reference suitable for calling a function with the
9723 longcall attribute. */
9726 rs6000_longcall_ref (call_ref)
9729 const char *call_name;
9732 if (GET_CODE (call_ref) != SYMBOL_REF)
9735 /* System V adds '.' to the internal name, so skip them. */
9736 call_name = XSTR (call_ref, 0);
9737 if (*call_name == '.')
9739 while (*call_name == '.')
9742 node = get_identifier (call_name);
9743 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
9746 return force_reg (Pmode, call_ref);
9750 /* A C statement or statements to switch to the appropriate section
9751 for output of RTX in mode MODE. You can assume that RTX is some
9752 kind of constant in RTL. The argument MODE is redundant except in
9753 the case of a `const_int' rtx. Select the section by calling
9754 `text_section' or one of the alternatives for other sections.
9756 Do not define this macro if you put all constants in the read-only
9759 #ifdef USING_ELFOS_H
9762 rs6000_select_rtx_section (mode, x)
9763 enum machine_mode mode;
9766 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
9772 /* A C statement or statements to switch to the appropriate
9773 section for output of DECL. DECL is either a `VAR_DECL' node
9774 or a constant of some sort. RELOC indicates whether forming
9775 the initial value of DECL requires link-time relocations. */
9778 rs6000_select_section (decl, reloc)
9782 int size = int_size_in_bytes (TREE_TYPE (decl));
9785 static void (* const sec_funcs[4]) PARAMS ((void)) = {
9792 needs_sdata = (size > 0
9793 && size <= g_switch_value
9794 && rs6000_sdata != SDATA_NONE
9795 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
9797 if (TREE_CODE (decl) == STRING_CST)
9798 readonly = ! flag_writable_strings;
9799 else if (TREE_CODE (decl) == VAR_DECL)
9800 readonly = (! (flag_pic && reloc)
9801 && TREE_READONLY (decl)
9802 && ! TREE_SIDE_EFFECTS (decl)
9803 && DECL_INITIAL (decl)
9804 && DECL_INITIAL (decl) != error_mark_node
9805 && TREE_CONSTANT (DECL_INITIAL (decl)));
9806 else if (TREE_CODE (decl) == CONSTRUCTOR)
9807 readonly = (! (flag_pic && reloc)
9808 && ! TREE_SIDE_EFFECTS (decl)
9809 && TREE_CONSTANT (decl));
9812 if (needs_sdata && rs6000_sdata != SDATA_EABI)
9815 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
9818 /* A C statement to build up a unique section name, expressed as a
9819 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
9820 RELOC indicates whether the initial value of EXP requires
9821 link-time relocations. If you do not define this macro, GCC will use
9822 the symbol name prefixed by `.' as the section name. Note - this
9823 macro can now be called for uninitialized data items as well as
9824 initialised data and functions. */
9827 rs6000_unique_section (decl, reloc)
9837 static const char *const prefixes[7][2] =
9839 { ".rodata.", ".gnu.linkonce.r." },
9840 { ".sdata2.", ".gnu.linkonce.s2." },
9841 { ".data.", ".gnu.linkonce.d." },
9842 { ".sdata.", ".gnu.linkonce.s." },
9843 { ".bss.", ".gnu.linkonce.b." },
9844 { ".sbss.", ".gnu.linkonce.sb." },
9845 { ".text.", ".gnu.linkonce.t." }
9848 if (TREE_CODE (decl) == FUNCTION_DECL)
9857 if (TREE_CODE (decl) == STRING_CST)
9858 readonly = ! flag_writable_strings;
9859 else if (TREE_CODE (decl) == VAR_DECL)
9860 readonly = (! (flag_pic && reloc)
9861 && TREE_READONLY (decl)
9862 && ! TREE_SIDE_EFFECTS (decl)
9863 && TREE_CONSTANT (DECL_INITIAL (decl)));
9865 size = int_size_in_bytes (TREE_TYPE (decl));
9866 needs_sdata = (size > 0
9867 && size <= g_switch_value
9868 && rs6000_sdata != SDATA_NONE
9869 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
9871 if (DECL_INITIAL (decl) == 0
9872 || DECL_INITIAL (decl) == error_mark_node)
9874 else if (! readonly)
9881 /* .sdata2 is only for EABI. */
9882 if (sec == 0 && rs6000_sdata != SDATA_EABI)
9888 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
9889 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
9890 len = strlen (name) + strlen (prefix);
9891 string = alloca (len + 1);
9893 sprintf (string, "%s%s", prefix, name);
9895 DECL_SECTION_NAME (decl) = build_string (len, string);
9899 /* If we are referencing a function that is static or is known to be
9900 in this file, make the SYMBOL_REF special. We can use this to indicate
9901 that we can branch to this function without emitting a no-op after the
9902 call. For real AIX calling sequences, we also replace the
9903 function name with the real name (1 or 2 leading .'s), rather than
9904 the function descriptor name. This saves a lot of overriding code
9905 to read the prefixes. */
9908 rs6000_encode_section_info (decl)
9911 if (TREE_CODE (decl) == FUNCTION_DECL)
9913 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
9914 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
9915 && ! DECL_WEAK (decl))
9916 SYMBOL_REF_FLAG (sym_ref) = 1;
9918 if (DEFAULT_ABI == ABI_AIX)
9920 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
9921 size_t len2 = strlen (XSTR (sym_ref, 0));
9922 char *str = alloca (len1 + len2 + 1);
9925 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
9927 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
9930 else if (rs6000_sdata != SDATA_NONE
9931 && DEFAULT_ABI == ABI_V4
9932 && TREE_CODE (decl) == VAR_DECL)
9934 int size = int_size_in_bytes (TREE_TYPE (decl));
9935 tree section_name = DECL_SECTION_NAME (decl);
9936 const char *name = (char *)0;
9941 if (TREE_CODE (section_name) == STRING_CST)
9943 name = TREE_STRING_POINTER (section_name);
9944 len = TREE_STRING_LENGTH (section_name);
9950 if ((size > 0 && size <= g_switch_value)
9952 && ((len == sizeof (".sdata") - 1
9953 && strcmp (name, ".sdata") == 0)
9954 || (len == sizeof (".sdata2") - 1
9955 && strcmp (name, ".sdata2") == 0)
9956 || (len == sizeof (".sbss") - 1
9957 && strcmp (name, ".sbss") == 0)
9958 || (len == sizeof (".sbss2") - 1
9959 && strcmp (name, ".sbss2") == 0)
9960 || (len == sizeof (".PPC.EMB.sdata0") - 1
9961 && strcmp (name, ".PPC.EMB.sdata0") == 0)
9962 || (len == sizeof (".PPC.EMB.sbss0") - 1
9963 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
9965 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
9966 size_t len = strlen (XSTR (sym_ref, 0));
9967 char *str = alloca (len + 2);
9970 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
9971 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
9976 #endif /* USING_ELFOS_H */
9979 /* Return a REG that occurs in ADDR with coefficient 1.
9980 ADDR can be effectively incremented by incrementing REG.
9982 r0 is special and we must not select it as an address
9983 register by this routine since our caller will try to
9984 increment the returned register via an "la" instruction. */
9987 find_addr_reg (addr)
9990 while (GET_CODE (addr) == PLUS)
9992 if (GET_CODE (XEXP (addr, 0)) == REG
9993 && REGNO (XEXP (addr, 0)) != 0)
9994 addr = XEXP (addr, 0);
9995 else if (GET_CODE (XEXP (addr, 1)) == REG
9996 && REGNO (XEXP (addr, 1)) != 0)
9997 addr = XEXP (addr, 1);
9998 else if (CONSTANT_P (XEXP (addr, 0)))
9999 addr = XEXP (addr, 1);
10000 else if (CONSTANT_P (XEXP (addr, 1)))
10001 addr = XEXP (addr, 0);
10005 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10011 rs6000_fatal_bad_address (op)
10014 fatal_insn ("bad address", op);
10017 /* Called to register all of our global variables with the garbage
10021 rs6000_add_gc_roots ()
10023 ggc_add_rtx_root (&rs6000_compare_op0, 1);
10024 ggc_add_rtx_root (&rs6000_compare_op1, 1);
10026 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
10027 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
10028 toc_hash_mark_table);
10031 machopic_add_gc_roots ();
10038 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
10039 reference and a constant. */
10042 symbolic_operand (op)
10045 switch (GET_CODE (op))
10052 return (GET_CODE (op) == SYMBOL_REF ||
10053 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
10054 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
10055 && GET_CODE (XEXP (op, 1)) == CONST_INT);
10062 #ifdef RS6000_LONG_BRANCH
10064 static tree stub_list = 0;
10066 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
10067 procedure calls to the linked list. */
10070 add_compiler_stub (label_name, function_name, line_number)
10072 tree function_name;
10075 tree stub = build_tree_list (function_name, label_name);
10076 TREE_TYPE (stub) = build_int_2 (line_number, 0);
10077 TREE_CHAIN (stub) = stub_list;
10081 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
10082 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
10083 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
10085 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10086 handling procedure calls from the linked list and initializes the
10090 output_compiler_stub ()
10093 char label_buf[256];
10095 tree tmp_stub, stub;
10098 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10100 fprintf (asm_out_file,
10101 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10103 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10104 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10105 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10106 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10108 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10110 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10113 label_buf[0] = '_';
10114 strcpy (label_buf+1,
10115 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10118 strcpy (tmp_buf, "lis r12,hi16(");
10119 strcat (tmp_buf, label_buf);
10120 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
10121 strcat (tmp_buf, label_buf);
10122 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
10123 output_asm_insn (tmp_buf, 0);
10125 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10126 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10127 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
10128 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10134 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
10135 already there or not. */
10138 no_previous_def (function_name)
10139 tree function_name;
10142 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10143 if (function_name == STUB_FUNCTION_NAME (stub))
10148 /* GET_PREV_LABEL gets the label name from the previous definition of
10152 get_prev_label (function_name)
10153 tree function_name;
10156 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10157 if (function_name == STUB_FUNCTION_NAME (stub))
10158 return STUB_LABEL_NAME (stub);
10162 /* INSN is either a function call or a millicode call. It may have an
10163 unconditional jump in its delay slot.
10165 CALL_DEST is the routine we are calling. */
10168 output_call (insn, call_dest, operand_number)
10171 int operand_number;
10173 static char buf[256];
10174 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
10177 tree funname = get_identifier (XSTR (call_dest, 0));
10179 if (no_previous_def (funname))
10182 rtx label_rtx = gen_label_rtx ();
10183 char *label_buf, temp_buf[256];
10184 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
10185 CODE_LABEL_NUMBER (label_rtx));
10186 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
10187 labelname = get_identifier (label_buf);
10188 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
10190 line_number = NOTE_LINE_NUMBER (insn);
10191 add_compiler_stub (labelname, funname, line_number);
10194 labelname = get_prev_label (funname);
10196 sprintf (buf, "jbsr %%z%d,%.246s",
10197 operand_number, IDENTIFIER_POINTER (labelname));
10202 sprintf (buf, "bl %%z%d", operand_number);
10207 #endif /* RS6000_LONG_BRANCH */
10209 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
10211 const char *const symbol_ = (SYMBOL); \
10212 char *buffer_ = (BUF); \
10213 if (symbol_[0] == '"') \
10215 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
10217 else if (name_needs_quotes(symbol_)) \
10219 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
10223 sprintf(buffer_, "L%d$%s", (N), symbol_); \
10228 /* Generate PIC and indirect symbol stubs. */
10231 machopic_output_stub (file, symb, stub)
10233 const char *symb, *stub;
10235 unsigned int length;
10236 char *symbol_name, *lazy_ptr_name;
10237 char *local_label_0;
10238 static int label = 0;
10240 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
10241 STRIP_NAME_ENCODING (symb, symb);
10245 length = strlen (symb);
10246 symbol_name = alloca (length + 32);
10247 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
10249 lazy_ptr_name = alloca (length + 32);
10250 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
10252 local_label_0 = alloca (length + 32);
10253 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
10256 machopic_picsymbol_stub_section ();
10258 machopic_symbol_stub_section ();
10260 fprintf (file, "%s:\n", stub);
10261 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10265 fprintf (file, "\tmflr r0\n");
10266 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
10267 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
10268 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
10269 lazy_ptr_name, local_label_0);
10270 fprintf (file, "\tmtlr r0\n");
10271 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
10272 lazy_ptr_name, local_label_0);
10273 fprintf (file, "\tmtctr r12\n");
10274 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
10275 lazy_ptr_name, local_label_0);
10276 fprintf (file, "\tbctr\n");
10279 fprintf (file, "non-pure not supported\n");
10281 machopic_lazy_symbol_ptr_section ();
10282 fprintf (file, "%s:\n", lazy_ptr_name);
10283 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10284 fprintf (file, "\t.long dyld_stub_binding_helper\n");
10287 /* Legitimize PIC addresses. If the address is already
10288 position-independent, we return ORIG. Newly generated
10289 position-independent addresses go into a reg. This is REG if non
10290 zero, otherwise we allocate register(s) as necessary. */
10292 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
10295 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
10297 enum machine_mode mode;
10302 if (reg == NULL && ! reload_in_progress && ! reload_completed)
10303 reg = gen_reg_rtx (Pmode);
10305 if (GET_CODE (orig) == CONST)
10307 if (GET_CODE (XEXP (orig, 0)) == PLUS
10308 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
10311 if (GET_CODE (XEXP (orig, 0)) == PLUS)
10314 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
10317 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
10323 if (GET_CODE (offset) == CONST_INT)
10325 if (SMALL_INT (offset))
10326 return plus_constant (base, INTVAL (offset));
10327 else if (! reload_in_progress && ! reload_completed)
10328 offset = force_reg (Pmode, offset);
10331 rtx mem = force_const_mem (Pmode, orig);
10332 return machopic_legitimize_pic_address (mem, Pmode, reg);
10335 return gen_rtx (PLUS, Pmode, base, offset);
10338 /* Fall back on generic machopic code. */
10339 return machopic_legitimize_pic_address (orig, mode, reg);
10342 /* This is just a placeholder to make linking work without having to
10343 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
10344 ever needed for Darwin (not too likely!) this would have to get a
10345 real definition. */
10352 #endif /* TARGET_MACHO */
10355 static unsigned int
10356 rs6000_elf_section_type_flags (decl, name, reloc)
10361 unsigned int flags = default_section_type_flags (decl, name, reloc);
10363 if (TARGET_RELOCATABLE)
10364 flags |= SECTION_WRITE;
10369 /* Record an element in the table of global constructors. SYMBOL is
10370 a SYMBOL_REF of the function to be called; PRIORITY is a number
10371 between 0 and MAX_INIT_PRIORITY.
10373 This differs from default_named_section_asm_out_constructor in
10374 that we have special handling for -mrelocatable. */
10377 rs6000_elf_asm_out_constructor (symbol, priority)
10381 const char *section = ".ctors";
10384 if (priority != DEFAULT_INIT_PRIORITY)
10386 sprintf (buf, ".ctors.%.5u",
10387 /* Invert the numbering so the linker puts us in the proper
10388 order; constructors are run from right to left, and the
10389 linker sorts in increasing order. */
10390 MAX_INIT_PRIORITY - priority);
10394 named_section_flags (section, SECTION_WRITE);
10395 assemble_align (POINTER_SIZE);
10397 if (TARGET_RELOCATABLE)
10399 fputs ("\t.long (", asm_out_file);
10400 output_addr_const (asm_out_file, symbol);
10401 fputs (")@fixup\n", asm_out_file);
10404 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10408 rs6000_elf_asm_out_destructor (symbol, priority)
10412 const char *section = ".dtors";
10415 if (priority != DEFAULT_INIT_PRIORITY)
10417 sprintf (buf, ".dtors.%.5u",
10418 /* Invert the numbering so the linker puts us in the proper
10419 order; constructors are run from right to left, and the
10420 linker sorts in increasing order. */
10421 MAX_INIT_PRIORITY - priority);
10425 named_section_flags (section, SECTION_WRITE);
10426 assemble_align (POINTER_SIZE);
10428 if (TARGET_RELOCATABLE)
10430 fputs ("\t.long (", asm_out_file);
10431 output_addr_const (asm_out_file, symbol);
10432 fputs (")@fixup\n", asm_out_file);
10435 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10439 #ifdef OBJECT_FORMAT_COFF
10441 xcoff_asm_named_section (name, flags)
10443 unsigned int flags ATTRIBUTE_UNUSED;
10445 fprintf (asm_out_file, "\t.csect %s\n", name);