1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
167 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
168 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
169 static void rs6000_parse_abi_options PARAMS ((void));
170 static int first_altivec_reg_to_save PARAMS ((void));
171 static unsigned int compute_vrsave_mask PARAMS ((void));
172 static void is_altivec_return_reg PARAMS ((rtx, void *));
173 int vrsave_operation PARAMS ((rtx, enum machine_mode));
174 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
175 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 /* Default register names. */
178 char rs6000_reg_names[][8] =
180 "0", "1", "2", "3", "4", "5", "6", "7",
181 "8", "9", "10", "11", "12", "13", "14", "15",
182 "16", "17", "18", "19", "20", "21", "22", "23",
183 "24", "25", "26", "27", "28", "29", "30", "31",
184 "0", "1", "2", "3", "4", "5", "6", "7",
185 "8", "9", "10", "11", "12", "13", "14", "15",
186 "16", "17", "18", "19", "20", "21", "22", "23",
187 "24", "25", "26", "27", "28", "29", "30", "31",
188 "mq", "lr", "ctr","ap",
189 "0", "1", "2", "3", "4", "5", "6", "7",
191 /* AltiVec registers. */
192 "0", "1", "2", "3", "4", "5", "6", "7",
193 "8", "9", "10", "11", "12", "13", "14", "15",
194 "16", "17", "18", "19", "20", "21", "22", "23",
195 "24", "25", "26", "27", "28", "29", "30", "31",
199 #ifdef TARGET_REGNAMES
200 static const char alt_reg_names[][8] =
202 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
203 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
204 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
205 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
206 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
207 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
208 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
209 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
210 "mq", "lr", "ctr", "ap",
211 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
213 /* AltiVec registers. */
214 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
215 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
216 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
217 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
222 #ifndef MASK_STRICT_ALIGN
223 #define MASK_STRICT_ALIGN 0
226 /* Initialize the GCC target structure. */
227 #undef TARGET_ATTRIBUTE_TABLE
228 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
230 #undef TARGET_ASM_ALIGNED_DI_OP
231 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
233 /* Default unaligned ops are only provided for ELF. Find the ops needed
234 for non-ELF systems. */
235 #ifndef OBJECT_FORMAT_ELF
236 #ifdef OBJECT_FORMAT_COFF
237 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
239 #undef TARGET_ASM_UNALIGNED_HI_OP
240 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
241 #undef TARGET_ASM_UNALIGNED_SI_OP
242 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
243 #undef TARGET_ASM_UNALIGNED_DI_OP
244 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
247 #undef TARGET_ASM_UNALIGNED_HI_OP
248 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
249 #undef TARGET_ASM_UNALIGNED_SI_OP
250 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
254 /* This hook deals with fixups for relocatable code and DI-mode objects
256 #undef TARGET_ASM_INTEGER
257 #define TARGET_ASM_INTEGER rs6000_assemble_integer
259 #undef TARGET_ASM_FUNCTION_PROLOGUE
260 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
261 #undef TARGET_ASM_FUNCTION_EPILOGUE
262 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
265 #undef TARGET_SECTION_TYPE_FLAGS
266 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
269 #undef TARGET_SCHED_ISSUE_RATE
270 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
271 #undef TARGET_SCHED_ADJUST_COST
272 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
273 #undef TARGET_SCHED_ADJUST_PRIORITY
274 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
276 #undef TARGET_INIT_BUILTINS
277 #define TARGET_INIT_BUILTINS rs6000_init_builtins
279 #undef TARGET_EXPAND_BUILTIN
280 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
282 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
283 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
285 struct gcc_target targetm = TARGET_INITIALIZER;
287 /* Override command line options. Mostly we process the processor
288 type and sometimes adjust other TARGET_ options. */
291 rs6000_override_options (default_cpu)
292 const char *default_cpu;
295 struct rs6000_cpu_select *ptr;
297 /* Simplify the entries below by making a mask for any POWER
298 variant and any PowerPC variant. */
300 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
301 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
302 | MASK_PPC_GFXOPT | MASK_POWERPC64)
303 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
307 const char *const name; /* Canonical processor name. */
308 const enum processor_type processor; /* Processor type enum value. */
309 const int target_enable; /* Target flags to enable. */
310 const int target_disable; /* Target flags to disable. */
311 } const processor_target_table[]
312 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
313 POWER_MASKS | POWERPC_MASKS},
314 {"power", PROCESSOR_POWER,
315 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
316 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
317 {"power2", PROCESSOR_POWER,
318 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
319 POWERPC_MASKS | MASK_NEW_MNEMONICS},
320 {"power3", PROCESSOR_PPC630,
321 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
322 POWER_MASKS | MASK_PPC_GPOPT},
323 {"powerpc", PROCESSOR_POWERPC,
324 MASK_POWERPC | MASK_NEW_MNEMONICS,
325 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
326 {"powerpc64", PROCESSOR_POWERPC64,
327 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
328 POWER_MASKS | POWERPC_OPT_MASKS},
329 {"rios", PROCESSOR_RIOS1,
330 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
331 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
332 {"rios1", PROCESSOR_RIOS1,
333 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
334 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
335 {"rsc", PROCESSOR_PPC601,
336 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
337 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
338 {"rsc1", PROCESSOR_PPC601,
339 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
340 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
341 {"rios2", PROCESSOR_RIOS2,
342 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
343 POWERPC_MASKS | MASK_NEW_MNEMONICS},
344 {"rs64a", PROCESSOR_RS64A,
345 MASK_POWERPC | MASK_NEW_MNEMONICS,
346 POWER_MASKS | POWERPC_OPT_MASKS},
347 {"401", PROCESSOR_PPC403,
348 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
349 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
350 {"403", PROCESSOR_PPC403,
351 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
352 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
353 {"405", PROCESSOR_PPC405,
354 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
355 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
356 {"505", PROCESSOR_MPCCORE,
357 MASK_POWERPC | MASK_NEW_MNEMONICS,
358 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
359 {"601", PROCESSOR_PPC601,
360 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
361 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
362 {"602", PROCESSOR_PPC603,
363 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
364 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
365 {"603", PROCESSOR_PPC603,
366 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
367 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
368 {"603e", PROCESSOR_PPC603,
369 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
370 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
371 {"ec603e", PROCESSOR_PPC603,
372 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
373 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
374 {"604", PROCESSOR_PPC604,
375 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
376 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
377 {"604e", PROCESSOR_PPC604e,
378 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
379 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
380 {"620", PROCESSOR_PPC620,
381 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
382 POWER_MASKS | MASK_PPC_GPOPT},
383 {"630", PROCESSOR_PPC630,
384 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
385 POWER_MASKS | MASK_PPC_GPOPT},
386 {"740", PROCESSOR_PPC750,
387 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
389 {"750", PROCESSOR_PPC750,
390 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
391 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
392 {"7400", PROCESSOR_PPC7400,
393 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
395 {"7450", PROCESSOR_PPC7450,
396 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
397 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
398 {"801", PROCESSOR_MPCCORE,
399 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
400 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
401 {"821", PROCESSOR_MPCCORE,
402 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
404 {"823", PROCESSOR_MPCCORE,
405 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
407 {"860", PROCESSOR_MPCCORE,
408 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
411 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
413 /* Save current -mmultiple/-mno-multiple status. */
414 int multiple = TARGET_MULTIPLE;
415 /* Save current -mstring/-mno-string status. */
416 int string = TARGET_STRING;
418 /* Identify the processor type. */
419 rs6000_select[0].string = default_cpu;
420 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
422 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
424 ptr = &rs6000_select[i];
425 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
427 for (j = 0; j < ptt_size; j++)
428 if (! strcmp (ptr->string, processor_target_table[j].name))
431 rs6000_cpu = processor_target_table[j].processor;
435 target_flags |= processor_target_table[j].target_enable;
436 target_flags &= ~processor_target_table[j].target_disable;
442 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
446 /* If we are optimizing big endian systems for space, use the store
447 multiple instructions. */
448 if (BYTES_BIG_ENDIAN && optimize_size)
449 target_flags |= MASK_MULTIPLE;
451 /* If -mmultiple or -mno-multiple was explicitly used, don't
452 override with the processor default */
453 if (TARGET_MULTIPLE_SET)
454 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
456 /* If -mstring or -mno-string was explicitly used, don't override
457 with the processor default. */
458 if (TARGET_STRING_SET)
459 target_flags = (target_flags & ~MASK_STRING) | string;
461 /* Don't allow -mmultiple or -mstring on little endian systems
462 unless the cpu is a 750, because the hardware doesn't support the
463 instructions used in little endian mode, and causes an alignment
464 trap. The 750 does not cause an alignment trap (except when the
465 target is unaligned). */
467 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
471 target_flags &= ~MASK_MULTIPLE;
472 if (TARGET_MULTIPLE_SET)
473 warning ("-mmultiple is not supported on little endian systems");
478 target_flags &= ~MASK_STRING;
479 if (TARGET_STRING_SET)
480 warning ("-mstring is not supported on little endian systems");
484 if (flag_pic && DEFAULT_ABI == ABI_AIX)
486 warning ("-f%s ignored (all code is position independent)",
487 (flag_pic > 1) ? "PIC" : "pic");
491 #ifdef XCOFF_DEBUGGING_INFO
492 if (flag_function_sections && (write_symbols != NO_DEBUG)
493 && DEFAULT_ABI == ABI_AIX)
495 warning ("-ffunction-sections disabled on AIX when debugging");
496 flag_function_sections = 0;
499 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
501 warning ("-fdata-sections not supported on AIX");
502 flag_data_sections = 0;
506 /* Set debug flags */
507 if (rs6000_debug_name)
509 if (! strcmp (rs6000_debug_name, "all"))
510 rs6000_debug_stack = rs6000_debug_arg = 1;
511 else if (! strcmp (rs6000_debug_name, "stack"))
512 rs6000_debug_stack = 1;
513 else if (! strcmp (rs6000_debug_name, "arg"))
514 rs6000_debug_arg = 1;
516 error ("unknown -mdebug-%s switch", rs6000_debug_name);
519 /* Set size of long double */
520 rs6000_long_double_type_size = 64;
521 if (rs6000_long_double_size_string)
524 int size = strtol (rs6000_long_double_size_string, &tail, 10);
525 if (*tail != '\0' || (size != 64 && size != 128))
526 error ("Unknown switch -mlong-double-%s",
527 rs6000_long_double_size_string);
529 rs6000_long_double_type_size = size;
532 /* Handle -mabi= options. */
533 rs6000_parse_abi_options ();
535 #ifdef TARGET_REGNAMES
536 /* If the user desires alternate register names, copy in the
537 alternate names now. */
539 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
542 #ifdef SUBTARGET_OVERRIDE_OPTIONS
543 SUBTARGET_OVERRIDE_OPTIONS;
545 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
546 SUBSUBTARGET_OVERRIDE_OPTIONS;
549 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
550 If -maix-struct-return or -msvr4-struct-return was explicitly
551 used, don't override with the ABI default. */
552 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
554 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
555 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
557 target_flags |= MASK_AIX_STRUCT_RET;
560 /* Register global variables with the garbage collector. */
561 rs6000_add_gc_roots ();
563 /* Allocate an alias set for register saves & restores from stack. */
564 rs6000_sr_alias_set = new_alias_set ();
567 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
569 /* We can only guarantee the availability of DI pseudo-ops when
570 assembling for 64-bit targets. */
573 targetm.asm_out.aligned_op.di = NULL;
574 targetm.asm_out.unaligned_op.di = NULL;
577 /* Arrange to save and restore machine status around nested functions. */
578 init_machine_status = rs6000_init_machine_status;
579 free_machine_status = rs6000_free_machine_status;
582 /* Handle -mabi= options. */
584 rs6000_parse_abi_options ()
586 if (rs6000_abi_string == 0)
588 else if (! strcmp (rs6000_abi_string, "altivec"))
589 rs6000_altivec_abi = 1;
591 error ("unknown ABI specified: '%s'", rs6000_abi_string);
595 optimization_options (level, size)
596 int level ATTRIBUTE_UNUSED;
597 int size ATTRIBUTE_UNUSED;
601 /* Do anything needed at the start of the asm file. */
604 rs6000_file_start (file, default_cpu)
606 const char *default_cpu;
610 const char *start = buffer;
611 struct rs6000_cpu_select *ptr;
613 if (flag_verbose_asm)
615 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
616 rs6000_select[0].string = default_cpu;
618 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
620 ptr = &rs6000_select[i];
621 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
623 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
629 switch (rs6000_sdata)
631 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
632 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
633 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
634 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
637 if (rs6000_sdata && g_switch_value)
639 fprintf (file, "%s -G %d", start, g_switch_value);
650 /* Create a CONST_DOUBLE from a string. */
653 rs6000_float_const (string, mode)
655 enum machine_mode mode;
657 REAL_VALUE_TYPE value;
658 value = REAL_VALUE_ATOF (string, mode);
659 return immed_real_const_1 (value, mode);
662 /* Return non-zero if this function is known to have a null epilogue. */
667 if (reload_completed)
669 rs6000_stack_t *info = rs6000_stack_info ();
671 if (info->first_gp_reg_save == 32
672 && info->first_fp_reg_save == 64
673 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
676 && info->vrsave_mask == 0
684 /* Returns 1 always. */
687 any_operand (op, mode)
688 rtx op ATTRIBUTE_UNUSED;
689 enum machine_mode mode ATTRIBUTE_UNUSED;
694 /* Returns 1 if op is the count register. */
696 count_register_operand (op, mode)
698 enum machine_mode mode ATTRIBUTE_UNUSED;
700 if (GET_CODE (op) != REG)
703 if (REGNO (op) == COUNT_REGISTER_REGNUM)
706 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
713 xer_operand (op, mode)
715 enum machine_mode mode ATTRIBUTE_UNUSED;
717 if (GET_CODE (op) != REG)
720 if (XER_REGNO_P (REGNO (op)))
726 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
727 by such constants completes more quickly. */
730 s8bit_cint_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 return ( GET_CODE (op) == CONST_INT
735 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
738 /* Return 1 if OP is a constant that can fit in a D field. */
741 short_cint_operand (op, mode)
743 enum machine_mode mode ATTRIBUTE_UNUSED;
745 return (GET_CODE (op) == CONST_INT
746 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
749 /* Similar for an unsigned D field. */
752 u_short_cint_operand (op, mode)
754 enum machine_mode mode ATTRIBUTE_UNUSED;
756 return (GET_CODE (op) == CONST_INT
757 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
760 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
763 non_short_cint_operand (op, mode)
765 enum machine_mode mode ATTRIBUTE_UNUSED;
767 return (GET_CODE (op) == CONST_INT
768 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
771 /* Returns 1 if OP is a CONST_INT that is a positive value
772 and an exact power of 2. */
775 exact_log2_cint_operand (op, mode)
777 enum machine_mode mode ATTRIBUTE_UNUSED;
779 return (GET_CODE (op) == CONST_INT
781 && exact_log2 (INTVAL (op)) >= 0);
784 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
788 gpc_reg_operand (op, mode)
790 enum machine_mode mode;
792 return (register_operand (op, mode)
793 && (GET_CODE (op) != REG
794 || (REGNO (op) >= ARG_POINTER_REGNUM
795 && !XER_REGNO_P (REGNO (op)))
796 || REGNO (op) < MQ_REGNO));
799 /* Returns 1 if OP is either a pseudo-register or a register denoting a
803 cc_reg_operand (op, mode)
805 enum machine_mode mode;
807 return (register_operand (op, mode)
808 && (GET_CODE (op) != REG
809 || REGNO (op) >= FIRST_PSEUDO_REGISTER
810 || CR_REGNO_P (REGNO (op))));
813 /* Returns 1 if OP is either a pseudo-register or a register denoting a
814 CR field that isn't CR0. */
817 cc_reg_not_cr0_operand (op, mode)
819 enum machine_mode mode;
821 return (register_operand (op, mode)
822 && (GET_CODE (op) != REG
823 || REGNO (op) >= FIRST_PSEUDO_REGISTER
824 || CR_REGNO_NOT_CR0_P (REGNO (op))));
827 /* Returns 1 if OP is either a constant integer valid for a D-field or
828 a non-special register. If a register, it must be in the proper
829 mode unless MODE is VOIDmode. */
832 reg_or_short_operand (op, mode)
834 enum machine_mode mode;
836 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
839 /* Similar, except check if the negation of the constant would be
840 valid for a D-field. */
843 reg_or_neg_short_operand (op, mode)
845 enum machine_mode mode;
847 if (GET_CODE (op) == CONST_INT)
848 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
850 return gpc_reg_operand (op, mode);
853 /* Return 1 if the operand is either a register or an integer whose
854 high-order 16 bits are zero. */
857 reg_or_u_short_operand (op, mode)
859 enum machine_mode mode;
861 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
864 /* Return 1 is the operand is either a non-special register or ANY
868 reg_or_cint_operand (op, mode)
870 enum machine_mode mode;
872 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
875 /* Return 1 is the operand is either a non-special register or ANY
876 32-bit signed constant integer. */
879 reg_or_arith_cint_operand (op, mode)
881 enum machine_mode mode;
883 return (gpc_reg_operand (op, mode)
884 || (GET_CODE (op) == CONST_INT
885 #if HOST_BITS_PER_WIDE_INT != 32
886 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
887 < (unsigned HOST_WIDE_INT) 0x100000000ll)
892 /* Return 1 is the operand is either a non-special register or a 32-bit
893 signed constant integer valid for 64-bit addition. */
896 reg_or_add_cint64_operand (op, mode)
898 enum machine_mode mode;
900 return (gpc_reg_operand (op, mode)
901 || (GET_CODE (op) == CONST_INT
902 && INTVAL (op) < 0x7fff8000
903 #if HOST_BITS_PER_WIDE_INT != 32
904 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
910 /* Return 1 is the operand is either a non-special register or a 32-bit
911 signed constant integer valid for 64-bit subtraction. */
914 reg_or_sub_cint64_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 && (- INTVAL (op)) < 0x7fff8000
921 #if HOST_BITS_PER_WIDE_INT != 32
922 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
928 /* Return 1 is the operand is either a non-special register or ANY
929 32-bit unsigned constant integer. */
932 reg_or_logical_cint_operand (op, mode)
934 enum machine_mode mode;
936 if (GET_CODE (op) == CONST_INT)
938 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
940 if (GET_MODE_BITSIZE (mode) <= 32)
947 return ((INTVAL (op) & GET_MODE_MASK (mode)
948 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
950 else if (GET_CODE (op) == CONST_DOUBLE)
952 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
956 return CONST_DOUBLE_HIGH (op) == 0;
959 return gpc_reg_operand (op, mode);
962 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
965 got_operand (op, mode)
967 enum machine_mode mode ATTRIBUTE_UNUSED;
969 return (GET_CODE (op) == SYMBOL_REF
970 || GET_CODE (op) == CONST
971 || GET_CODE (op) == LABEL_REF);
974 /* Return 1 if the operand is a simple references that can be loaded via
975 the GOT (labels involving addition aren't allowed). */
978 got_no_const_operand (op, mode)
980 enum machine_mode mode ATTRIBUTE_UNUSED;
982 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
985 /* Return the number of instructions it takes to form a constant in an
989 num_insns_constant_wide (value)
992 /* signed constant loadable with {cal|addi} */
993 if (CONST_OK_FOR_LETTER_P (value, 'I'))
996 /* constant loadable with {cau|addis} */
997 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1000 #if HOST_BITS_PER_WIDE_INT == 64
1001 else if (TARGET_POWERPC64)
1003 HOST_WIDE_INT low = value & 0xffffffff;
1004 HOST_WIDE_INT high = value >> 32;
1006 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
1008 if (high == 0 && (low & 0x80000000) == 0)
1011 else if (high == -1 && (low & 0x80000000) != 0)
1015 return num_insns_constant_wide (high) + 1;
1018 return (num_insns_constant_wide (high)
1019 + num_insns_constant_wide (low) + 1);
1028 num_insns_constant (op, mode)
1030 enum machine_mode mode;
1032 if (GET_CODE (op) == CONST_INT)
1034 #if HOST_BITS_PER_WIDE_INT == 64
1035 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1036 && mask64_operand (op, mode))
1040 return num_insns_constant_wide (INTVAL (op));
1043 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1048 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1049 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1050 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1053 else if (GET_CODE (op) == CONST_DOUBLE)
1059 int endian = (WORDS_BIG_ENDIAN == 0);
1061 if (mode == VOIDmode || mode == DImode)
1063 high = CONST_DOUBLE_HIGH (op);
1064 low = CONST_DOUBLE_LOW (op);
1068 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1069 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1071 low = l[1 - endian];
1075 return (num_insns_constant_wide (low)
1076 + num_insns_constant_wide (high));
1080 if (high == 0 && (low & 0x80000000) == 0)
1081 return num_insns_constant_wide (low);
1083 else if (high == -1 && (low & 0x80000000) != 0)
1084 return num_insns_constant_wide (low);
1086 else if (mask64_operand (op, mode))
1090 return num_insns_constant_wide (high) + 1;
1093 return (num_insns_constant_wide (high)
1094 + num_insns_constant_wide (low) + 1);
1102 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1103 register with one instruction per word. We only do this if we can
1104 safely read CONST_DOUBLE_{LOW,HIGH}. */
1107 easy_fp_constant (op, mode)
1109 enum machine_mode mode;
1111 if (GET_CODE (op) != CONST_DOUBLE
1112 || GET_MODE (op) != mode
1113 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1116 /* Consider all constants with -msoft-float to be easy. */
1117 if (TARGET_SOFT_FLOAT && mode != DImode)
1120 /* If we are using V.4 style PIC, consider all constants to be hard. */
1121 if (flag_pic && DEFAULT_ABI == ABI_V4)
1124 #ifdef TARGET_RELOCATABLE
1125 /* Similarly if we are using -mrelocatable, consider all constants
1127 if (TARGET_RELOCATABLE)
1136 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1137 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1139 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1140 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1143 else if (mode == SFmode)
1148 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1149 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1151 return num_insns_constant_wide (l) == 1;
1154 else if (mode == DImode)
1155 return ((TARGET_POWERPC64
1156 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1157 || (num_insns_constant (op, DImode) <= 2));
1159 else if (mode == SImode)
1165 /* Return 1 if the operand is 0.0. */
1167 zero_fp_constant (op, mode)
1169 enum machine_mode mode;
1171 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1174 /* Return 1 if the operand is in volatile memory. Note that during
1175 the RTL generation phase, memory_operand does not return TRUE for
1176 volatile memory references. So this function allows us to
1177 recognize volatile references where its safe. */
1180 volatile_mem_operand (op, mode)
1182 enum machine_mode mode;
1184 if (GET_CODE (op) != MEM)
1187 if (!MEM_VOLATILE_P (op))
1190 if (mode != GET_MODE (op))
1193 if (reload_completed)
1194 return memory_operand (op, mode);
1196 if (reload_in_progress)
1197 return strict_memory_address_p (mode, XEXP (op, 0));
1199 return memory_address_p (mode, XEXP (op, 0));
1202 /* Return 1 if the operand is an offsettable memory operand. */
1205 offsettable_mem_operand (op, mode)
1207 enum machine_mode mode;
1209 return ((GET_CODE (op) == MEM)
1210 && offsettable_address_p (reload_completed || reload_in_progress,
1211 mode, XEXP (op, 0)));
1214 /* Return 1 if the operand is either an easy FP constant (see above) or
1218 mem_or_easy_const_operand (op, mode)
1220 enum machine_mode mode;
1222 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1225 /* Return 1 if the operand is either a non-special register or an item
1226 that can be used as the operand of a `mode' add insn. */
1229 add_operand (op, mode)
1231 enum machine_mode mode;
1233 if (GET_CODE (op) == CONST_INT)
1234 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1235 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1237 return gpc_reg_operand (op, mode);
1240 /* Return 1 if OP is a constant but not a valid add_operand. */
1243 non_add_cint_operand (op, mode)
1245 enum machine_mode mode ATTRIBUTE_UNUSED;
1247 return (GET_CODE (op) == CONST_INT
1248 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1249 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1252 /* Return 1 if the operand is a non-special register or a constant that
1253 can be used as the operand of an OR or XOR insn on the RS/6000. */
1256 logical_operand (op, mode)
1258 enum machine_mode mode;
1260 HOST_WIDE_INT opl, oph;
1262 if (gpc_reg_operand (op, mode))
1265 if (GET_CODE (op) == CONST_INT)
1267 opl = INTVAL (op) & GET_MODE_MASK (mode);
1269 #if HOST_BITS_PER_WIDE_INT <= 32
1270 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1274 else if (GET_CODE (op) == CONST_DOUBLE)
1276 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1279 opl = CONST_DOUBLE_LOW (op);
1280 oph = CONST_DOUBLE_HIGH (op);
1287 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1288 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1291 /* Return 1 if C is a constant that is not a logical operand (as
1292 above), but could be split into one. */
1295 non_logical_cint_operand (op, mode)
1297 enum machine_mode mode;
1299 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1300 && ! logical_operand (op, mode)
1301 && reg_or_logical_cint_operand (op, mode));
1304 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1305 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1306 Reject all ones and all zeros, since these should have been optimized
1307 away and confuse the making of MB and ME. */
1310 mask_operand (op, mode)
1312 enum machine_mode mode ATTRIBUTE_UNUSED;
1314 HOST_WIDE_INT c, lsb;
1316 if (GET_CODE (op) != CONST_INT)
1321 /* We don't change the number of transitions by inverting,
1322 so make sure we start with the LS bit zero. */
1326 /* Reject all zeros or all ones. */
1330 /* Find the first transition. */
1333 /* Invert to look for a second transition. */
1336 /* Erase first transition. */
1339 /* Find the second transition (if any). */
1342 /* Match if all the bits above are 1's (or c is zero). */
1346 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1347 It is if there are no more than one 1->0 or 0->1 transitions.
1348 Reject all ones and all zeros, since these should have been optimized
1349 away and confuse the making of MB and ME. */
1352 mask64_operand (op, mode)
1354 enum machine_mode mode;
1356 if (GET_CODE (op) == CONST_INT)
1358 HOST_WIDE_INT c, lsb;
1360 /* We don't change the number of transitions by inverting,
1361 so make sure we start with the LS bit zero. */
1366 /* Reject all zeros or all ones. */
1370 /* Find the transition, and check that all bits above are 1's. */
1374 else if (GET_CODE (op) == CONST_DOUBLE
1375 && (mode == VOIDmode || mode == DImode))
1377 HOST_WIDE_INT low, high, lsb;
1379 if (HOST_BITS_PER_WIDE_INT < 64)
1380 high = CONST_DOUBLE_HIGH (op);
1382 low = CONST_DOUBLE_LOW (op);
1385 if (HOST_BITS_PER_WIDE_INT < 64)
1392 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1396 return high == -lsb;
1400 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1406 /* Return 1 if the operand is either a non-special register or a constant
1407 that can be used as the operand of a PowerPC64 logical AND insn. */
1410 and64_operand (op, mode)
1412 enum machine_mode mode;
1414 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1415 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1417 return (logical_operand (op, mode) || mask64_operand (op, mode));
1420 /* Return 1 if the operand is either a non-special register or a
1421 constant that can be used as the operand of an RS/6000 logical AND insn. */
1424 and_operand (op, mode)
1426 enum machine_mode mode;
1428 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1429 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1431 return (logical_operand (op, mode) || mask_operand (op, mode));
1434 /* Return 1 if the operand is a general register or memory operand. */
1437 reg_or_mem_operand (op, mode)
1439 enum machine_mode mode;
1441 return (gpc_reg_operand (op, mode)
1442 || memory_operand (op, mode)
1443 || volatile_mem_operand (op, mode));
1446 /* Return 1 if the operand is a general register or memory operand without
1447 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1451 lwa_operand (op, mode)
1453 enum machine_mode mode;
1457 if (reload_completed && GET_CODE (inner) == SUBREG)
1458 inner = SUBREG_REG (inner);
1460 return gpc_reg_operand (inner, mode)
1461 || (memory_operand (inner, mode)
1462 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1463 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1464 && (GET_CODE (XEXP (inner, 0)) != PLUS
1465 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1466 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1469 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1470 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1474 call_operand (op, mode)
1476 enum machine_mode mode;
1478 if (mode != VOIDmode && GET_MODE (op) != mode)
1481 return (GET_CODE (op) == SYMBOL_REF
1482 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1485 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1486 this file and the function is not weakly defined. */
1489 current_file_function_operand (op, mode)
1491 enum machine_mode mode ATTRIBUTE_UNUSED;
1493 return (GET_CODE (op) == SYMBOL_REF
1494 && (SYMBOL_REF_FLAG (op)
1495 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1496 && ! DECL_WEAK (current_function_decl))));
1499 /* Return 1 if this operand is a valid input for a move insn. */
1502 input_operand (op, mode)
1504 enum machine_mode mode;
1506 /* Memory is always valid. */
1507 if (memory_operand (op, mode))
1510 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1511 if (GET_CODE (op) == CONSTANT_P_RTX)
1514 /* For floating-point, easy constants are valid. */
1515 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1517 && easy_fp_constant (op, mode))
1520 /* Allow any integer constant. */
1521 if (GET_MODE_CLASS (mode) == MODE_INT
1522 && (GET_CODE (op) == CONST_INT
1523 || GET_CODE (op) == CONST_DOUBLE))
1526 /* For floating-point or multi-word mode, the only remaining valid type
1528 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1529 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1530 return register_operand (op, mode);
1532 /* The only cases left are integral modes one word or smaller (we
1533 do not get called for MODE_CC values). These can be in any
1535 if (register_operand (op, mode))
1538 /* A SYMBOL_REF referring to the TOC is valid. */
1539 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1542 /* A constant pool expression (relative to the TOC) is valid */
1543 if (TOC_RELATIVE_EXPR_P (op))
1546 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1548 if (DEFAULT_ABI == ABI_V4
1549 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1550 && small_data_operand (op, Pmode))
1556 /* Return 1 for an operand in small memory on V.4/eabi. */
1559 small_data_operand (op, mode)
1560 rtx op ATTRIBUTE_UNUSED;
1561 enum machine_mode mode ATTRIBUTE_UNUSED;
1566 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1569 if (DEFAULT_ABI != ABI_V4)
1572 if (GET_CODE (op) == SYMBOL_REF)
1575 else if (GET_CODE (op) != CONST
1576 || GET_CODE (XEXP (op, 0)) != PLUS
1577 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1578 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1583 rtx sum = XEXP (op, 0);
1584 HOST_WIDE_INT summand;
1586 /* We have to be careful here, because it is the referenced address
1587 that must be 32k from _SDA_BASE_, not just the symbol. */
1588 summand = INTVAL (XEXP (sum, 1));
1589 if (summand < 0 || summand > g_switch_value)
1592 sym_ref = XEXP (sum, 0);
1595 if (*XSTR (sym_ref, 0) != '@')
1606 constant_pool_expr_1 (op, have_sym, have_toc)
1611 switch (GET_CODE(op))
1614 if (CONSTANT_POOL_ADDRESS_P (op))
1616 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1624 else if (! strcmp (XSTR (op, 0), toc_label_name))
1633 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1634 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1636 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1645 constant_pool_expr_p (op)
1650 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1654 toc_relative_expr_p (op)
1659 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1662 /* Try machine-dependent ways of modifying an illegitimate address
1663 to be legitimate. If we find one, return the new, valid address.
1664 This is used from only one place: `memory_address' in explow.c.
1666 OLDX is the address as it was before break_out_memory_refs was
1667 called. In some cases it is useful to look at this to decide what
1670 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1672 It is always safe for this function to do nothing. It exists to
1673 recognize opportunities to optimize the output.
1675 On RS/6000, first check for the sum of a register with a constant
1676 integer that is out of range. If so, generate code to add the
1677 constant with the low-order 16 bits masked to the register and force
1678 this result into another register (this can be done with `cau').
1679 Then generate an address of REG+(CONST&0xffff), allowing for the
1680 possibility of bit 16 being a one.
1682 Then check for the sum of a register and something not constant, try to
1683 load the other things into a register and return the sum. */
1685 rs6000_legitimize_address (x, oldx, mode)
1687 rtx oldx ATTRIBUTE_UNUSED;
1688 enum machine_mode mode;
1690 if (GET_CODE (x) == PLUS
1691 && GET_CODE (XEXP (x, 0)) == REG
1692 && GET_CODE (XEXP (x, 1)) == CONST_INT
1693 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1695 HOST_WIDE_INT high_int, low_int;
1697 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1698 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1699 if (low_int & 0x8000)
1700 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1701 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1702 GEN_INT (high_int)), 0);
1703 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1705 else if (GET_CODE (x) == PLUS
1706 && GET_CODE (XEXP (x, 0)) == REG
1707 && GET_CODE (XEXP (x, 1)) != CONST_INT
1708 && GET_MODE_NUNITS (mode) == 1
1709 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1710 && (TARGET_POWERPC64 || mode != DImode)
1713 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1714 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1716 else if (ALTIVEC_VECTOR_MODE (mode))
1720 /* Make sure both operands are registers. */
1721 if (GET_CODE (x) == PLUS)
1722 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1723 force_reg (Pmode, XEXP (x, 1)));
1725 reg = force_reg (Pmode, x);
1728 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1729 && GET_CODE (x) != CONST_INT
1730 && GET_CODE (x) != CONST_DOUBLE
1732 && GET_MODE_NUNITS (mode) == 1
1733 && (GET_MODE_BITSIZE (mode) <= 32
1734 || (TARGET_HARD_FLOAT && mode == DFmode)))
1736 rtx reg = gen_reg_rtx (Pmode);
1737 emit_insn (gen_elf_high (reg, (x)));
1738 return gen_rtx_LO_SUM (Pmode, reg, (x));
1740 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1742 && GET_CODE (x) != CONST_INT
1743 && GET_CODE (x) != CONST_DOUBLE
1745 && (TARGET_HARD_FLOAT || mode != DFmode)
1749 rtx reg = gen_reg_rtx (Pmode);
1750 emit_insn (gen_macho_high (reg, (x)));
1751 return gen_rtx_LO_SUM (Pmode, reg, (x));
1754 && CONSTANT_POOL_EXPR_P (x)
1755 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1757 return create_TOC_reference (x);
1763 /* The convention appears to be to define this wherever it is used.
1764 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1765 is now used here. */
1766 #ifndef REG_MODE_OK_FOR_BASE_P
1767 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1770 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1771 replace the input X, or the original X if no replacement is called for.
1772 The output parameter *WIN is 1 if the calling macro should goto WIN,
1775 For RS/6000, we wish to handle large displacements off a base
1776 register by splitting the addend across an addiu/addis and the mem insn.
1777 This cuts number of extra insns needed from 3 to 1.
1779 On Darwin, we use this to generate code for floating point constants.
1780 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1781 The Darwin code is inside #if TARGET_MACHO because only then is
1782 machopic_function_base_name() defined. */
1784 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1786 enum machine_mode mode;
1789 int ind_levels ATTRIBUTE_UNUSED;
1792 /* We must recognize output that we have already generated ourselves. */
1793 if (GET_CODE (x) == PLUS
1794 && GET_CODE (XEXP (x, 0)) == PLUS
1795 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1796 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1797 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1799 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1800 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1801 opnum, (enum reload_type)type);
1807 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1808 && GET_CODE (x) == LO_SUM
1809 && GET_CODE (XEXP (x, 0)) == PLUS
1810 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1811 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1812 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1813 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1814 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1815 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1816 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1818 /* Result of previous invocation of this function on Darwin
1819 floating point constant. */
1820 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1821 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1822 opnum, (enum reload_type)type);
1827 if (GET_CODE (x) == PLUS
1828 && GET_CODE (XEXP (x, 0)) == REG
1829 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1830 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1831 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1833 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1834 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1836 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1838 /* Check for 32-bit overflow. */
1839 if (high + low != val)
1845 /* Reload the high part into a base reg; leave the low part
1846 in the mem directly. */
1848 x = gen_rtx_PLUS (GET_MODE (x),
1849 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1853 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1854 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1855 opnum, (enum reload_type)type);
1860 if (GET_CODE (x) == SYMBOL_REF
1861 && DEFAULT_ABI == ABI_DARWIN
1864 /* Darwin load of floating point constant. */
1865 rtx offset = gen_rtx (CONST, Pmode,
1866 gen_rtx (MINUS, Pmode, x,
1867 gen_rtx (SYMBOL_REF, Pmode,
1868 machopic_function_base_name ())));
1869 x = gen_rtx (LO_SUM, GET_MODE (x),
1870 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1871 gen_rtx (HIGH, Pmode, offset)), offset);
1872 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1873 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1874 opnum, (enum reload_type)type);
1880 && CONSTANT_POOL_EXPR_P (x)
1881 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1883 (x) = create_TOC_reference (x);
1891 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1892 that is a valid memory address for an instruction.
1893 The MODE argument is the machine mode for the MEM expression
1894 that wants to use this address.
1896 On the RS/6000, there are four valid address: a SYMBOL_REF that
1897 refers to a constant pool entry of an address (or the sum of it
1898 plus a constant), a short (16-bit signed) constant plus a register,
1899 the sum of two registers, or a register indirect, possibly with an
1900 auto-increment. For DFmode and DImode with an constant plus register,
1901 we must ensure that both words are addressable or PowerPC64 with offset
1904 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1905 32-bit DImode, TImode), indexed addressing cannot be used because
1906 adjacent memory cells are accessed by adding word-sized offsets
1907 during assembly output. */
1909 rs6000_legitimate_address (mode, x, reg_ok_strict)
1910 enum machine_mode mode;
1914 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1916 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1918 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1920 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1922 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1924 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1926 && GET_CODE (x) == PLUS
1927 && GET_CODE (XEXP (x, 0)) == REG
1928 && XEXP (x, 0) == virtual_stack_vars_rtx
1929 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1931 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1934 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1935 && (TARGET_POWERPC64 || mode != DImode)
1936 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1938 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1943 /* Try to output insns to set TARGET equal to the constant C if it can
1944 be done in less than N insns. Do all computations in MODE.
1945 Returns the place where the output has been placed if it can be
1946 done and the insns have been emitted. If it would take more than N
1947 insns, zero is returned and no insns and emitted. */
1950 rs6000_emit_set_const (dest, mode, source, n)
1952 enum machine_mode mode;
1953 int n ATTRIBUTE_UNUSED;
1955 HOST_WIDE_INT c0, c1;
1957 if (mode == QImode || mode == HImode || mode == SImode)
1960 dest = gen_reg_rtx (mode);
1961 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1965 if (GET_CODE (source) == CONST_INT)
1967 c0 = INTVAL (source);
1970 else if (GET_CODE (source) == CONST_DOUBLE)
1972 #if HOST_BITS_PER_WIDE_INT >= 64
1973 c0 = CONST_DOUBLE_LOW (source);
1976 c0 = CONST_DOUBLE_LOW (source);
1977 c1 = CONST_DOUBLE_HIGH (source);
1983 return rs6000_emit_set_long_const (dest, c0, c1);
1986 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1987 fall back to a straight forward decomposition. We do this to avoid
1988 exponential run times encountered when looking for longer sequences
1989 with rs6000_emit_set_const. */
1991 rs6000_emit_set_long_const (dest, c1, c2)
1993 HOST_WIDE_INT c1, c2;
1995 if (!TARGET_POWERPC64)
1997 rtx operand1, operand2;
1999 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2001 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2003 emit_move_insn (operand1, GEN_INT (c1));
2004 emit_move_insn (operand2, GEN_INT (c2));
2008 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2011 ud2 = (c1 & 0xffff0000) >> 16;
2012 #if HOST_BITS_PER_WIDE_INT >= 64
2016 ud4 = (c2 & 0xffff0000) >> 16;
2018 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2019 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2022 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2024 emit_move_insn (dest, GEN_INT (ud1));
2027 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2028 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2031 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2034 emit_move_insn (dest, GEN_INT (ud2 << 16));
2036 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2038 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2039 || (ud4 == 0 && ! (ud3 & 0x8000)))
2042 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2045 emit_move_insn (dest, GEN_INT (ud3 << 16));
2048 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2049 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2051 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2056 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2059 emit_move_insn (dest, GEN_INT (ud4 << 16));
2062 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2064 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2066 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2067 GEN_INT (ud2 << 16)));
2069 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2075 /* Emit a move from SOURCE to DEST in mode MODE. */
2077 rs6000_emit_move (dest, source, mode)
2080 enum machine_mode mode;
2084 operands[1] = source;
2086 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2087 if (GET_CODE (operands[1]) == CONST_DOUBLE
2088 && ! FLOAT_MODE_P (mode)
2089 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2091 /* FIXME. This should never happen. */
2092 /* Since it seems that it does, do the safe thing and convert
2095 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
2097 if (GET_CODE (operands[1]) == CONST_DOUBLE
2098 && ! FLOAT_MODE_P (mode)
2099 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2100 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2101 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2102 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2105 /* Check if GCC is setting up a block move that will end up using FP
2106 registers as temporaries. We must make sure this is acceptable. */
2107 if (GET_CODE (operands[0]) == MEM
2108 && GET_CODE (operands[1]) == MEM
2110 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2111 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2112 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2113 ? 32 : MEM_ALIGN (operands[0])))
2114 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2116 : MEM_ALIGN (operands[1]))))
2117 && ! MEM_VOLATILE_P (operands [0])
2118 && ! MEM_VOLATILE_P (operands [1]))
2120 emit_move_insn (adjust_address (operands[0], SImode, 0),
2121 adjust_address (operands[1], SImode, 0));
2122 emit_move_insn (adjust_address (operands[0], SImode, 4),
2123 adjust_address (operands[1], SImode, 4));
2127 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2128 operands[1] = force_reg (mode, operands[1]);
2130 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2131 && GET_CODE (operands[0]) == MEM)
2135 if (reload_in_progress || reload_completed)
2136 regnum = true_regnum (operands[1]);
2137 else if (GET_CODE (operands[1]) == REG)
2138 regnum = REGNO (operands[1]);
2142 /* If operands[1] is a register, on POWER it may have
2143 double-precision data in it, so truncate it to single
2145 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2148 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2149 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2150 operands[1] = newreg;
2154 /* Handle the case where reload calls us with an invalid address;
2155 and the case of CONSTANT_P_RTX. */
2156 if (! general_operand (operands[1], mode)
2157 || ! nonimmediate_operand (operands[0], mode)
2158 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
2160 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2164 /* FIXME: In the long term, this switch statement should go away
2165 and be replaced by a sequence of tests based on things like
2171 if (CONSTANT_P (operands[1])
2172 && GET_CODE (operands[1]) != CONST_INT)
2173 operands[1] = force_const_mem (mode, operands[1]);
2179 if (CONSTANT_P (operands[1])
2180 && ! easy_fp_constant (operands[1], mode))
2181 operands[1] = force_const_mem (mode, operands[1]);
2188 /* fixme: aldyh -- allow vector constants when they are implemented. */
2189 if (CONSTANT_P (operands[1]))
2190 operands[1] = force_const_mem (mode, operands[1]);
2195 /* Use default pattern for address of ELF small data */
2198 && DEFAULT_ABI == ABI_V4
2199 && (GET_CODE (operands[1]) == SYMBOL_REF
2200 || GET_CODE (operands[1]) == CONST)
2201 && small_data_operand (operands[1], mode))
2203 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2207 if (DEFAULT_ABI == ABI_V4
2208 && mode == Pmode && mode == SImode
2209 && flag_pic == 1 && got_operand (operands[1], mode))
2211 emit_insn (gen_movsi_got (operands[0], operands[1]));
2215 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2216 && TARGET_NO_TOC && ! flag_pic
2218 && CONSTANT_P (operands[1])
2219 && GET_CODE (operands[1]) != HIGH
2220 && GET_CODE (operands[1]) != CONST_INT)
2222 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2224 /* If this is a function address on -mcall-aixdesc,
2225 convert it to the address of the descriptor. */
2226 if (DEFAULT_ABI == ABI_AIX
2227 && GET_CODE (operands[1]) == SYMBOL_REF
2228 && XSTR (operands[1], 0)[0] == '.')
2230 const char *name = XSTR (operands[1], 0);
2232 while (*name == '.')
2234 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2235 CONSTANT_POOL_ADDRESS_P (new_ref)
2236 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2237 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2238 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2239 operands[1] = new_ref;
2242 if (DEFAULT_ABI == ABI_DARWIN)
2244 emit_insn (gen_macho_high (target, operands[1]));
2245 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2249 emit_insn (gen_elf_high (target, operands[1]));
2250 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2254 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2255 and we have put it in the TOC, we just need to make a TOC-relative
2258 && GET_CODE (operands[1]) == SYMBOL_REF
2259 && CONSTANT_POOL_EXPR_P (operands[1])
2260 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2261 get_pool_mode (operands[1])))
2263 operands[1] = create_TOC_reference (operands[1]);
2265 else if (mode == Pmode
2266 && CONSTANT_P (operands[1])
2267 && ((GET_CODE (operands[1]) != CONST_INT
2268 && ! easy_fp_constant (operands[1], mode))
2269 || (GET_CODE (operands[1]) == CONST_INT
2270 && num_insns_constant (operands[1], mode) > 2)
2271 || (GET_CODE (operands[0]) == REG
2272 && FP_REGNO_P (REGNO (operands[0]))))
2273 && GET_CODE (operands[1]) != HIGH
2274 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2275 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2277 /* Emit a USE operation so that the constant isn't deleted if
2278 expensive optimizations are turned on because nobody
2279 references it. This should only be done for operands that
2280 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2281 This should not be done for operands that contain LABEL_REFs.
2282 For now, we just handle the obvious case. */
2283 if (GET_CODE (operands[1]) != LABEL_REF)
2284 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2287 /* Darwin uses a special PIC legitimizer. */
2288 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2291 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2293 if (operands[0] != operands[1])
2294 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2299 /* If we are to limit the number of things we put in the TOC and
2300 this is a symbol plus a constant we can add in one insn,
2301 just put the symbol in the TOC and add the constant. Don't do
2302 this if reload is in progress. */
2303 if (GET_CODE (operands[1]) == CONST
2304 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2305 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2306 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2307 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2308 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2309 && ! side_effects_p (operands[0]))
2312 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2313 rtx other = XEXP (XEXP (operands[1], 0), 1);
2315 sym = force_reg (mode, sym);
2317 emit_insn (gen_addsi3 (operands[0], sym, other));
2319 emit_insn (gen_adddi3 (operands[0], sym, other));
2323 operands[1] = force_const_mem (mode, operands[1]);
2326 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2327 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2328 get_pool_constant (XEXP (operands[1], 0)),
2329 get_pool_mode (XEXP (operands[1], 0))))
2332 = gen_rtx_MEM (mode,
2333 create_TOC_reference (XEXP (operands[1], 0)));
2334 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2335 RTX_UNCHANGING_P (operands[1]) = 1;
2341 if (GET_CODE (operands[0]) == MEM
2342 && GET_CODE (XEXP (operands[0], 0)) != REG
2343 && ! reload_in_progress)
2345 = replace_equiv_address (operands[0],
2346 copy_addr_to_reg (XEXP (operands[0], 0)));
2348 if (GET_CODE (operands[1]) == MEM
2349 && GET_CODE (XEXP (operands[1], 0)) != REG
2350 && ! reload_in_progress)
2352 = replace_equiv_address (operands[1],
2353 copy_addr_to_reg (XEXP (operands[1], 0)));
2360 /* Above, we may have called force_const_mem which may have returned
2361 an invalid address. If we can, fix this up; otherwise, reload will
2362 have to deal with it. */
2363 if (GET_CODE (operands[1]) == MEM
2364 && ! memory_address_p (mode, XEXP (operands[1], 0))
2365 && ! reload_in_progress)
2366 operands[1] = adjust_address (operands[1], mode, 0);
2368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2372 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2373 for a call to a function whose data type is FNTYPE.
2374 For a library call, FNTYPE is 0.
2376 For incoming args we set the number of arguments in the prototype large
2377 so we never return a PARALLEL. */
2380 init_cumulative_args (cum, fntype, libname, incoming)
2381 CUMULATIVE_ARGS *cum;
2383 rtx libname ATTRIBUTE_UNUSED;
2386 static CUMULATIVE_ARGS zero_cumulative;
2388 *cum = zero_cumulative;
2390 cum->fregno = FP_ARG_MIN_REG;
2391 cum->vregno = ALTIVEC_ARG_MIN_REG;
2392 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2393 cum->call_cookie = CALL_NORMAL;
2394 cum->sysv_gregno = GP_ARG_MIN_REG;
2397 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2399 else if (cum->prototype)
2400 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2401 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2402 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2405 cum->nargs_prototype = 0;
2407 cum->orig_nargs = cum->nargs_prototype;
2409 /* Check for longcall's */
2410 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2411 cum->call_cookie = CALL_LONG;
2413 if (TARGET_DEBUG_ARG)
2415 fprintf (stderr, "\ninit_cumulative_args:");
2418 tree ret_type = TREE_TYPE (fntype);
2419 fprintf (stderr, " ret code = %s,",
2420 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2423 if (cum->call_cookie & CALL_LONG)
2424 fprintf (stderr, " longcall,");
2426 fprintf (stderr, " proto = %d, nargs = %d\n",
2427 cum->prototype, cum->nargs_prototype);
2431 /* If defined, a C expression which determines whether, and in which
2432 direction, to pad out an argument with extra space. The value
2433 should be of type `enum direction': either `upward' to pad above
2434 the argument, `downward' to pad below, or `none' to inhibit
2437 For the AIX ABI structs are always stored left shifted in their
2441 function_arg_padding (mode, type)
2442 enum machine_mode mode;
2445 if (type != 0 && AGGREGATE_TYPE_P (type))
2448 /* This is the default definition. */
2449 return (! BYTES_BIG_ENDIAN
2452 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2453 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2454 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2455 ? downward : upward));
2458 /* If defined, a C expression that gives the alignment boundary, in bits,
2459 of an argument with the specified mode and type. If it is not defined,
2460 PARM_BOUNDARY is used for all arguments.
2462 V.4 wants long longs to be double word aligned. */
2465 function_arg_boundary (mode, type)
2466 enum machine_mode mode;
2467 tree type ATTRIBUTE_UNUSED;
2469 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2471 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2474 return PARM_BOUNDARY;
2477 /* Update the data in CUM to advance over an argument
2478 of mode MODE and data type TYPE.
2479 (TYPE is null for libcalls where that information may not be available.) */
2482 function_arg_advance (cum, mode, type, named)
2483 CUMULATIVE_ARGS *cum;
2484 enum machine_mode mode;
2488 cum->nargs_prototype--;
2490 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2492 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2495 cum->words += RS6000_ARG_SIZE (mode, type);
2497 else if (DEFAULT_ABI == ABI_V4)
2499 if (TARGET_HARD_FLOAT
2500 && (mode == SFmode || mode == DFmode))
2502 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2507 cum->words += cum->words & 1;
2508 cum->words += RS6000_ARG_SIZE (mode, type);
2514 int gregno = cum->sysv_gregno;
2516 /* Aggregates and IEEE quad get passed by reference. */
2517 if ((type && AGGREGATE_TYPE_P (type))
2521 n_words = RS6000_ARG_SIZE (mode, type);
2523 /* Long long is put in odd registers. */
2524 if (n_words == 2 && (gregno & 1) == 0)
2527 /* Long long is not split between registers and stack. */
2528 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2530 /* Long long is aligned on the stack. */
2532 cum->words += cum->words & 1;
2533 cum->words += n_words;
2536 /* Note: continuing to accumulate gregno past when we've started
2537 spilling to the stack indicates the fact that we've started
2538 spilling to the stack to expand_builtin_saveregs. */
2539 cum->sysv_gregno = gregno + n_words;
2542 if (TARGET_DEBUG_ARG)
2544 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2545 cum->words, cum->fregno);
2546 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2547 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2548 fprintf (stderr, "mode = %4s, named = %d\n",
2549 GET_MODE_NAME (mode), named);
2554 int align = (TARGET_32BIT && (cum->words & 1) != 0
2555 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2557 cum->words += align + RS6000_ARG_SIZE (mode, type);
2559 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2562 if (TARGET_DEBUG_ARG)
2564 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2565 cum->words, cum->fregno);
2566 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2567 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2568 fprintf (stderr, "named = %d, align = %d\n", named, align);
2573 /* Determine where to put an argument to a function.
2574 Value is zero to push the argument on the stack,
2575 or a hard register in which to store the argument.
2577 MODE is the argument's machine mode.
2578 TYPE is the data type of the argument (as a tree).
2579 This is null for libcalls where that information may
2581 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2582 the preceding args and about the function being called.
2583 NAMED is nonzero if this argument is a named parameter
2584 (otherwise it is an extra parameter matching an ellipsis).
2586 On RS/6000 the first eight words of non-FP are normally in registers
2587 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2588 Under V.4, the first 8 FP args are in registers.
2590 If this is floating-point and no prototype is specified, we use
2591 both an FP and integer register (or possibly FP reg and stack). Library
2592 functions (when TYPE is zero) always have the proper types for args,
2593 so we can pass the FP value just in one register. emit_library_function
2594 doesn't support PARALLEL anyway. */
2597 function_arg (cum, mode, type, named)
2598 CUMULATIVE_ARGS *cum;
2599 enum machine_mode mode;
2603 enum rs6000_abi abi = DEFAULT_ABI;
2605 /* Return a marker to indicate whether CR1 needs to set or clear the
2606 bit that V.4 uses to say fp args were passed in registers.
2607 Assume that we don't need the marker for software floating point,
2608 or compiler generated library calls. */
2609 if (mode == VOIDmode)
2612 && TARGET_HARD_FLOAT
2613 && cum->nargs_prototype < 0
2614 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2616 return GEN_INT (cum->call_cookie
2617 | ((cum->fregno == FP_ARG_MIN_REG)
2618 ? CALL_V4_SET_FP_ARGS
2619 : CALL_V4_CLEAR_FP_ARGS));
2622 return GEN_INT (cum->call_cookie);
2625 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2627 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2628 return gen_rtx_REG (mode, cum->vregno);
2632 else if (abi == ABI_V4)
2634 if (TARGET_HARD_FLOAT
2635 && (mode == SFmode || mode == DFmode))
2637 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2638 return gen_rtx_REG (mode, cum->fregno);
2645 int gregno = cum->sysv_gregno;
2647 /* Aggregates and IEEE quad get passed by reference. */
2648 if ((type && AGGREGATE_TYPE_P (type))
2652 n_words = RS6000_ARG_SIZE (mode, type);
2654 /* Long long is put in odd registers. */
2655 if (n_words == 2 && (gregno & 1) == 0)
2658 /* Long long is not split between registers and stack. */
2659 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2660 return gen_rtx_REG (mode, gregno);
2667 int align = (TARGET_32BIT && (cum->words & 1) != 0
2668 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2669 int align_words = cum->words + align;
2671 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2674 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2677 || ((cum->nargs_prototype > 0)
2678 /* IBM AIX extended its linkage convention definition always
2679 to require FP args after register save area hole on the
2681 && (DEFAULT_ABI != ABI_AIX
2683 || (align_words < GP_ARG_NUM_REG))))
2684 return gen_rtx_REG (mode, cum->fregno);
2686 return gen_rtx_PARALLEL (mode,
2688 gen_rtx_EXPR_LIST (VOIDmode,
2689 ((align_words >= GP_ARG_NUM_REG)
2692 + RS6000_ARG_SIZE (mode, type)
2694 /* If this is partially on the stack, then
2695 we only include the portion actually
2696 in registers here. */
2697 ? gen_rtx_REG (SImode,
2698 GP_ARG_MIN_REG + align_words)
2699 : gen_rtx_REG (mode,
2700 GP_ARG_MIN_REG + align_words))),
2702 gen_rtx_EXPR_LIST (VOIDmode,
2703 gen_rtx_REG (mode, cum->fregno),
2706 else if (align_words < GP_ARG_NUM_REG)
2707 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2713 /* For an arg passed partly in registers and partly in memory,
2714 this is the number of registers used.
2715 For args passed entirely in registers or entirely in memory, zero. */
2718 function_arg_partial_nregs (cum, mode, type, named)
2719 CUMULATIVE_ARGS *cum;
2720 enum machine_mode mode;
2722 int named ATTRIBUTE_UNUSED;
2724 if (DEFAULT_ABI == ABI_V4)
2727 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2728 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2730 if (cum->nargs_prototype >= 0)
2734 if (cum->words < GP_ARG_NUM_REG
2735 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2737 int ret = GP_ARG_NUM_REG - cum->words;
2738 if (ret && TARGET_DEBUG_ARG)
2739 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2747 /* A C expression that indicates when an argument must be passed by
2748 reference. If nonzero for an argument, a copy of that argument is
2749 made in memory and a pointer to the argument is passed instead of
2750 the argument itself. The pointer is passed in whatever way is
2751 appropriate for passing a pointer to that type.
2753 Under V.4, structures and unions are passed by reference. */
2756 function_arg_pass_by_reference (cum, mode, type, named)
2757 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2758 enum machine_mode mode ATTRIBUTE_UNUSED;
2760 int named ATTRIBUTE_UNUSED;
2762 if (DEFAULT_ABI == ABI_V4
2763 && ((type && AGGREGATE_TYPE_P (type))
2766 if (TARGET_DEBUG_ARG)
2767 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2775 /* Perform any needed actions needed for a function that is receiving a
2776 variable number of arguments.
2780 MODE and TYPE are the mode and type of the current parameter.
2782 PRETEND_SIZE is a variable that should be set to the amount of stack
2783 that must be pushed by the prolog to pretend that our caller pushed
2786 Normally, this macro will push all remaining incoming registers on the
2787 stack and set PRETEND_SIZE to the length of the registers pushed. */
2790 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2791 CUMULATIVE_ARGS *cum;
2792 enum machine_mode mode;
2798 CUMULATIVE_ARGS next_cum;
2799 int reg_size = TARGET_32BIT ? 4 : 8;
2800 rtx save_area = NULL_RTX, mem;
2801 int first_reg_offset, set;
2805 fntype = TREE_TYPE (current_function_decl);
2806 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2807 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2808 != void_type_node));
2810 /* For varargs, we do not want to skip the dummy va_dcl argument.
2811 For stdargs, we do want to skip the last named argument. */
2814 function_arg_advance (&next_cum, mode, type, 1);
2816 if (DEFAULT_ABI == ABI_V4)
2818 /* Indicate to allocate space on the stack for varargs save area. */
2819 /* ??? Does this really have to be located at a magic spot on the
2820 stack, or can we allocate this with assign_stack_local instead. */
2821 cfun->machine->sysv_varargs_p = 1;
2823 save_area = plus_constant (virtual_stack_vars_rtx,
2824 - RS6000_VARARGS_SIZE);
2826 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2830 first_reg_offset = next_cum.words;
2831 save_area = virtual_incoming_args_rtx;
2832 cfun->machine->sysv_varargs_p = 0;
2834 if (MUST_PASS_IN_STACK (mode, type))
2835 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2838 set = get_varargs_alias_set ();
2839 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2841 mem = gen_rtx_MEM (BLKmode,
2842 plus_constant (save_area,
2843 first_reg_offset * reg_size)),
2844 set_mem_alias_set (mem, set);
2845 set_mem_align (mem, BITS_PER_WORD);
2848 (GP_ARG_MIN_REG + first_reg_offset, mem,
2849 GP_ARG_NUM_REG - first_reg_offset,
2850 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2852 /* ??? Does ABI_V4 need this at all? */
2853 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2856 /* Save FP registers if needed. */
2857 if (DEFAULT_ABI == ABI_V4
2858 && TARGET_HARD_FLOAT && ! no_rtl
2859 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2861 int fregno = next_cum.fregno;
2862 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2863 rtx lab = gen_label_rtx ();
2864 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2866 emit_jump_insn (gen_rtx_SET (VOIDmode,
2868 gen_rtx_IF_THEN_ELSE (VOIDmode,
2869 gen_rtx_NE (VOIDmode, cr1,
2871 gen_rtx_LABEL_REF (VOIDmode, lab),
2874 while (fregno <= FP_ARG_V4_MAX_REG)
2876 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2877 set_mem_alias_set (mem, set);
2878 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2887 /* Create the va_list data type. */
2890 rs6000_build_va_list ()
2892 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2894 /* For AIX, prefer 'char *' because that's what the system
2895 header files like. */
2896 if (DEFAULT_ABI != ABI_V4)
2897 return build_pointer_type (char_type_node);
2899 record = make_lang_type (RECORD_TYPE);
2900 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2902 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2903 unsigned_char_type_node);
2904 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2905 unsigned_char_type_node);
2906 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2908 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2911 DECL_FIELD_CONTEXT (f_gpr) = record;
2912 DECL_FIELD_CONTEXT (f_fpr) = record;
2913 DECL_FIELD_CONTEXT (f_ovf) = record;
2914 DECL_FIELD_CONTEXT (f_sav) = record;
2916 TREE_CHAIN (record) = type_decl;
2917 TYPE_NAME (record) = type_decl;
2918 TYPE_FIELDS (record) = f_gpr;
2919 TREE_CHAIN (f_gpr) = f_fpr;
2920 TREE_CHAIN (f_fpr) = f_ovf;
2921 TREE_CHAIN (f_ovf) = f_sav;
2923 layout_type (record);
2925 /* The correct type is an array type of one element. */
2926 return build_array_type (record, build_index_type (size_zero_node));
2929 /* Implement va_start. */
2932 rs6000_va_start (stdarg_p, valist, nextarg)
2937 HOST_WIDE_INT words, n_gpr, n_fpr;
2938 tree f_gpr, f_fpr, f_ovf, f_sav;
2939 tree gpr, fpr, ovf, sav, t;
2941 /* Only SVR4 needs something special. */
2942 if (DEFAULT_ABI != ABI_V4)
2944 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2948 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2949 f_fpr = TREE_CHAIN (f_gpr);
2950 f_ovf = TREE_CHAIN (f_fpr);
2951 f_sav = TREE_CHAIN (f_ovf);
2953 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2954 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2955 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2956 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2957 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2959 /* Count number of gp and fp argument registers used. */
2960 words = current_function_args_info.words;
2961 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2962 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2964 if (TARGET_DEBUG_ARG)
2966 fputs ("va_start: words = ", stderr);
2967 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2968 fputs (", n_gpr = ", stderr);
2969 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2970 fputs (", n_fpr = ", stderr);
2971 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2972 putc ('\n', stderr);
2975 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2976 TREE_SIDE_EFFECTS (t) = 1;
2977 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2979 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2980 TREE_SIDE_EFFECTS (t) = 1;
2981 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2983 /* Find the overflow area. */
2984 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
2986 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
2987 build_int_2 (words * UNITS_PER_WORD, 0));
2988 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2989 TREE_SIDE_EFFECTS (t) = 1;
2990 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992 /* Find the register save area. */
2993 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
2994 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
2995 build_int_2 (-RS6000_VARARGS_SIZE, -1));
2996 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
2997 TREE_SIDE_EFFECTS (t) = 1;
2998 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3001 /* Implement va_arg. */
3004 rs6000_va_arg (valist, type)
3007 tree f_gpr, f_fpr, f_ovf, f_sav;
3008 tree gpr, fpr, ovf, sav, reg, t, u;
3009 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3010 rtx lab_false, lab_over, addr_rtx, r;
3012 /* For AIX, the rule is that structures are passed left-aligned in
3013 their stack slot. However, GCC does not presently do this:
3014 structures which are the same size as integer types are passed
3015 right-aligned, as if they were in fact integers. This only
3016 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3017 if (DEFAULT_ABI != ABI_V4)
3019 HOST_WIDE_INT align, rounded_size;
3020 enum machine_mode mode;
3023 /* Compute the rounded size of the type. */
3024 align = PARM_BOUNDARY / BITS_PER_UNIT;
3025 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
3030 mode = TYPE_MODE (type);
3031 if (mode != BLKmode)
3034 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
3035 if (rounded_size > align)
3038 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3039 build_int_2 (rounded_size - adj, 0));
3042 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3043 addr_rtx = copy_to_reg (addr_rtx);
3045 /* Compute new value for AP. */
3046 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3047 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3048 build_int_2 (rounded_size, 0)));
3049 TREE_SIDE_EFFECTS (t) = 1;
3050 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3055 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3056 f_fpr = TREE_CHAIN (f_gpr);
3057 f_ovf = TREE_CHAIN (f_fpr);
3058 f_sav = TREE_CHAIN (f_ovf);
3060 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3061 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3062 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3063 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3064 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3066 size = int_size_in_bytes (type);
3067 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3069 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3071 /* Aggregates and long doubles are passed by reference. */
3077 size = rsize = UNITS_PER_WORD;
3079 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3081 /* FP args go in FP registers, if present. */
3090 /* Otherwise into GP registers. */
3098 /* Pull the value out of the saved registers ... */
3100 lab_false = gen_label_rtx ();
3101 lab_over = gen_label_rtx ();
3102 addr_rtx = gen_reg_rtx (Pmode);
3104 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3105 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3108 /* Long long is aligned in the registers. */
3111 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3112 build_int_2 (n_reg - 1, 0));
3113 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3114 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3115 TREE_SIDE_EFFECTS (u) = 1;
3116 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3120 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3124 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
3125 TREE_SIDE_EFFECTS (u) = 1;
3127 u = build1 (CONVERT_EXPR, integer_type_node, u);
3128 TREE_SIDE_EFFECTS (u) = 1;
3130 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3131 TREE_SIDE_EFFECTS (u) = 1;
3133 t = build (PLUS_EXPR, ptr_type_node, t, u);
3134 TREE_SIDE_EFFECTS (t) = 1;
3136 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3138 emit_move_insn (addr_rtx, r);
3140 emit_jump_insn (gen_jump (lab_over));
3142 emit_label (lab_false);
3144 /* ... otherwise out of the overflow area. */
3146 /* Make sure we don't find reg 7 for the next int arg. */
3149 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3150 TREE_SIDE_EFFECTS (t) = 1;
3151 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3154 /* Care for on-stack alignment if needed. */
3159 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
3160 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
3164 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3166 emit_move_insn (addr_rtx, r);
3168 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3169 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3170 TREE_SIDE_EFFECTS (t) = 1;
3171 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3173 emit_label (lab_over);
3177 r = gen_rtx_MEM (Pmode, addr_rtx);
3178 set_mem_alias_set (r, get_varargs_alias_set ());
3179 emit_move_insn (addr_rtx, r);
3187 #define def_builtin(MASK, NAME, TYPE, CODE) \
3189 if ((MASK) & target_flags) \
3190 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3193 struct builtin_description
3195 const unsigned int mask;
3196 const enum insn_code icode;
3197 const char *const name;
3198 const enum rs6000_builtins code;
3201 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3203 static const struct builtin_description bdesc_3arg[] =
3205 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3206 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3207 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3208 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3209 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3210 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3211 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3212 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3213 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3214 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3215 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3216 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3217 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3218 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3219 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3220 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3221 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3222 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3223 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3224 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3225 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3226 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3227 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3230 /* DST operations: void foo (void *, const int, const char). */
3232 static const struct builtin_description bdesc_dst[] =
3234 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3235 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3236 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3237 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3240 /* Simple binary operations: VECc = foo (VECa, VECb). */
3242 static const struct builtin_description bdesc_2arg[] =
3244 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3245 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3246 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3247 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3248 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3249 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3250 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3251 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3252 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3253 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3254 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3255 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3256 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3257 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3258 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3259 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3260 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3261 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3262 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3263 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3264 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3265 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3266 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3267 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3268 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3269 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3273 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3274 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3280 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3281 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3282 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3283 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3284 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3285 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3286 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3287 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3288 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3289 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3293 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3294 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3295 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3296 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3297 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3298 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3299 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3300 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3301 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3302 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3303 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3304 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3305 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3306 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3307 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3308 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3309 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3310 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3311 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3312 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3314 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3315 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3316 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3317 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3340 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3341 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3342 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3343 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3356 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3359 /* AltiVec predicates. */
3361 struct builtin_description_predicates
3363 const unsigned int mask;
3364 const enum insn_code icode;
3366 const char *const name;
3367 const enum rs6000_builtins code;
3370 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3372 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3387 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3390 static const struct builtin_description bdesc_1arg[] =
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3398 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3412 altivec_expand_unop_builtin (icode, arglist, target)
3413 enum insn_code icode;
3418 tree arg0 = TREE_VALUE (arglist);
3419 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3420 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3421 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3423 /* If we got invalid arguments bail out before generating bad rtl. */
3424 if (arg0 == error_mark_node)
3428 || GET_MODE (target) != tmode
3429 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3430 target = gen_reg_rtx (tmode);
3432 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3433 op0 = copy_to_mode_reg (mode0, op0);
3435 pat = GEN_FCN (icode) (target, op0);
3444 altivec_expand_binop_builtin (icode, arglist, target)
3445 enum insn_code icode;
3450 tree arg0 = TREE_VALUE (arglist);
3451 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3452 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3453 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3454 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3455 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3456 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3458 /* If we got invalid arguments bail out before generating bad rtl. */
3459 if (arg0 == error_mark_node || arg1 == error_mark_node)
3463 || GET_MODE (target) != tmode
3464 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3465 target = gen_reg_rtx (tmode);
3467 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3468 op0 = copy_to_mode_reg (mode0, op0);
3469 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3470 op1 = copy_to_mode_reg (mode1, op1);
3472 pat = GEN_FCN (icode) (target, op0, op1);
3481 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3482 enum insn_code icode;
3488 tree cr6_form = TREE_VALUE (arglist);
3489 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3490 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3491 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3492 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3493 enum machine_mode tmode = SImode;
3494 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3495 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3498 if (TREE_CODE (cr6_form) != INTEGER_CST)
3500 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3504 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3509 /* If we have invalid arguments, bail out before generating bad rtl. */
3510 if (arg0 == error_mark_node || arg1 == error_mark_node)
3514 || GET_MODE (target) != tmode
3515 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3516 target = gen_reg_rtx (tmode);
3518 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3519 op0 = copy_to_mode_reg (mode0, op0);
3520 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3521 op1 = copy_to_mode_reg (mode1, op1);
3523 scratch = gen_reg_rtx (mode0);
3525 pat = GEN_FCN (icode) (scratch, op0, op1,
3526 gen_rtx (SYMBOL_REF, Pmode, opcode));
3531 /* The vec_any* and vec_all* predicates use the same opcodes for two
3532 different operations, but the bits in CR6 will be different
3533 depending on what information we want. So we have to play tricks
3534 with CR6 to get the right bits out.
3536 If you think this is disgusting, look at the specs for the
3537 AltiVec predicates. */
3539 switch (cr6_form_int)
3542 emit_insn (gen_cr6_test_for_zero (target));
3545 emit_insn (gen_cr6_test_for_zero_reverse (target));
3548 emit_insn (gen_cr6_test_for_lt (target));
3551 emit_insn (gen_cr6_test_for_lt_reverse (target));
3554 error ("argument 1 of __builtin_altivec_predicate is out of range");
3562 altivec_expand_stv_builtin (icode, arglist)
3563 enum insn_code icode;
3566 tree arg0 = TREE_VALUE (arglist);
3567 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3568 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3569 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3570 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3571 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3573 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3574 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3575 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3577 /* Invalid arguments. Bail before doing anything stoopid! */
3578 if (arg0 == error_mark_node
3579 || arg1 == error_mark_node
3580 || arg2 == error_mark_node)
3583 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3584 op0 = copy_to_mode_reg (mode2, op0);
3585 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3586 op1 = copy_to_mode_reg (mode0, op1);
3587 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3588 op2 = copy_to_mode_reg (mode1, op2);
3590 pat = GEN_FCN (icode) (op1, op2, op0);
3597 altivec_expand_ternop_builtin (icode, arglist, target)
3598 enum insn_code icode;
3603 tree arg0 = TREE_VALUE (arglist);
3604 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3605 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3606 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3607 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3608 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3609 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3610 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3611 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3612 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3614 /* If we got invalid arguments bail out before generating bad rtl. */
3615 if (arg0 == error_mark_node
3616 || arg1 == error_mark_node
3617 || arg2 == error_mark_node)
3621 || GET_MODE (target) != tmode
3622 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3623 target = gen_reg_rtx (tmode);
3625 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3626 op0 = copy_to_mode_reg (mode0, op0);
3627 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3628 op1 = copy_to_mode_reg (mode1, op1);
3629 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3630 op2 = copy_to_mode_reg (mode2, op2);
3632 pat = GEN_FCN (icode) (target, op0, op1, op2);
3640 altivec_expand_builtin (exp, target)
3644 struct builtin_description *d;
3645 struct builtin_description_predicates *dp;
3647 enum insn_code icode;
3648 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3649 tree arglist = TREE_OPERAND (exp, 1);
3650 tree arg0, arg1, arg2;
3651 rtx op0, op1, op2, pat;
3652 enum machine_mode tmode, mode0, mode1, mode2;
3653 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3657 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3658 icode = CODE_FOR_altivec_lvx_16qi;
3659 arg0 = TREE_VALUE (arglist);
3660 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3661 tmode = insn_data[icode].operand[0].mode;
3662 mode0 = insn_data[icode].operand[1].mode;
3665 || GET_MODE (target) != tmode
3666 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3667 target = gen_reg_rtx (tmode);
3669 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3670 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3672 pat = GEN_FCN (icode) (target, op0);
3678 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3679 icode = CODE_FOR_altivec_lvx_8hi;
3680 arg0 = TREE_VALUE (arglist);
3681 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3682 tmode = insn_data[icode].operand[0].mode;
3683 mode0 = insn_data[icode].operand[1].mode;
3686 || GET_MODE (target) != tmode
3687 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3688 target = gen_reg_rtx (tmode);
3690 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3691 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3693 pat = GEN_FCN (icode) (target, op0);
3699 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3700 icode = CODE_FOR_altivec_lvx_4si;
3701 arg0 = TREE_VALUE (arglist);
3702 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3703 tmode = insn_data[icode].operand[0].mode;
3704 mode0 = insn_data[icode].operand[1].mode;
3707 || GET_MODE (target) != tmode
3708 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3709 target = gen_reg_rtx (tmode);
3711 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3712 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3714 pat = GEN_FCN (icode) (target, op0);
3720 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3721 icode = CODE_FOR_altivec_lvx_4sf;
3722 arg0 = TREE_VALUE (arglist);
3723 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3724 tmode = insn_data[icode].operand[0].mode;
3725 mode0 = insn_data[icode].operand[1].mode;
3728 || GET_MODE (target) != tmode
3729 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3730 target = gen_reg_rtx (tmode);
3732 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3733 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3735 pat = GEN_FCN (icode) (target, op0);
3741 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3742 icode = CODE_FOR_altivec_stvx_16qi;
3743 arg0 = TREE_VALUE (arglist);
3744 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3745 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3746 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3747 mode0 = insn_data[icode].operand[0].mode;
3748 mode1 = insn_data[icode].operand[1].mode;
3750 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3751 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3752 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3753 op1 = copy_to_mode_reg (mode1, op1);
3755 pat = GEN_FCN (icode) (op0, op1);
3760 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3761 icode = CODE_FOR_altivec_stvx_8hi;
3762 arg0 = TREE_VALUE (arglist);
3763 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3764 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3765 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3766 mode0 = insn_data[icode].operand[0].mode;
3767 mode1 = insn_data[icode].operand[1].mode;
3769 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3770 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3771 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3772 op1 = copy_to_mode_reg (mode1, op1);
3774 pat = GEN_FCN (icode) (op0, op1);
3779 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3780 icode = CODE_FOR_altivec_stvx_4si;
3781 arg0 = TREE_VALUE (arglist);
3782 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3783 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3784 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3785 mode0 = insn_data[icode].operand[0].mode;
3786 mode1 = insn_data[icode].operand[1].mode;
3788 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3789 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3790 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3791 op1 = copy_to_mode_reg (mode1, op1);
3793 pat = GEN_FCN (icode) (op0, op1);
3798 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3799 icode = CODE_FOR_altivec_stvx_4sf;
3800 arg0 = TREE_VALUE (arglist);
3801 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3802 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3803 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3804 mode0 = insn_data[icode].operand[0].mode;
3805 mode1 = insn_data[icode].operand[1].mode;
3807 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3808 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3809 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3810 op1 = copy_to_mode_reg (mode1, op1);
3812 pat = GEN_FCN (icode) (op0, op1);
3817 case ALTIVEC_BUILTIN_STVX:
3818 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3819 case ALTIVEC_BUILTIN_STVEBX:
3820 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3821 case ALTIVEC_BUILTIN_STVEHX:
3822 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3823 case ALTIVEC_BUILTIN_STVEWX:
3824 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3825 case ALTIVEC_BUILTIN_STVXL:
3826 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3828 case ALTIVEC_BUILTIN_MFVSCR:
3829 icode = CODE_FOR_altivec_mfvscr;
3830 tmode = insn_data[icode].operand[0].mode;
3833 || GET_MODE (target) != tmode
3834 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3835 target = gen_reg_rtx (tmode);
3837 pat = GEN_FCN (icode) (target);
3843 case ALTIVEC_BUILTIN_MTVSCR:
3844 icode = CODE_FOR_altivec_mtvscr;
3845 arg0 = TREE_VALUE (arglist);
3846 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3847 mode0 = insn_data[icode].operand[0].mode;
3849 /* If we got invalid arguments bail out before generating bad rtl. */
3850 if (arg0 == error_mark_node)
3853 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3854 op0 = copy_to_mode_reg (mode0, op0);
3856 pat = GEN_FCN (icode) (op0);
3861 case ALTIVEC_BUILTIN_DSSALL:
3862 emit_insn (gen_altivec_dssall ());
3865 case ALTIVEC_BUILTIN_DSS:
3866 icode = CODE_FOR_altivec_dss;
3867 arg0 = TREE_VALUE (arglist);
3868 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3869 mode0 = insn_data[icode].operand[0].mode;
3871 /* If we got invalid arguments bail out before generating bad rtl. */
3872 if (arg0 == error_mark_node)
3875 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3876 op0 = copy_to_mode_reg (mode0, op0);
3878 emit_insn (gen_altivec_dss (op0));
3882 /* Handle DST variants. */
3883 d = (struct builtin_description *) bdesc_dst;
3884 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
3885 if (d->code == fcode)
3887 arg0 = TREE_VALUE (arglist);
3888 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3889 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3890 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3891 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3892 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3893 mode0 = insn_data[d->icode].operand[0].mode;
3894 mode1 = insn_data[d->icode].operand[1].mode;
3895 mode2 = insn_data[d->icode].operand[2].mode;
3897 /* Invalid arguments, bail out before generating bad rtl. */
3898 if (arg0 == error_mark_node
3899 || arg1 == error_mark_node
3900 || arg2 == error_mark_node)
3903 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
3904 op0 = copy_to_mode_reg (mode0, op0);
3905 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
3906 op1 = copy_to_mode_reg (mode1, op1);
3908 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
3910 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
3914 pat = GEN_FCN (d->icode) (op0, op1, op2);
3921 /* Handle simple unary operations. */
3922 d = (struct builtin_description *) bdesc_1arg;
3923 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3924 if (d->code == fcode)
3925 return altivec_expand_unop_builtin (d->icode, arglist, target);
3927 /* Handle simple binary operations. */
3928 d = (struct builtin_description *) bdesc_2arg;
3929 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3930 if (d->code == fcode)
3931 return altivec_expand_binop_builtin (d->icode, arglist, target);
3933 /* Expand the AltiVec predicates. */
3934 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
3935 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
3936 if (dp->code == fcode)
3937 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
3939 /* LV* are funky. We initialized them differently. */
3942 case ALTIVEC_BUILTIN_LVSL:
3943 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
3945 case ALTIVEC_BUILTIN_LVSR:
3946 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
3948 case ALTIVEC_BUILTIN_LVEBX:
3949 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
3951 case ALTIVEC_BUILTIN_LVEHX:
3952 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
3954 case ALTIVEC_BUILTIN_LVEWX:
3955 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
3957 case ALTIVEC_BUILTIN_LVXL:
3958 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
3960 case ALTIVEC_BUILTIN_LVX:
3961 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
3968 /* Handle simple ternary operations. */
3969 d = (struct builtin_description *) bdesc_3arg;
3970 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3971 if (d->code == fcode)
3972 return altivec_expand_ternop_builtin (d->icode, arglist, target);
3978 /* Expand an expression EXP that calls a built-in function,
3979 with result going to TARGET if that's convenient
3980 (and in mode MODE if that's convenient).
3981 SUBTARGET may be used as the target for computing one of EXP's operands.
3982 IGNORE is nonzero if the value is to be ignored. */
3985 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3988 rtx subtarget ATTRIBUTE_UNUSED;
3989 enum machine_mode mode ATTRIBUTE_UNUSED;
3990 int ignore ATTRIBUTE_UNUSED;
3993 return altivec_expand_builtin (exp, target);
3999 rs6000_init_builtins ()
4002 altivec_init_builtins ();
4006 altivec_init_builtins (void)
4008 struct builtin_description *d;
4009 struct builtin_description_predicates *dp;
4012 tree endlink = void_list_node;
4014 tree pint_type_node = build_pointer_type (integer_type_node);
4015 tree pvoid_type_node = build_pointer_type (void_type_node);
4016 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4017 tree pchar_type_node = build_pointer_type (char_type_node);
4018 tree pfloat_type_node = build_pointer_type (float_type_node);
4020 tree v4sf_ftype_v4sf_v4sf_v16qi
4021 = build_function_type (V4SF_type_node,
4022 tree_cons (NULL_TREE, V4SF_type_node,
4023 tree_cons (NULL_TREE, V4SF_type_node,
4024 tree_cons (NULL_TREE,
4027 tree v4si_ftype_v4si_v4si_v16qi
4028 = build_function_type (V4SI_type_node,
4029 tree_cons (NULL_TREE, V4SI_type_node,
4030 tree_cons (NULL_TREE, V4SI_type_node,
4031 tree_cons (NULL_TREE,
4034 tree v8hi_ftype_v8hi_v8hi_v16qi
4035 = build_function_type (V8HI_type_node,
4036 tree_cons (NULL_TREE, V8HI_type_node,
4037 tree_cons (NULL_TREE, V8HI_type_node,
4038 tree_cons (NULL_TREE,
4041 tree v16qi_ftype_v16qi_v16qi_v16qi
4042 = build_function_type (V16QI_type_node,
4043 tree_cons (NULL_TREE, V16QI_type_node,
4044 tree_cons (NULL_TREE, V16QI_type_node,
4045 tree_cons (NULL_TREE,
4049 /* V4SI foo (char). */
4050 tree v4si_ftype_char
4051 = build_function_type (V4SI_type_node,
4052 tree_cons (NULL_TREE, char_type_node, endlink));
4054 /* V8HI foo (char). */
4055 tree v8hi_ftype_char
4056 = build_function_type (V8HI_type_node,
4057 tree_cons (NULL_TREE, char_type_node, endlink));
4059 /* V16QI foo (char). */
4060 tree v16qi_ftype_char
4061 = build_function_type (V16QI_type_node,
4062 tree_cons (NULL_TREE, char_type_node, endlink));
4063 /* V4SF foo (V4SF). */
4064 tree v4sf_ftype_v4sf
4065 = build_function_type (V4SF_type_node,
4066 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4068 /* V4SI foo (int *). */
4069 tree v4si_ftype_pint
4070 = build_function_type (V4SI_type_node,
4071 tree_cons (NULL_TREE, pint_type_node, endlink));
4072 /* V8HI foo (short *). */
4073 tree v8hi_ftype_pshort
4074 = build_function_type (V8HI_type_node,
4075 tree_cons (NULL_TREE, pshort_type_node, endlink));
4076 /* V16QI foo (char *). */
4077 tree v16qi_ftype_pchar
4078 = build_function_type (V16QI_type_node,
4079 tree_cons (NULL_TREE, pchar_type_node, endlink));
4080 /* V4SF foo (float *). */
4081 tree v4sf_ftype_pfloat
4082 = build_function_type (V4SF_type_node,
4083 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4085 /* V8HI foo (V16QI). */
4086 tree v8hi_ftype_v16qi
4087 = build_function_type (V8HI_type_node,
4088 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4090 /* void foo (void *, int, char/literal). */
4091 tree void_ftype_pvoid_int_char
4092 = build_function_type (void_type_node,
4093 tree_cons (NULL_TREE, pvoid_type_node,
4094 tree_cons (NULL_TREE, integer_type_node,
4095 tree_cons (NULL_TREE,
4099 /* void foo (int *, V4SI). */
4100 tree void_ftype_pint_v4si
4101 = build_function_type (void_type_node,
4102 tree_cons (NULL_TREE, pint_type_node,
4103 tree_cons (NULL_TREE, V4SI_type_node,
4105 /* void foo (short *, V8HI). */
4106 tree void_ftype_pshort_v8hi
4107 = build_function_type (void_type_node,
4108 tree_cons (NULL_TREE, pshort_type_node,
4109 tree_cons (NULL_TREE, V8HI_type_node,
4111 /* void foo (char *, V16QI). */
4112 tree void_ftype_pchar_v16qi
4113 = build_function_type (void_type_node,
4114 tree_cons (NULL_TREE, pchar_type_node,
4115 tree_cons (NULL_TREE, V16QI_type_node,
4117 /* void foo (float *, V4SF). */
4118 tree void_ftype_pfloat_v4sf
4119 = build_function_type (void_type_node,
4120 tree_cons (NULL_TREE, pfloat_type_node,
4121 tree_cons (NULL_TREE, V4SF_type_node,
4124 /* void foo (V4SI). */
4125 tree void_ftype_v4si
4126 = build_function_type (void_type_node,
4127 tree_cons (NULL_TREE, V4SI_type_node,
4130 /* void foo (vint, int, void *). */
4131 tree void_ftype_v4si_int_pvoid
4132 = build_function_type (void_type_node,
4133 tree_cons (NULL_TREE, V4SI_type_node,
4134 tree_cons (NULL_TREE, integer_type_node,
4135 tree_cons (NULL_TREE,
4139 /* void foo (vchar, int, void *). */
4140 tree void_ftype_v16qi_int_pvoid
4141 = build_function_type (void_type_node,
4142 tree_cons (NULL_TREE, V16QI_type_node,
4143 tree_cons (NULL_TREE, integer_type_node,
4144 tree_cons (NULL_TREE,
4148 /* void foo (vshort, int, void *). */
4149 tree void_ftype_v8hi_int_pvoid
4150 = build_function_type (void_type_node,
4151 tree_cons (NULL_TREE, V8HI_type_node,
4152 tree_cons (NULL_TREE, integer_type_node,
4153 tree_cons (NULL_TREE,
4157 /* void foo (char). */
4159 = build_function_type (void_type_node,
4160 tree_cons (NULL_TREE, char_type_node,
4163 /* void foo (void). */
4164 tree void_ftype_void
4165 = build_function_type (void_type_node, void_list_node);
4167 /* vshort foo (void). */
4168 tree v8hi_ftype_void
4169 = build_function_type (V8HI_type_node, void_list_node);
4171 tree v4si_ftype_v4si_v4si
4172 = build_function_type (V4SI_type_node,
4173 tree_cons (NULL_TREE, V4SI_type_node,
4174 tree_cons (NULL_TREE, V4SI_type_node,
4177 /* These are for the unsigned 5 bit literals. */
4179 tree v4sf_ftype_v4si_char
4180 = build_function_type (V4SF_type_node,
4181 tree_cons (NULL_TREE, V4SI_type_node,
4182 tree_cons (NULL_TREE, char_type_node,
4184 tree v4si_ftype_v4sf_char
4185 = build_function_type (V4SI_type_node,
4186 tree_cons (NULL_TREE, V4SF_type_node,
4187 tree_cons (NULL_TREE, char_type_node,
4189 tree v4si_ftype_v4si_char
4190 = build_function_type (V4SI_type_node,
4191 tree_cons (NULL_TREE, V4SI_type_node,
4192 tree_cons (NULL_TREE, char_type_node,
4194 tree v8hi_ftype_v8hi_char
4195 = build_function_type (V8HI_type_node,
4196 tree_cons (NULL_TREE, V8HI_type_node,
4197 tree_cons (NULL_TREE, char_type_node,
4199 tree v16qi_ftype_v16qi_char
4200 = build_function_type (V16QI_type_node,
4201 tree_cons (NULL_TREE, V16QI_type_node,
4202 tree_cons (NULL_TREE, char_type_node,
4205 /* These are for the unsigned 4 bit literals. */
4207 tree v16qi_ftype_v16qi_v16qi_char
4208 = build_function_type (V16QI_type_node,
4209 tree_cons (NULL_TREE, V16QI_type_node,
4210 tree_cons (NULL_TREE, V16QI_type_node,
4211 tree_cons (NULL_TREE,
4215 tree v8hi_ftype_v8hi_v8hi_char
4216 = build_function_type (V8HI_type_node,
4217 tree_cons (NULL_TREE, V8HI_type_node,
4218 tree_cons (NULL_TREE, V8HI_type_node,
4219 tree_cons (NULL_TREE,
4223 tree v4si_ftype_v4si_v4si_char
4224 = build_function_type (V4SI_type_node,
4225 tree_cons (NULL_TREE, V4SI_type_node,
4226 tree_cons (NULL_TREE, V4SI_type_node,
4227 tree_cons (NULL_TREE,
4231 tree v4sf_ftype_v4sf_v4sf_char
4232 = build_function_type (V4SF_type_node,
4233 tree_cons (NULL_TREE, V4SF_type_node,
4234 tree_cons (NULL_TREE, V4SF_type_node,
4235 tree_cons (NULL_TREE,
4239 /* End of 4 bit literals. */
4241 tree v4sf_ftype_v4sf_v4sf
4242 = build_function_type (V4SF_type_node,
4243 tree_cons (NULL_TREE, V4SF_type_node,
4244 tree_cons (NULL_TREE, V4SF_type_node,
4246 tree v4sf_ftype_v4sf_v4sf_v4si
4247 = build_function_type (V4SF_type_node,
4248 tree_cons (NULL_TREE, V4SF_type_node,
4249 tree_cons (NULL_TREE, V4SF_type_node,
4250 tree_cons (NULL_TREE,
4253 tree v4sf_ftype_v4sf_v4sf_v4sf
4254 = build_function_type (V4SF_type_node,
4255 tree_cons (NULL_TREE, V4SF_type_node,
4256 tree_cons (NULL_TREE, V4SF_type_node,
4257 tree_cons (NULL_TREE,
4260 tree v4si_ftype_v4si_v4si_v4si
4261 = build_function_type (V4SI_type_node,
4262 tree_cons (NULL_TREE, V4SI_type_node,
4263 tree_cons (NULL_TREE, V4SI_type_node,
4264 tree_cons (NULL_TREE,
4268 tree v8hi_ftype_v8hi_v8hi
4269 = build_function_type (V8HI_type_node,
4270 tree_cons (NULL_TREE, V8HI_type_node,
4271 tree_cons (NULL_TREE, V8HI_type_node,
4273 tree v8hi_ftype_v8hi_v8hi_v8hi
4274 = build_function_type (V8HI_type_node,
4275 tree_cons (NULL_TREE, V8HI_type_node,
4276 tree_cons (NULL_TREE, V8HI_type_node,
4277 tree_cons (NULL_TREE,
4280 tree v4si_ftype_v8hi_v8hi_v4si
4281 = build_function_type (V4SI_type_node,
4282 tree_cons (NULL_TREE, V8HI_type_node,
4283 tree_cons (NULL_TREE, V8HI_type_node,
4284 tree_cons (NULL_TREE,
4287 tree v4si_ftype_v16qi_v16qi_v4si
4288 = build_function_type (V4SI_type_node,
4289 tree_cons (NULL_TREE, V16QI_type_node,
4290 tree_cons (NULL_TREE, V16QI_type_node,
4291 tree_cons (NULL_TREE,
4295 tree v16qi_ftype_v16qi_v16qi
4296 = build_function_type (V16QI_type_node,
4297 tree_cons (NULL_TREE, V16QI_type_node,
4298 tree_cons (NULL_TREE, V16QI_type_node,
4301 tree v4si_ftype_v4sf_v4sf
4302 = build_function_type (V4SI_type_node,
4303 tree_cons (NULL_TREE, V4SF_type_node,
4304 tree_cons (NULL_TREE, V4SF_type_node,
4307 tree v8hi_ftype_v16qi_v16qi
4308 = build_function_type (V8HI_type_node,
4309 tree_cons (NULL_TREE, V16QI_type_node,
4310 tree_cons (NULL_TREE, V16QI_type_node,
4313 tree v4si_ftype_v8hi_v8hi
4314 = build_function_type (V4SI_type_node,
4315 tree_cons (NULL_TREE, V8HI_type_node,
4316 tree_cons (NULL_TREE, V8HI_type_node,
4319 tree v8hi_ftype_v4si_v4si
4320 = build_function_type (V8HI_type_node,
4321 tree_cons (NULL_TREE, V4SI_type_node,
4322 tree_cons (NULL_TREE, V4SI_type_node,
4325 tree v16qi_ftype_v8hi_v8hi
4326 = build_function_type (V16QI_type_node,
4327 tree_cons (NULL_TREE, V8HI_type_node,
4328 tree_cons (NULL_TREE, V8HI_type_node,
4331 tree v4si_ftype_v16qi_v4si
4332 = build_function_type (V4SI_type_node,
4333 tree_cons (NULL_TREE, V16QI_type_node,
4334 tree_cons (NULL_TREE, V4SI_type_node,
4337 tree v4si_ftype_v16qi_v16qi
4338 = build_function_type (V4SI_type_node,
4339 tree_cons (NULL_TREE, V16QI_type_node,
4340 tree_cons (NULL_TREE, V16QI_type_node,
4343 tree v4si_ftype_v8hi_v4si
4344 = build_function_type (V4SI_type_node,
4345 tree_cons (NULL_TREE, V8HI_type_node,
4346 tree_cons (NULL_TREE, V4SI_type_node,
4349 tree v4si_ftype_v8hi
4350 = build_function_type (V4SI_type_node,
4351 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4353 tree int_ftype_v4si_v4si
4354 = build_function_type (integer_type_node,
4355 tree_cons (NULL_TREE, V4SI_type_node,
4356 tree_cons (NULL_TREE, V4SI_type_node,
4359 tree int_ftype_v4sf_v4sf
4360 = build_function_type (integer_type_node,
4361 tree_cons (NULL_TREE, V4SF_type_node,
4362 tree_cons (NULL_TREE, V4SF_type_node,
4365 tree int_ftype_v16qi_v16qi
4366 = build_function_type (integer_type_node,
4367 tree_cons (NULL_TREE, V16QI_type_node,
4368 tree_cons (NULL_TREE, V16QI_type_node,
4371 tree int_ftype_int_v4si_v4si
4372 = build_function_type
4374 tree_cons (NULL_TREE, integer_type_node,
4375 tree_cons (NULL_TREE, V4SI_type_node,
4376 tree_cons (NULL_TREE, V4SI_type_node,
4379 tree int_ftype_int_v4sf_v4sf
4380 = build_function_type
4382 tree_cons (NULL_TREE, integer_type_node,
4383 tree_cons (NULL_TREE, V4SF_type_node,
4384 tree_cons (NULL_TREE, V4SF_type_node,
4387 tree int_ftype_int_v8hi_v8hi
4388 = build_function_type
4390 tree_cons (NULL_TREE, integer_type_node,
4391 tree_cons (NULL_TREE, V8HI_type_node,
4392 tree_cons (NULL_TREE, V8HI_type_node,
4395 tree int_ftype_int_v16qi_v16qi
4396 = build_function_type
4398 tree_cons (NULL_TREE, integer_type_node,
4399 tree_cons (NULL_TREE, V16QI_type_node,
4400 tree_cons (NULL_TREE, V16QI_type_node,
4403 tree v16qi_ftype_int_pvoid
4404 = build_function_type (V16QI_type_node,
4405 tree_cons (NULL_TREE, integer_type_node,
4406 tree_cons (NULL_TREE, pvoid_type_node,
4409 tree v4si_ftype_int_pvoid
4410 = build_function_type (V4SI_type_node,
4411 tree_cons (NULL_TREE, integer_type_node,
4412 tree_cons (NULL_TREE, pvoid_type_node,
4415 tree v8hi_ftype_int_pvoid
4416 = build_function_type (V8HI_type_node,
4417 tree_cons (NULL_TREE, integer_type_node,
4418 tree_cons (NULL_TREE, pvoid_type_node,
4421 tree int_ftype_v8hi_v8hi
4422 = build_function_type (integer_type_node,
4423 tree_cons (NULL_TREE, V8HI_type_node,
4424 tree_cons (NULL_TREE, V8HI_type_node,
4427 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4428 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4429 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4430 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4431 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4432 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4433 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4434 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4435 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4436 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4437 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4438 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4439 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4440 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4441 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4442 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4443 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4444 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4445 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4446 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4447 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4448 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4449 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4450 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4452 /* Add the simple ternary operators. */
4453 d = (struct builtin_description *) bdesc_3arg;
4454 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4457 enum machine_mode mode0, mode1, mode2, mode3;
4463 mode0 = insn_data[d->icode].operand[0].mode;
4464 mode1 = insn_data[d->icode].operand[1].mode;
4465 mode2 = insn_data[d->icode].operand[2].mode;
4466 mode3 = insn_data[d->icode].operand[3].mode;
4468 /* When all four are of the same mode. */
4469 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4474 type = v4si_ftype_v4si_v4si_v4si;
4477 type = v4sf_ftype_v4sf_v4sf_v4sf;
4480 type = v8hi_ftype_v8hi_v8hi_v8hi;
4483 type = v16qi_ftype_v16qi_v16qi_v16qi;
4489 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4494 type = v4si_ftype_v4si_v4si_v16qi;
4497 type = v4sf_ftype_v4sf_v4sf_v16qi;
4500 type = v8hi_ftype_v8hi_v8hi_v16qi;
4503 type = v16qi_ftype_v16qi_v16qi_v16qi;
4509 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4510 && mode3 == V4SImode)
4511 type = v4si_ftype_v16qi_v16qi_v4si;
4512 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4513 && mode3 == V4SImode)
4514 type = v4si_ftype_v8hi_v8hi_v4si;
4515 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4516 && mode3 == V4SImode)
4517 type = v4sf_ftype_v4sf_v4sf_v4si;
4519 /* vchar, vchar, vchar, 4 bit literal. */
4520 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4522 type = v16qi_ftype_v16qi_v16qi_char;
4524 /* vshort, vshort, vshort, 4 bit literal. */
4525 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4527 type = v8hi_ftype_v8hi_v8hi_char;
4529 /* vint, vint, vint, 4 bit literal. */
4530 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4532 type = v4si_ftype_v4si_v4si_char;
4534 /* vfloat, vfloat, vfloat, 4 bit literal. */
4535 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4537 type = v4sf_ftype_v4sf_v4sf_char;
4542 def_builtin (d->mask, d->name, type, d->code);
4545 /* Add the DST variants. */
4546 d = (struct builtin_description *) bdesc_dst;
4547 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4548 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4550 /* Initialize the predicates. */
4551 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4552 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4554 enum machine_mode mode1;
4557 mode1 = insn_data[dp->icode].operand[1].mode;
4562 type = int_ftype_int_v4si_v4si;
4565 type = int_ftype_int_v8hi_v8hi;
4568 type = int_ftype_int_v16qi_v16qi;
4571 type = int_ftype_int_v4sf_v4sf;
4577 def_builtin (dp->mask, dp->name, type, dp->code);
4580 /* Add the simple binary operators. */
4581 d = (struct builtin_description *) bdesc_2arg;
4582 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4584 enum machine_mode mode0, mode1, mode2;
4590 mode0 = insn_data[d->icode].operand[0].mode;
4591 mode1 = insn_data[d->icode].operand[1].mode;
4592 mode2 = insn_data[d->icode].operand[2].mode;
4594 /* When all three operands are of the same mode. */
4595 if (mode0 == mode1 && mode1 == mode2)
4600 type = v4sf_ftype_v4sf_v4sf;
4603 type = v4si_ftype_v4si_v4si;
4606 type = v16qi_ftype_v16qi_v16qi;
4609 type = v8hi_ftype_v8hi_v8hi;
4616 /* A few other combos we really don't want to do manually. */
4618 /* vint, vfloat, vfloat. */
4619 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4620 type = v4si_ftype_v4sf_v4sf;
4622 /* vshort, vchar, vchar. */
4623 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4624 type = v8hi_ftype_v16qi_v16qi;
4626 /* vint, vshort, vshort. */
4627 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4628 type = v4si_ftype_v8hi_v8hi;
4630 /* vshort, vint, vint. */
4631 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4632 type = v8hi_ftype_v4si_v4si;
4634 /* vchar, vshort, vshort. */
4635 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4636 type = v16qi_ftype_v8hi_v8hi;
4638 /* vint, vchar, vint. */
4639 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4640 type = v4si_ftype_v16qi_v4si;
4642 /* vint, vchar, vchar. */
4643 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4644 type = v4si_ftype_v16qi_v16qi;
4646 /* vint, vshort, vint. */
4647 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4648 type = v4si_ftype_v8hi_v4si;
4650 /* vint, vint, 5 bit literal. */
4651 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4652 type = v4si_ftype_v4si_char;
4654 /* vshort, vshort, 5 bit literal. */
4655 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4656 type = v8hi_ftype_v8hi_char;
4658 /* vchar, vchar, 5 bit literal. */
4659 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4660 type = v16qi_ftype_v16qi_char;
4662 /* vfloat, vint, 5 bit literal. */
4663 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4664 type = v4sf_ftype_v4si_char;
4666 /* vint, vfloat, 5 bit literal. */
4667 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4668 type = v4si_ftype_v4sf_char;
4671 else if (mode0 == SImode)
4676 type = int_ftype_v4si_v4si;
4679 type = int_ftype_v4sf_v4sf;
4682 type = int_ftype_v16qi_v16qi;
4685 type = int_ftype_v8hi_v8hi;
4695 def_builtin (d->mask, d->name, type, d->code);
4698 /* Add the simple unary operators. */
4699 d = (struct builtin_description *) bdesc_1arg;
4700 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4702 enum machine_mode mode0, mode1;
4708 mode0 = insn_data[d->icode].operand[0].mode;
4709 mode1 = insn_data[d->icode].operand[1].mode;
4711 if (mode0 == V4SImode && mode1 == QImode)
4712 type = v4si_ftype_char;
4713 else if (mode0 == V8HImode && mode1 == QImode)
4714 type = v8hi_ftype_char;
4715 else if (mode0 == V16QImode && mode1 == QImode)
4716 type = v16qi_ftype_char;
4717 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4718 type = v4sf_ftype_v4sf;
4719 else if (mode0 == V8HImode && mode1 == V16QImode)
4720 type = v8hi_ftype_v16qi;
4721 else if (mode0 == V4SImode && mode1 == V8HImode)
4722 type = v4si_ftype_v8hi;
4726 def_builtin (d->mask, d->name, type, d->code);
4731 /* Generate a memory reference for expand_block_move, copying volatile,
4732 and other bits from an original memory reference. */
4735 expand_block_move_mem (mode, addr, orig_mem)
4736 enum machine_mode mode;
4740 rtx mem = gen_rtx_MEM (mode, addr);
4742 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4746 /* Expand a block move operation, and return 1 if successful. Return 0
4747 if we should let the compiler generate normal code.
4749 operands[0] is the destination
4750 operands[1] is the source
4751 operands[2] is the length
4752 operands[3] is the alignment */
4754 #define MAX_MOVE_REG 4
4757 expand_block_move (operands)
4760 rtx orig_dest = operands[0];
4761 rtx orig_src = operands[1];
4762 rtx bytes_rtx = operands[2];
4763 rtx align_rtx = operands[3];
4764 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4775 rtx stores[MAX_MOVE_REG];
4778 /* If this is not a fixed size move, just call memcpy */
4782 /* If this is not a fixed size alignment, abort */
4783 if (GET_CODE (align_rtx) != CONST_INT)
4785 align = INTVAL (align_rtx);
4787 /* Anything to move? */
4788 bytes = INTVAL (bytes_rtx);
4792 /* store_one_arg depends on expand_block_move to handle at least the size of
4793 reg_parm_stack_space. */
4794 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4797 /* Move the address into scratch registers. */
4798 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4799 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4801 if (TARGET_STRING) /* string instructions are available */
4803 for ( ; bytes > 0; bytes -= move_bytes)
4805 if (bytes > 24 /* move up to 32 bytes at a time */
4813 && ! fixed_regs[12])
4815 move_bytes = (bytes > 32) ? 32 : bytes;
4816 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4819 expand_block_move_mem (BLKmode,
4822 GEN_INT ((move_bytes == 32)
4826 else if (bytes > 16 /* move up to 24 bytes at a time */
4832 && ! fixed_regs[10])
4834 move_bytes = (bytes > 24) ? 24 : bytes;
4835 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4838 expand_block_move_mem (BLKmode,
4841 GEN_INT (move_bytes),
4844 else if (bytes > 8 /* move up to 16 bytes at a time */
4850 move_bytes = (bytes > 16) ? 16 : bytes;
4851 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4854 expand_block_move_mem (BLKmode,
4857 GEN_INT (move_bytes),
4860 else if (bytes >= 8 && TARGET_POWERPC64
4861 /* 64-bit loads and stores require word-aligned
4863 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4866 tmp_reg = gen_reg_rtx (DImode);
4867 emit_move_insn (tmp_reg,
4868 expand_block_move_mem (DImode,
4869 src_reg, orig_src));
4870 emit_move_insn (expand_block_move_mem (DImode,
4871 dest_reg, orig_dest),
4874 else if (bytes > 4 && !TARGET_POWERPC64)
4875 { /* move up to 8 bytes at a time */
4876 move_bytes = (bytes > 8) ? 8 : bytes;
4877 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4880 expand_block_move_mem (BLKmode,
4883 GEN_INT (move_bytes),
4886 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4887 { /* move 4 bytes */
4889 tmp_reg = gen_reg_rtx (SImode);
4890 emit_move_insn (tmp_reg,
4891 expand_block_move_mem (SImode,
4892 src_reg, orig_src));
4893 emit_move_insn (expand_block_move_mem (SImode,
4894 dest_reg, orig_dest),
4897 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4898 { /* move 2 bytes */
4900 tmp_reg = gen_reg_rtx (HImode);
4901 emit_move_insn (tmp_reg,
4902 expand_block_move_mem (HImode,
4903 src_reg, orig_src));
4904 emit_move_insn (expand_block_move_mem (HImode,
4905 dest_reg, orig_dest),
4908 else if (bytes == 1) /* move 1 byte */
4911 tmp_reg = gen_reg_rtx (QImode);
4912 emit_move_insn (tmp_reg,
4913 expand_block_move_mem (QImode,
4914 src_reg, orig_src));
4915 emit_move_insn (expand_block_move_mem (QImode,
4916 dest_reg, orig_dest),
4920 { /* move up to 4 bytes at a time */
4921 move_bytes = (bytes > 4) ? 4 : bytes;
4922 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
4925 expand_block_move_mem (BLKmode,
4928 GEN_INT (move_bytes),
4932 if (bytes > move_bytes)
4934 if (! TARGET_POWERPC64)
4936 emit_insn (gen_addsi3 (src_reg, src_reg,
4937 GEN_INT (move_bytes)));
4938 emit_insn (gen_addsi3 (dest_reg, dest_reg,
4939 GEN_INT (move_bytes)));
4943 emit_insn (gen_adddi3 (src_reg, src_reg,
4944 GEN_INT (move_bytes)));
4945 emit_insn (gen_adddi3 (dest_reg, dest_reg,
4946 GEN_INT (move_bytes)));
4952 else /* string instructions not available */
4954 num_reg = offset = 0;
4955 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
4957 /* Calculate the correct offset for src/dest */
4961 dest_addr = dest_reg;
4965 src_addr = plus_constant (src_reg, offset);
4966 dest_addr = plus_constant (dest_reg, offset);
4969 /* Generate the appropriate load and store, saving the stores
4971 if (bytes >= 8 && TARGET_POWERPC64
4972 /* 64-bit loads and stores require word-aligned
4974 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4977 tmp_reg = gen_reg_rtx (DImode);
4978 emit_insn (gen_movdi (tmp_reg,
4979 expand_block_move_mem (DImode,
4982 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
4987 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4990 tmp_reg = gen_reg_rtx (SImode);
4991 emit_insn (gen_movsi (tmp_reg,
4992 expand_block_move_mem (SImode,
4995 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5000 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5003 tmp_reg = gen_reg_rtx (HImode);
5004 emit_insn (gen_movhi (tmp_reg,
5005 expand_block_move_mem (HImode,
5008 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5016 tmp_reg = gen_reg_rtx (QImode);
5017 emit_insn (gen_movqi (tmp_reg,
5018 expand_block_move_mem (QImode,
5021 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5027 if (num_reg >= MAX_MOVE_REG)
5029 for (i = 0; i < num_reg; i++)
5030 emit_insn (stores[i]);
5035 for (i = 0; i < num_reg; i++)
5036 emit_insn (stores[i]);
5043 /* Return 1 if OP is a load multiple operation. It is known to be a
5044 PARALLEL and the first section will be tested. */
5047 load_multiple_operation (op, mode)
5049 enum machine_mode mode ATTRIBUTE_UNUSED;
5051 int count = XVECLEN (op, 0);
5052 unsigned int dest_regno;
5056 /* Perform a quick check so we don't blow up below. */
5058 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5059 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5060 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5063 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5064 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5066 for (i = 1; i < count; i++)
5068 rtx elt = XVECEXP (op, 0, i);
5070 if (GET_CODE (elt) != SET
5071 || GET_CODE (SET_DEST (elt)) != REG
5072 || GET_MODE (SET_DEST (elt)) != SImode
5073 || REGNO (SET_DEST (elt)) != dest_regno + i
5074 || GET_CODE (SET_SRC (elt)) != MEM
5075 || GET_MODE (SET_SRC (elt)) != SImode
5076 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5077 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5078 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5079 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5086 /* Similar, but tests for store multiple. Here, the second vector element
5087 is a CLOBBER. It will be tested later. */
5090 store_multiple_operation (op, mode)
5092 enum machine_mode mode ATTRIBUTE_UNUSED;
5094 int count = XVECLEN (op, 0) - 1;
5095 unsigned int src_regno;
5099 /* Perform a quick check so we don't blow up below. */
5101 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5102 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5103 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5106 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5107 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5109 for (i = 1; i < count; i++)
5111 rtx elt = XVECEXP (op, 0, i + 1);
5113 if (GET_CODE (elt) != SET
5114 || GET_CODE (SET_SRC (elt)) != REG
5115 || GET_MODE (SET_SRC (elt)) != SImode
5116 || REGNO (SET_SRC (elt)) != src_regno + i
5117 || GET_CODE (SET_DEST (elt)) != MEM
5118 || GET_MODE (SET_DEST (elt)) != SImode
5119 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5120 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5121 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5122 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5129 /* Return 1 for a parallel vrsave operation. */
5132 vrsave_operation (op, mode)
5134 enum machine_mode mode ATTRIBUTE_UNUSED;
5136 int count = XVECLEN (op, 0);
5137 unsigned int dest_regno, src_regno;
5141 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5142 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5143 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5146 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5147 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5149 if (dest_regno != VRSAVE_REGNO
5150 && src_regno != VRSAVE_REGNO)
5153 for (i = 1; i < count; i++)
5155 rtx elt = XVECEXP (op, 0, i);
5157 if (GET_CODE (elt) != CLOBBER
5158 && GET_CODE (elt) != SET)
5165 /* Return 1 for an PARALLEL suitable for mtcrf. */
5168 mtcrf_operation (op, mode)
5170 enum machine_mode mode ATTRIBUTE_UNUSED;
5172 int count = XVECLEN (op, 0);
5176 /* Perform a quick check so we don't blow up below. */
5178 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5179 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5180 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5182 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5184 if (GET_CODE (src_reg) != REG
5185 || GET_MODE (src_reg) != SImode
5186 || ! INT_REGNO_P (REGNO (src_reg)))
5189 for (i = 0; i < count; i++)
5191 rtx exp = XVECEXP (op, 0, i);
5195 if (GET_CODE (exp) != SET
5196 || GET_CODE (SET_DEST (exp)) != REG
5197 || GET_MODE (SET_DEST (exp)) != CCmode
5198 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5200 unspec = SET_SRC (exp);
5201 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5203 if (GET_CODE (unspec) != UNSPEC
5204 || XINT (unspec, 1) != 20
5205 || XVECLEN (unspec, 0) != 2
5206 || XVECEXP (unspec, 0, 0) != src_reg
5207 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5208 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5214 /* Return 1 for an PARALLEL suitable for lmw. */
5217 lmw_operation (op, mode)
5219 enum machine_mode mode ATTRIBUTE_UNUSED;
5221 int count = XVECLEN (op, 0);
5222 unsigned int dest_regno;
5224 unsigned int base_regno;
5225 HOST_WIDE_INT offset;
5228 /* Perform a quick check so we don't blow up below. */
5230 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5231 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5232 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5235 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5236 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5239 || count != 32 - (int) dest_regno)
5242 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5245 base_regno = REGNO (src_addr);
5246 if (base_regno == 0)
5249 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5251 offset = INTVAL (XEXP (src_addr, 1));
5252 base_regno = REGNO (XEXP (src_addr, 0));
5257 for (i = 0; i < count; i++)
5259 rtx elt = XVECEXP (op, 0, i);
5262 HOST_WIDE_INT newoffset;
5264 if (GET_CODE (elt) != SET
5265 || GET_CODE (SET_DEST (elt)) != REG
5266 || GET_MODE (SET_DEST (elt)) != SImode
5267 || REGNO (SET_DEST (elt)) != dest_regno + i
5268 || GET_CODE (SET_SRC (elt)) != MEM
5269 || GET_MODE (SET_SRC (elt)) != SImode)
5271 newaddr = XEXP (SET_SRC (elt), 0);
5272 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5277 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5279 addr_reg = XEXP (newaddr, 0);
5280 newoffset = INTVAL (XEXP (newaddr, 1));
5284 if (REGNO (addr_reg) != base_regno
5285 || newoffset != offset + 4 * i)
5292 /* Return 1 for an PARALLEL suitable for stmw. */
5295 stmw_operation (op, mode)
5297 enum machine_mode mode ATTRIBUTE_UNUSED;
5299 int count = XVECLEN (op, 0);
5300 unsigned int src_regno;
5302 unsigned int base_regno;
5303 HOST_WIDE_INT offset;
5306 /* Perform a quick check so we don't blow up below. */
5308 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5309 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5310 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5313 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5314 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5317 || count != 32 - (int) src_regno)
5320 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5323 base_regno = REGNO (dest_addr);
5324 if (base_regno == 0)
5327 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5329 offset = INTVAL (XEXP (dest_addr, 1));
5330 base_regno = REGNO (XEXP (dest_addr, 0));
5335 for (i = 0; i < count; i++)
5337 rtx elt = XVECEXP (op, 0, i);
5340 HOST_WIDE_INT newoffset;
5342 if (GET_CODE (elt) != SET
5343 || GET_CODE (SET_SRC (elt)) != REG
5344 || GET_MODE (SET_SRC (elt)) != SImode
5345 || REGNO (SET_SRC (elt)) != src_regno + i
5346 || GET_CODE (SET_DEST (elt)) != MEM
5347 || GET_MODE (SET_DEST (elt)) != SImode)
5349 newaddr = XEXP (SET_DEST (elt), 0);
5350 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5355 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5357 addr_reg = XEXP (newaddr, 0);
5358 newoffset = INTVAL (XEXP (newaddr, 1));
5362 if (REGNO (addr_reg) != base_regno
5363 || newoffset != offset + 4 * i)
5370 /* A validation routine: say whether CODE, a condition code, and MODE
5371 match. The other alternatives either don't make sense or should
5372 never be generated. */
5375 validate_condition_mode (code, mode)
5377 enum machine_mode mode;
5379 if (GET_RTX_CLASS (code) != '<'
5380 || GET_MODE_CLASS (mode) != MODE_CC)
5383 /* These don't make sense. */
5384 if ((code == GT || code == LT || code == GE || code == LE)
5385 && mode == CCUNSmode)
5388 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5389 && mode != CCUNSmode)
5392 if (mode != CCFPmode
5393 && (code == ORDERED || code == UNORDERED
5394 || code == UNEQ || code == LTGT
5395 || code == UNGT || code == UNLT
5396 || code == UNGE || code == UNLE))
5399 /* These should never be generated except for
5400 flag_unsafe_math_optimizations. */
5401 if (mode == CCFPmode
5402 && ! flag_unsafe_math_optimizations
5403 && (code == LE || code == GE
5404 || code == UNEQ || code == LTGT
5405 || code == UNGT || code == UNLT))
5408 /* These are invalid; the information is not there. */
5409 if (mode == CCEQmode
5410 && code != EQ && code != NE)
5414 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5415 We only check the opcode against the mode of the CC value here. */
5418 branch_comparison_operator (op, mode)
5420 enum machine_mode mode ATTRIBUTE_UNUSED;
5422 enum rtx_code code = GET_CODE (op);
5423 enum machine_mode cc_mode;
5425 if (GET_RTX_CLASS (code) != '<')
5428 cc_mode = GET_MODE (XEXP (op, 0));
5429 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5432 validate_condition_mode (code, cc_mode);
5437 /* Return 1 if OP is a comparison operation that is valid for a branch
5438 insn and which is true if the corresponding bit in the CC register
5442 branch_positive_comparison_operator (op, mode)
5444 enum machine_mode mode;
5448 if (! branch_comparison_operator (op, mode))
5451 code = GET_CODE (op);
5452 return (code == EQ || code == LT || code == GT
5453 || code == LTU || code == GTU
5454 || code == UNORDERED);
5457 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5458 We check the opcode against the mode of the CC value and disallow EQ or
5459 NE comparisons for integers. */
5462 scc_comparison_operator (op, mode)
5464 enum machine_mode mode;
5466 enum rtx_code code = GET_CODE (op);
5467 enum machine_mode cc_mode;
5469 if (GET_MODE (op) != mode && mode != VOIDmode)
5472 if (GET_RTX_CLASS (code) != '<')
5475 cc_mode = GET_MODE (XEXP (op, 0));
5476 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5479 validate_condition_mode (code, cc_mode);
5481 if (code == NE && cc_mode != CCFPmode)
5488 trap_comparison_operator (op, mode)
5490 enum machine_mode mode;
5492 if (mode != VOIDmode && mode != GET_MODE (op))
5494 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5498 boolean_operator (op, mode)
5500 enum machine_mode mode ATTRIBUTE_UNUSED;
5502 enum rtx_code code = GET_CODE (op);
5503 return (code == AND || code == IOR || code == XOR);
5507 boolean_or_operator (op, mode)
5509 enum machine_mode mode ATTRIBUTE_UNUSED;
5511 enum rtx_code code = GET_CODE (op);
5512 return (code == IOR || code == XOR);
5516 min_max_operator (op, mode)
5518 enum machine_mode mode ATTRIBUTE_UNUSED;
5520 enum rtx_code code = GET_CODE (op);
5521 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5524 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5525 mask required to convert the result of a rotate insn into a shift
5526 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
5529 includes_lshift_p (shiftop, andop)
5533 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5535 shift_mask <<= INTVAL (shiftop);
5537 return (INTVAL (andop) & ~shift_mask) == 0;
5540 /* Similar, but for right shift. */
5543 includes_rshift_p (shiftop, andop)
5547 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5549 shift_mask >>= INTVAL (shiftop);
5551 return (INTVAL (andop) & ~shift_mask) == 0;
5554 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5555 to perform a left shift. It must have exactly SHIFTOP least
5556 signifigant 0's, then one or more 1's, then zero or more 0's. */
5559 includes_rldic_lshift_p (shiftop, andop)
5563 if (GET_CODE (andop) == CONST_INT)
5565 HOST_WIDE_INT c, lsb, shift_mask;
5568 if (c == 0 || c == ~0)
5572 shift_mask <<= INTVAL (shiftop);
5574 /* Find the least signifigant one bit. */
5577 /* It must coincide with the LSB of the shift mask. */
5578 if (-lsb != shift_mask)
5581 /* Invert to look for the next transition (if any). */
5584 /* Remove the low group of ones (originally low group of zeros). */
5587 /* Again find the lsb, and check we have all 1's above. */
5591 else if (GET_CODE (andop) == CONST_DOUBLE
5592 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5594 HOST_WIDE_INT low, high, lsb;
5595 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5597 low = CONST_DOUBLE_LOW (andop);
5598 if (HOST_BITS_PER_WIDE_INT < 64)
5599 high = CONST_DOUBLE_HIGH (andop);
5601 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5602 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5605 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5607 shift_mask_high = ~0;
5608 if (INTVAL (shiftop) > 32)
5609 shift_mask_high <<= INTVAL (shiftop) - 32;
5613 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5620 return high == -lsb;
5623 shift_mask_low = ~0;
5624 shift_mask_low <<= INTVAL (shiftop);
5628 if (-lsb != shift_mask_low)
5631 if (HOST_BITS_PER_WIDE_INT < 64)
5636 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5639 return high == -lsb;
5643 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5649 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5650 to perform a left shift. It must have SHIFTOP or more least
5651 signifigant 0's, with the remainder of the word 1's. */
5654 includes_rldicr_lshift_p (shiftop, andop)
5658 if (GET_CODE (andop) == CONST_INT)
5660 HOST_WIDE_INT c, lsb, shift_mask;
5663 shift_mask <<= INTVAL (shiftop);
5666 /* Find the least signifigant one bit. */
5669 /* It must be covered by the shift mask.
5670 This test also rejects c == 0. */
5671 if ((lsb & shift_mask) == 0)
5674 /* Check we have all 1's above the transition, and reject all 1's. */
5675 return c == -lsb && lsb != 1;
5677 else if (GET_CODE (andop) == CONST_DOUBLE
5678 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5680 HOST_WIDE_INT low, lsb, shift_mask_low;
5682 low = CONST_DOUBLE_LOW (andop);
5684 if (HOST_BITS_PER_WIDE_INT < 64)
5686 HOST_WIDE_INT high, shift_mask_high;
5688 high = CONST_DOUBLE_HIGH (andop);
5692 shift_mask_high = ~0;
5693 if (INTVAL (shiftop) > 32)
5694 shift_mask_high <<= INTVAL (shiftop) - 32;
5698 if ((lsb & shift_mask_high) == 0)
5701 return high == -lsb;
5707 shift_mask_low = ~0;
5708 shift_mask_low <<= INTVAL (shiftop);
5712 if ((lsb & shift_mask_low) == 0)
5715 return low == -lsb && lsb != 1;
5721 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5722 for lfq and stfq insns.
5724 Note reg1 and reg2 *must* be hard registers. To be sure we will
5725 abort if we are passed pseudo registers. */
5728 registers_ok_for_quad_peep (reg1, reg2)
5731 /* We might have been passed a SUBREG. */
5732 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5735 return (REGNO (reg1) == REGNO (reg2) - 1);
5738 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5739 addr1 and addr2 must be in consecutive memory locations
5740 (addr2 == addr1 + 8). */
5743 addrs_ok_for_quad_peep (addr1, addr2)
5750 /* Extract an offset (if used) from the first addr. */
5751 if (GET_CODE (addr1) == PLUS)
5753 /* If not a REG, return zero. */
5754 if (GET_CODE (XEXP (addr1, 0)) != REG)
5758 reg1 = REGNO (XEXP (addr1, 0));
5759 /* The offset must be constant! */
5760 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5762 offset1 = INTVAL (XEXP (addr1, 1));
5765 else if (GET_CODE (addr1) != REG)
5769 reg1 = REGNO (addr1);
5770 /* This was a simple (mem (reg)) expression. Offset is 0. */
5774 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5775 if (GET_CODE (addr2) != PLUS)
5778 if (GET_CODE (XEXP (addr2, 0)) != REG
5779 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5782 if (reg1 != REGNO (XEXP (addr2, 0)))
5785 /* The offset for the second addr must be 8 more than the first addr. */
5786 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5789 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5794 /* Return the register class of a scratch register needed to copy IN into
5795 or out of a register in CLASS in MODE. If it can be done directly,
5796 NO_REGS is returned. */
5799 secondary_reload_class (class, mode, in)
5800 enum reg_class class;
5801 enum machine_mode mode ATTRIBUTE_UNUSED;
5806 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5808 /* We cannot copy a symbolic operand directly into anything
5809 other than BASE_REGS for TARGET_ELF. So indicate that a
5810 register from BASE_REGS is needed as an intermediate
5813 On Darwin, pic addresses require a load from memory, which
5814 needs a base register. */
5815 if (class != BASE_REGS
5816 && (GET_CODE (in) == SYMBOL_REF
5817 || GET_CODE (in) == HIGH
5818 || GET_CODE (in) == LABEL_REF
5819 || GET_CODE (in) == CONST))
5823 if (GET_CODE (in) == REG)
5826 if (regno >= FIRST_PSEUDO_REGISTER)
5828 regno = true_regnum (in);
5829 if (regno >= FIRST_PSEUDO_REGISTER)
5833 else if (GET_CODE (in) == SUBREG)
5835 regno = true_regnum (in);
5836 if (regno >= FIRST_PSEUDO_REGISTER)
5842 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5844 if (class == GENERAL_REGS || class == BASE_REGS
5845 || (regno >= 0 && INT_REGNO_P (regno)))
5848 /* Constants, memory, and FP registers can go into FP registers. */
5849 if ((regno == -1 || FP_REGNO_P (regno))
5850 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5853 /* Memory, and AltiVec registers can go into AltiVec registers. */
5854 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5855 && class == ALTIVEC_REGS)
5858 /* We can copy among the CR registers. */
5859 if ((class == CR_REGS || class == CR0_REGS)
5860 && regno >= 0 && CR_REGNO_P (regno))
5863 /* Otherwise, we need GENERAL_REGS. */
5864 return GENERAL_REGS;
5867 /* Given a comparison operation, return the bit number in CCR to test. We
5868 know this is a valid comparison.
5870 SCC_P is 1 if this is for an scc. That means that %D will have been
5871 used instead of %C, so the bits will be in different places.
5873 Return -1 if OP isn't a valid comparison for some reason. */
5880 enum rtx_code code = GET_CODE (op);
5881 enum machine_mode cc_mode;
5886 if (GET_RTX_CLASS (code) != '<')
5891 if (GET_CODE (reg) != REG
5892 || ! CR_REGNO_P (REGNO (reg)))
5895 cc_mode = GET_MODE (reg);
5896 cc_regnum = REGNO (reg);
5897 base_bit = 4 * (cc_regnum - CR0_REGNO);
5899 validate_condition_mode (code, cc_mode);
5904 return scc_p ? base_bit + 3 : base_bit + 2;
5906 return base_bit + 2;
5907 case GT: case GTU: case UNLE:
5908 return base_bit + 1;
5909 case LT: case LTU: case UNGE:
5911 case ORDERED: case UNORDERED:
5912 return base_bit + 3;
5915 /* If scc, we will have done a cror to put the bit in the
5916 unordered position. So test that bit. For integer, this is ! LT
5917 unless this is an scc insn. */
5918 return scc_p ? base_bit + 3 : base_bit;
5921 return scc_p ? base_bit + 3 : base_bit + 1;
5928 /* Return the GOT register. */
5931 rs6000_got_register (value)
5932 rtx value ATTRIBUTE_UNUSED;
5934 /* The second flow pass currently (June 1999) can't update
5935 regs_ever_live without disturbing other parts of the compiler, so
5936 update it here to make the prolog/epilogue code happy. */
5937 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5938 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5940 current_function_uses_pic_offset_table = 1;
5942 return pic_offset_table_rtx;
5945 /* Functions to init, mark and free struct machine_function.
5946 These will be called, via pointer variables,
5947 from push_function_context and pop_function_context. */
5950 rs6000_init_machine_status (p)
5953 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5957 rs6000_free_machine_status (p)
5960 if (p->machine == NULL)
5968 /* Print an operand. Recognize special options, documented below. */
5971 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5972 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5974 #define SMALL_DATA_RELOC "sda21"
5975 #define SMALL_DATA_REG 0
5979 print_operand (file, x, code)
5987 /* These macros test for integers and extract the low-order bits. */
5989 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
5990 && GET_MODE (X) == VOIDmode)
5992 #define INT_LOWPART(X) \
5993 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
5998 /* Write out an instruction after the call which may be replaced
5999 with glue code by the loader. This depends on the AIX version. */
6000 asm_fprintf (file, RS6000_CALL_GLUE);
6003 /* %a is output_address. */
6006 /* If X is a constant integer whose low-order 5 bits are zero,
6007 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6008 in the AIX assembler where "sri" with a zero shift count
6009 writes a trash instruction. */
6010 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6017 /* If constant, low-order 16 bits of constant, unsigned.
6018 Otherwise, write normally. */
6020 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6022 print_operand (file, x, 0);
6026 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6027 for 64-bit mask direction. */
6028 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6031 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6035 /* There used to be a comment for 'C' reading "This is an
6036 optional cror needed for certain floating-point
6037 comparisons. Otherwise write nothing." */
6039 /* Similar, except that this is for an scc, so we must be able to
6040 encode the test in a single bit that is one. We do the above
6041 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6042 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6043 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6045 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6047 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6049 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6052 else if (GET_CODE (x) == NE)
6054 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6056 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6057 base_bit + 2, base_bit + 2);
6062 /* X is a CR register. Print the number of the EQ bit of the CR */
6063 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6064 output_operand_lossage ("invalid %%E value");
6066 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6070 /* X is a CR register. Print the shift count needed to move it
6071 to the high-order four bits. */
6072 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6073 output_operand_lossage ("invalid %%f value");
6075 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6079 /* Similar, but print the count for the rotate in the opposite
6081 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6082 output_operand_lossage ("invalid %%F value");
6084 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6088 /* X is a constant integer. If it is negative, print "m",
6089 otherwise print "z". This is to make a aze or ame insn. */
6090 if (GET_CODE (x) != CONST_INT)
6091 output_operand_lossage ("invalid %%G value");
6092 else if (INTVAL (x) >= 0)
6099 /* If constant, output low-order five bits. Otherwise, write
6102 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6104 print_operand (file, x, 0);
6108 /* If constant, output low-order six bits. Otherwise, write
6111 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6113 print_operand (file, x, 0);
6117 /* Print `i' if this is a constant, else nothing. */
6123 /* Write the bit number in CCR for jump. */
6126 output_operand_lossage ("invalid %%j code");
6128 fprintf (file, "%d", i);
6132 /* Similar, but add one for shift count in rlinm for scc and pass
6133 scc flag to `ccr_bit'. */
6136 output_operand_lossage ("invalid %%J code");
6138 /* If we want bit 31, write a shift count of zero, not 32. */
6139 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6143 /* X must be a constant. Write the 1's complement of the
6146 output_operand_lossage ("invalid %%k value");
6148 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6152 /* X must be a symbolic constant on ELF. Write an
6153 expression suitable for an 'addi' that adds in the low 16
6155 if (GET_CODE (x) != CONST)
6157 print_operand_address (file, x);
6162 if (GET_CODE (XEXP (x, 0)) != PLUS
6163 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6164 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6165 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6166 output_operand_lossage ("invalid %%K value");
6167 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6169 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6173 /* %l is output_asm_label. */
6176 /* Write second word of DImode or DFmode reference. Works on register
6177 or non-indexed memory only. */
6178 if (GET_CODE (x) == REG)
6179 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6180 else if (GET_CODE (x) == MEM)
6182 /* Handle possible auto-increment. Since it is pre-increment and
6183 we have already done it, we can just use an offset of word. */
6184 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6185 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6186 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6189 output_address (XEXP (adjust_address_nv (x, SImode,
6193 if (small_data_operand (x, GET_MODE (x)))
6194 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6195 reg_names[SMALL_DATA_REG]);
6200 /* MB value for a mask operand. */
6201 if (! mask_operand (x, VOIDmode))
6202 output_operand_lossage ("invalid %%m value");
6204 val = INT_LOWPART (x);
6206 /* If the high bit is set and the low bit is not, the value is zero.
6207 If the high bit is zero, the value is the first 1 bit we find from
6209 if ((val & 0x80000000) && ((val & 1) == 0))
6214 else if ((val & 0x80000000) == 0)
6216 for (i = 1; i < 32; i++)
6217 if ((val <<= 1) & 0x80000000)
6219 fprintf (file, "%d", i);
6223 /* Otherwise, look for the first 0 bit from the right. The result is its
6224 number plus 1. We know the low-order bit is one. */
6225 for (i = 0; i < 32; i++)
6226 if (((val >>= 1) & 1) == 0)
6229 /* If we ended in ...01, i would be 0. The correct value is 31, so
6231 fprintf (file, "%d", 31 - i);
6235 /* ME value for a mask operand. */
6236 if (! mask_operand (x, VOIDmode))
6237 output_operand_lossage ("invalid %%M value");
6239 val = INT_LOWPART (x);
6241 /* If the low bit is set and the high bit is not, the value is 31.
6242 If the low bit is zero, the value is the first 1 bit we find from
6244 if ((val & 1) && ((val & 0x80000000) == 0))
6249 else if ((val & 1) == 0)
6251 for (i = 0; i < 32; i++)
6252 if ((val >>= 1) & 1)
6255 /* If we had ....10, i would be 0. The result should be
6256 30, so we need 30 - i. */
6257 fprintf (file, "%d", 30 - i);
6261 /* Otherwise, look for the first 0 bit from the left. The result is its
6262 number minus 1. We know the high-order bit is one. */
6263 for (i = 0; i < 32; i++)
6264 if (((val <<= 1) & 0x80000000) == 0)
6267 fprintf (file, "%d", i);
6270 /* %n outputs the negative of its operand. */
6273 /* Write the number of elements in the vector times 4. */
6274 if (GET_CODE (x) != PARALLEL)
6275 output_operand_lossage ("invalid %%N value");
6277 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6281 /* Similar, but subtract 1 first. */
6282 if (GET_CODE (x) != PARALLEL)
6283 output_operand_lossage ("invalid %%O value");
6285 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6289 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6291 || INT_LOWPART (x) < 0
6292 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6293 output_operand_lossage ("invalid %%p value");
6295 fprintf (file, "%d", i);
6299 /* The operand must be an indirect memory reference. The result
6300 is the register number. */
6301 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6302 || REGNO (XEXP (x, 0)) >= 32)
6303 output_operand_lossage ("invalid %%P value");
6305 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6309 /* This outputs the logical code corresponding to a boolean
6310 expression. The expression may have one or both operands
6311 negated (if one, only the first one). For condition register
6312 logical operations, it will also treat the negated
6313 CR codes as NOTs, but not handle NOTs of them. */
6315 const char *const *t = 0;
6317 enum rtx_code code = GET_CODE (x);
6318 static const char * const tbl[3][3] = {
6319 { "and", "andc", "nor" },
6320 { "or", "orc", "nand" },
6321 { "xor", "eqv", "xor" } };
6325 else if (code == IOR)
6327 else if (code == XOR)
6330 output_operand_lossage ("invalid %%q value");
6332 if (GET_CODE (XEXP (x, 0)) != NOT)
6336 if (GET_CODE (XEXP (x, 1)) == NOT)
6347 /* X is a CR register. Print the mask for `mtcrf'. */
6348 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6349 output_operand_lossage ("invalid %%R value");
6351 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6355 /* Low 5 bits of 32 - value */
6357 output_operand_lossage ("invalid %%s value");
6359 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6363 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6364 CONST_INT 32-bit mask is considered sign-extended so any
6365 transition must occur within the CONST_INT, not on the boundary. */
6366 if (! mask64_operand (x, VOIDmode))
6367 output_operand_lossage ("invalid %%S value");
6369 val = INT_LOWPART (x);
6371 if (val & 1) /* Clear Left */
6373 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6374 if (!((val >>= 1) & 1))
6377 #if HOST_BITS_PER_WIDE_INT == 32
6378 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6380 val = CONST_DOUBLE_HIGH (x);
6385 for (i = 32; i < 64; i++)
6386 if (!((val >>= 1) & 1))
6390 /* i = index of last set bit from right
6391 mask begins at 63 - i from left */
6393 output_operand_lossage ("%%S computed all 1's mask");
6395 fprintf (file, "%d", 63 - i);
6398 else /* Clear Right */
6400 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6401 if ((val >>= 1) & 1)
6404 #if HOST_BITS_PER_WIDE_INT == 32
6405 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6407 val = CONST_DOUBLE_HIGH (x);
6409 if (val == (HOST_WIDE_INT) -1)
6412 for (i = 32; i < 64; i++)
6413 if ((val >>= 1) & 1)
6417 /* i = index of last clear bit from right
6418 mask ends at 62 - i from left */
6420 output_operand_lossage ("%%S computed all 0's mask");
6422 fprintf (file, "%d", 62 - i);
6427 /* Print the symbolic name of a branch target register. */
6428 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6429 && REGNO (x) != COUNT_REGISTER_REGNUM))
6430 output_operand_lossage ("invalid %%T value");
6431 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6432 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6434 fputs ("ctr", file);
6438 /* High-order 16 bits of constant for use in unsigned operand. */
6440 output_operand_lossage ("invalid %%u value");
6442 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6443 (INT_LOWPART (x) >> 16) & 0xffff);
6447 /* High-order 16 bits of constant for use in signed operand. */
6449 output_operand_lossage ("invalid %%v value");
6451 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6452 (INT_LOWPART (x) >> 16) & 0xffff);
6456 /* Print `u' if this has an auto-increment or auto-decrement. */
6457 if (GET_CODE (x) == MEM
6458 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6459 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6464 /* Print the trap code for this operand. */
6465 switch (GET_CODE (x))
6468 fputs ("eq", file); /* 4 */
6471 fputs ("ne", file); /* 24 */
6474 fputs ("lt", file); /* 16 */
6477 fputs ("le", file); /* 20 */
6480 fputs ("gt", file); /* 8 */
6483 fputs ("ge", file); /* 12 */
6486 fputs ("llt", file); /* 2 */
6489 fputs ("lle", file); /* 6 */
6492 fputs ("lgt", file); /* 1 */
6495 fputs ("lge", file); /* 5 */
6503 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6506 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6507 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6509 print_operand (file, x, 0);
6513 /* MB value for a PowerPC64 rldic operand. */
6514 val = (GET_CODE (x) == CONST_INT
6515 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6520 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6521 if ((val <<= 1) < 0)
6524 #if HOST_BITS_PER_WIDE_INT == 32
6525 if (GET_CODE (x) == CONST_INT && i >= 0)
6526 i += 32; /* zero-extend high-part was all 0's */
6527 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6529 val = CONST_DOUBLE_LOW (x);
6536 for ( ; i < 64; i++)
6537 if ((val <<= 1) < 0)
6542 fprintf (file, "%d", i + 1);
6546 if (GET_CODE (x) == MEM
6547 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6552 /* Like 'L', for third word of TImode */
6553 if (GET_CODE (x) == REG)
6554 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6555 else if (GET_CODE (x) == MEM)
6557 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6558 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6559 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6561 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6562 if (small_data_operand (x, GET_MODE (x)))
6563 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6564 reg_names[SMALL_DATA_REG]);
6569 /* X is a SYMBOL_REF. Write out the name preceded by a
6570 period and without any trailing data in brackets. Used for function
6571 names. If we are configured for System V (or the embedded ABI) on
6572 the PowerPC, do not emit the period, since those systems do not use
6573 TOCs and the like. */
6574 if (GET_CODE (x) != SYMBOL_REF)
6577 if (XSTR (x, 0)[0] != '.')
6579 switch (DEFAULT_ABI)
6589 case ABI_AIX_NODESC:
6595 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6597 assemble_name (file, XSTR (x, 0));
6602 /* Like 'L', for last word of TImode. */
6603 if (GET_CODE (x) == REG)
6604 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6605 else if (GET_CODE (x) == MEM)
6607 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6608 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6609 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6611 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6612 if (small_data_operand (x, GET_MODE (x)))
6613 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6614 reg_names[SMALL_DATA_REG]);
6618 /* Print AltiVec memory operand. */
6623 if (GET_CODE (x) != MEM)
6628 if (GET_CODE (tmp) == REG)
6629 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6630 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6632 if (REGNO (XEXP (tmp, 0)) == 0)
6633 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6634 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6636 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6637 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6645 if (GET_CODE (x) == REG)
6646 fprintf (file, "%s", reg_names[REGNO (x)]);
6647 else if (GET_CODE (x) == MEM)
6649 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6650 know the width from the mode. */
6651 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6652 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6653 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6654 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6655 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6656 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6658 output_address (XEXP (x, 0));
6661 output_addr_const (file, x);
6665 output_operand_lossage ("invalid %%xn code");
6669 /* Print the address of an operand. */
6672 print_operand_address (file, x)
6676 if (GET_CODE (x) == REG)
6677 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6678 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6679 || GET_CODE (x) == LABEL_REF)
6681 output_addr_const (file, x);
6682 if (small_data_operand (x, GET_MODE (x)))
6683 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6684 reg_names[SMALL_DATA_REG]);
6685 else if (TARGET_TOC)
6688 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6690 if (REGNO (XEXP (x, 0)) == 0)
6691 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6692 reg_names[ REGNO (XEXP (x, 0)) ]);
6694 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6695 reg_names[ REGNO (XEXP (x, 1)) ]);
6697 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6699 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6700 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6703 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6704 && CONSTANT_P (XEXP (x, 1)))
6706 output_addr_const (file, XEXP (x, 1));
6707 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6711 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6712 && CONSTANT_P (XEXP (x, 1)))
6714 fprintf (file, "lo16(");
6715 output_addr_const (file, XEXP (x, 1));
6716 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6719 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6721 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6723 rtx contains_minus = XEXP (x, 1);
6727 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6728 turn it into (sym) for output_addr_const. */
6729 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6730 contains_minus = XEXP (contains_minus, 0);
6732 minus = XEXP (contains_minus, 0);
6733 symref = XEXP (minus, 0);
6734 XEXP (contains_minus, 0) = symref;
6739 name = XSTR (symref, 0);
6740 newname = alloca (strlen (name) + sizeof ("@toc"));
6741 strcpy (newname, name);
6742 strcat (newname, "@toc");
6743 XSTR (symref, 0) = newname;
6745 output_addr_const (file, XEXP (x, 1));
6747 XSTR (symref, 0) = name;
6748 XEXP (contains_minus, 0) = minus;
6751 output_addr_const (file, XEXP (x, 1));
6753 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6759 /* Target hook for assembling integer objects. The powerpc version has
6760 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6761 is defined. It also needs to handle DI-mode objects on 64-bit
6765 rs6000_assemble_integer (x, size, aligned_p)
6770 #ifdef RELOCATABLE_NEEDS_FIXUP
6771 /* Special handling for SI values. */
6772 if (size == 4 && aligned_p)
6774 extern int in_toc_section PARAMS ((void));
6775 static int recurse = 0;
6777 /* For -mrelocatable, we mark all addresses that need to be fixed up
6778 in the .fixup section. */
6779 if (TARGET_RELOCATABLE
6780 && !in_toc_section ()
6781 && !in_text_section ()
6783 && GET_CODE (x) != CONST_INT
6784 && GET_CODE (x) != CONST_DOUBLE
6790 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6792 ASM_OUTPUT_LABEL (asm_out_file, buf);
6793 fprintf (asm_out_file, "\t.long\t(");
6794 output_addr_const (asm_out_file, x);
6795 fprintf (asm_out_file, ")@fixup\n");
6796 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6797 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6798 fprintf (asm_out_file, "\t.long\t");
6799 assemble_name (asm_out_file, buf);
6800 fprintf (asm_out_file, "\n\t.previous\n");
6804 /* Remove initial .'s to turn a -mcall-aixdesc function
6805 address into the address of the descriptor, not the function
6807 else if (GET_CODE (x) == SYMBOL_REF
6808 && XSTR (x, 0)[0] == '.'
6809 && DEFAULT_ABI == ABI_AIX)
6811 const char *name = XSTR (x, 0);
6812 while (*name == '.')
6815 fprintf (asm_out_file, "\t.long\t%s\n", name);
6819 #endif /* RELOCATABLE_NEEDS_FIXUP */
6820 return default_assemble_integer (x, size, aligned_p);
6824 rs6000_reverse_condition (mode, code)
6825 enum machine_mode mode;
6828 /* Reversal of FP compares takes care -- an ordered compare
6829 becomes an unordered compare and vice versa. */
6830 if (mode == CCFPmode)
6831 return reverse_condition_maybe_unordered (code);
6833 return reverse_condition (code);
6836 /* Generate a compare for CODE. Return a brand-new rtx that
6837 represents the result of the compare. */
6840 rs6000_generate_compare (code)
6843 enum machine_mode comp_mode;
6846 if (rs6000_compare_fp_p)
6847 comp_mode = CCFPmode;
6848 else if (code == GTU || code == LTU
6849 || code == GEU || code == LEU)
6850 comp_mode = CCUNSmode;
6854 /* First, the compare. */
6855 compare_result = gen_reg_rtx (comp_mode);
6856 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6857 gen_rtx_COMPARE (comp_mode,
6859 rs6000_compare_op1)));
6861 /* Some kinds of FP comparisons need an OR operation;
6862 except for flag_unsafe_math_optimizations we don't bother. */
6863 if (rs6000_compare_fp_p
6864 && ! flag_unsafe_math_optimizations
6865 && (code == LE || code == GE
6866 || code == UNEQ || code == LTGT
6867 || code == UNGT || code == UNLT))
6869 enum rtx_code or1, or2;
6870 rtx or1_rtx, or2_rtx, compare2_rtx;
6871 rtx or_result = gen_reg_rtx (CCEQmode);
6875 case LE: or1 = LT; or2 = EQ; break;
6876 case GE: or1 = GT; or2 = EQ; break;
6877 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6878 case LTGT: or1 = LT; or2 = GT; break;
6879 case UNGT: or1 = UNORDERED; or2 = GT; break;
6880 case UNLT: or1 = UNORDERED; or2 = LT; break;
6883 validate_condition_mode (or1, comp_mode);
6884 validate_condition_mode (or2, comp_mode);
6885 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6886 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6887 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6888 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6890 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6892 compare_result = or_result;
6896 validate_condition_mode (code, GET_MODE (compare_result));
6898 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6902 /* Emit the RTL for an sCOND pattern. */
6905 rs6000_emit_sCOND (code, result)
6910 enum machine_mode op_mode;
6912 condition_rtx = rs6000_generate_compare (code);
6914 op_mode = GET_MODE (rs6000_compare_op0);
6915 if (op_mode == VOIDmode)
6916 op_mode = GET_MODE (rs6000_compare_op1);
6918 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6920 PUT_MODE (condition_rtx, DImode);
6921 convert_move (result, condition_rtx, 0);
6925 PUT_MODE (condition_rtx, SImode);
6926 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6930 /* Emit a branch of kind CODE to location LOC. */
6933 rs6000_emit_cbranch (code, loc)
6937 rtx condition_rtx, loc_ref;
6939 condition_rtx = rs6000_generate_compare (code);
6940 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6941 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6942 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6946 /* Return the string to output a conditional branch to LABEL, which is
6947 the operand number of the label, or -1 if the branch is really a
6950 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
6951 condition code register and its mode specifies what kind of
6954 REVERSED is non-zero if we should reverse the sense of the comparison.
6956 INSN is the insn. */
6959 output_cbranch (op, label, reversed, insn)
6965 static char string[64];
6966 enum rtx_code code = GET_CODE (op);
6967 rtx cc_reg = XEXP (op, 0);
6968 enum machine_mode mode = GET_MODE (cc_reg);
6969 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
6970 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
6971 int really_reversed = reversed ^ need_longbranch;
6977 validate_condition_mode (code, mode);
6979 /* Work out which way this really branches. We could use
6980 reverse_condition_maybe_unordered here always but this
6981 makes the resulting assembler clearer. */
6982 if (really_reversed)
6983 code = rs6000_reverse_condition (mode, code);
6987 /* Not all of these are actually distinct opcodes, but
6988 we distinguish them for clarity of the resulting assembler. */
6990 ccode = "ne"; break;
6992 ccode = "eq"; break;
6994 ccode = "ge"; break;
6995 case GT: case GTU: case UNGT:
6996 ccode = "gt"; break;
6998 ccode = "le"; break;
6999 case LT: case LTU: case UNLT:
7000 ccode = "lt"; break;
7001 case UNORDERED: ccode = "un"; break;
7002 case ORDERED: ccode = "nu"; break;
7003 case UNGE: ccode = "nl"; break;
7004 case UNLE: ccode = "ng"; break;
7009 /* Maybe we have a guess as to how likely the branch is.
7010 The old mnemonics don't have a way to specify this information. */
7011 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7012 if (note != NULL_RTX)
7014 /* PROB is the difference from 50%. */
7015 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7017 /* For branches that are very close to 50%, assume not-taken. */
7018 if (abs (prob) > REG_BR_PROB_BASE / 20
7019 && ((prob > 0) ^ need_longbranch))
7028 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7030 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7032 /* We need to escape any '%' characters in the reg_names string.
7033 Assume they'd only be the first character... */
7034 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7036 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7040 /* If the branch distance was too far, we may have to use an
7041 unconditional branch to go the distance. */
7042 if (need_longbranch)
7043 s += sprintf (s, ",$+8\n\tb %s", label);
7045 s += sprintf (s, ",%s", label);
7051 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7052 operands of the last comparison is nonzero/true, FALSE_COND if it
7053 is zero/false. Return 0 if the hardware has no such operation. */
7056 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7062 enum rtx_code code = GET_CODE (op);
7063 rtx op0 = rs6000_compare_op0;
7064 rtx op1 = rs6000_compare_op1;
7066 enum machine_mode mode = GET_MODE (op0);
7069 /* First, work out if the hardware can do this at all, or
7070 if it's too slow... */
7071 /* If the comparison is an integer one, since we only have fsel
7072 it'll be cheaper to use a branch. */
7073 if (! rs6000_compare_fp_p)
7076 /* Eliminate half of the comparisons by switching operands, this
7077 makes the remaining code simpler. */
7078 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7079 || code == LTGT || code == LT)
7081 code = reverse_condition_maybe_unordered (code);
7083 true_cond = false_cond;
7087 /* UNEQ and LTGT take four instructions for a comparison with zero,
7088 it'll probably be faster to use a branch here too. */
7092 if (GET_CODE (op1) == CONST_DOUBLE)
7093 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7095 /* We're going to try to implement comparions by performing
7096 a subtract, then comparing against zero. Unfortunately,
7097 Inf - Inf is NaN which is not zero, and so if we don't
7098 know that the the operand is finite and the comparison
7099 would treat EQ different to UNORDERED, we can't do it. */
7100 if (! flag_unsafe_math_optimizations
7101 && code != GT && code != UNGE
7102 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7103 /* Constructs of the form (a OP b ? a : b) are safe. */
7104 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7105 || (! rtx_equal_p (op0, true_cond)
7106 && ! rtx_equal_p (op1, true_cond))))
7108 /* At this point we know we can use fsel. */
7110 /* Reduce the comparison to a comparison against zero. */
7111 temp = gen_reg_rtx (mode);
7112 emit_insn (gen_rtx_SET (VOIDmode, temp,
7113 gen_rtx_MINUS (mode, op0, op1)));
7115 op1 = CONST0_RTX (mode);
7117 /* If we don't care about NaNs we can reduce some of the comparisons
7118 down to faster ones. */
7119 if (flag_unsafe_math_optimizations)
7125 true_cond = false_cond;
7138 /* Now, reduce everything down to a GE. */
7145 temp = gen_reg_rtx (mode);
7146 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7151 temp = gen_reg_rtx (mode);
7152 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7157 temp = gen_reg_rtx (mode);
7158 emit_insn (gen_rtx_SET (VOIDmode, temp,
7160 gen_rtx_ABS (mode, op0))));
7165 temp = gen_reg_rtx (mode);
7166 emit_insn (gen_rtx_SET (VOIDmode, temp,
7167 gen_rtx_IF_THEN_ELSE (mode,
7168 gen_rtx_GE (VOIDmode,
7170 true_cond, false_cond)));
7172 true_cond = false_cond;
7174 temp = gen_reg_rtx (mode);
7175 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7180 temp = gen_reg_rtx (mode);
7181 emit_insn (gen_rtx_SET (VOIDmode, temp,
7182 gen_rtx_IF_THEN_ELSE (mode,
7183 gen_rtx_GE (VOIDmode,
7185 true_cond, false_cond)));
7187 false_cond = true_cond;
7189 temp = gen_reg_rtx (mode);
7190 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7198 emit_insn (gen_rtx_SET (VOIDmode, dest,
7199 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7200 gen_rtx_GE (VOIDmode,
7202 true_cond, false_cond)));
7207 rs6000_emit_minmax (dest, code, op0, op1)
7213 enum machine_mode mode = GET_MODE (op0);
7215 if (code == SMAX || code == UMAX)
7216 target = emit_conditional_move (dest, GE, op0, op1, mode,
7219 target = emit_conditional_move (dest, GE, op0, op1, mode,
7221 if (target == NULL_RTX)
7224 emit_move_insn (dest, target);
7227 /* This page contains routines that are used to determine what the
7228 function prologue and epilogue code will do and write them out. */
7230 /* Return the first fixed-point register that is required to be
7231 saved. 32 if none. */
7234 first_reg_to_save ()
7238 /* Find lowest numbered live register. */
7239 for (first_reg = 13; first_reg <= 31; first_reg++)
7240 if (regs_ever_live[first_reg]
7241 && (! call_used_regs[first_reg]
7242 || (first_reg == PIC_OFFSET_TABLE_REGNUM
7243 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7244 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7247 if (current_function_profile)
7249 /* AIX must save/restore every register that contains a parameter
7250 before/after the .__mcount call plus an additional register
7251 for the static chain, if needed; use registers from 30 down to 22
7253 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7255 int last_parm_reg, profile_first_reg;
7257 /* Figure out last used parameter register. The proper thing
7258 to do is to walk incoming args of the function. A function
7259 might have live parameter registers even if it has no
7261 for (last_parm_reg = 10;
7262 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7266 /* Calculate first reg for saving parameter registers
7268 Skip reg 31 which may contain the frame pointer. */
7269 profile_first_reg = (33 - last_parm_reg
7270 - (current_function_needs_context ? 1 : 0));
7272 /* Need to skip another reg to account for R31 being PICBASE
7273 (when flag_pic is set) or R30 being used as the frame
7274 pointer (when flag_pic is not set). */
7275 --profile_first_reg;
7277 /* Do not save frame pointer if no parameters needs to be saved. */
7278 if (profile_first_reg == 31)
7279 profile_first_reg = 32;
7281 if (first_reg > profile_first_reg)
7282 first_reg = profile_first_reg;
7285 /* SVR4 may need one register to preserve the static chain. */
7286 else if (current_function_needs_context)
7288 /* Skip reg 31 which may contain the frame pointer. */
7295 if (flag_pic && current_function_uses_pic_offset_table &&
7296 (first_reg > PIC_OFFSET_TABLE_REGNUM))
7297 return PIC_OFFSET_TABLE_REGNUM;
7303 /* Similar, for FP regs. */
7306 first_fp_reg_to_save ()
7310 /* Find lowest numbered live register. */
7311 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7312 if (regs_ever_live[first_reg])
7318 /* Similar, for AltiVec regs. */
7321 first_altivec_reg_to_save ()
7325 /* Stack frame remains as is unless we are in AltiVec ABI. */
7326 if (! TARGET_ALTIVEC_ABI)
7327 return LAST_ALTIVEC_REGNO + 1;
7329 /* Find lowest numbered live register. */
7330 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7331 if (regs_ever_live[i])
7337 /* Return a 32-bit mask of the AltiVec registers we need to set in
7338 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7339 the 32-bit word is 0. */
7342 compute_vrsave_mask ()
7344 unsigned int i, mask = 0;
7346 /* First, find out if we use _any_ altivec registers. */
7347 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7348 if (regs_ever_live[i])
7349 mask |= ALTIVEC_REG_BIT (i);
7354 /* Next, add all registers that are call-clobbered. We do this
7355 because post-reload register optimizers such as regrename_optimize
7356 may choose to use them. They never change the register class
7357 chosen by reload, so cannot create new uses of altivec registers
7358 if there were none before, so the early exit above is safe. */
7359 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7360 altivec registers not saved in the mask, which might well make the
7361 adjustments below more effective in eliding the save/restore of
7362 VRSAVE in small functions. */
7363 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7364 if (call_used_regs[i])
7365 mask |= ALTIVEC_REG_BIT (i);
7367 /* Next, remove the argument registers from the set. These must
7368 be in the VRSAVE mask set by the caller, so we don't need to add
7369 them in again. More importantly, the mask we compute here is
7370 used to generate CLOBBERs in the set_vrsave insn, and we do not
7371 wish the argument registers to die. */
7372 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7373 mask &= ~ALTIVEC_REG_BIT (i);
7375 /* Similarly, remove the return value from the set. */
7378 diddle_return_value (is_altivec_return_reg, &yes);
7380 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7387 is_altivec_return_reg (reg, xyes)
7391 bool *yes = (bool *) xyes;
7392 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7397 /* Calculate the stack information for the current function. This is
7398 complicated by having two separate calling sequences, the AIX calling
7399 sequence and the V.4 calling sequence.
7401 AIX (and Darwin/Mac OS X) stack frames look like:
7403 SP----> +---------------------------------------+
7404 | back chain to caller | 0 0
7405 +---------------------------------------+
7406 | saved CR | 4 8 (8-11)
7407 +---------------------------------------+
7409 +---------------------------------------+
7410 | reserved for compilers | 12 24
7411 +---------------------------------------+
7412 | reserved for binders | 16 32
7413 +---------------------------------------+
7414 | saved TOC pointer | 20 40
7415 +---------------------------------------+
7416 | Parameter save area (P) | 24 48
7417 +---------------------------------------+
7418 | Alloca space (A) | 24+P etc.
7419 +---------------------------------------+
7420 | Local variable space (L) | 24+P+A
7421 +---------------------------------------+
7422 | Float/int conversion temporary (X) | 24+P+A+L
7423 +---------------------------------------+
7424 | Save area for AltiVec registers (W) | 24+P+A+L+X
7425 +---------------------------------------+
7426 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7427 +---------------------------------------+
7428 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7429 +---------------------------------------+
7430 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7431 +---------------------------------------+
7432 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7433 +---------------------------------------+
7434 old SP->| back chain to caller's caller |
7435 +---------------------------------------+
7437 The required alignment for AIX configurations is two words (i.e., 8
7441 V.4 stack frames look like:
7443 SP----> +---------------------------------------+
7444 | back chain to caller | 0
7445 +---------------------------------------+
7446 | caller's saved LR | 4
7447 +---------------------------------------+
7448 | Parameter save area (P) | 8
7449 +---------------------------------------+
7450 | Alloca space (A) | 8+P
7451 +---------------------------------------+
7452 | Varargs save area (V) | 8+P+A
7453 +---------------------------------------+
7454 | Local variable space (L) | 8+P+A+V
7455 +---------------------------------------+
7456 | Float/int conversion temporary (X) | 8+P+A+V+L
7457 +---------------------------------------+
7458 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7459 +---------------------------------------+
7460 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7461 +---------------------------------------+
7462 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7463 +---------------------------------------+
7464 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7465 +---------------------------------------+
7466 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7467 +---------------------------------------+
7468 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7469 +---------------------------------------+
7470 old SP->| back chain to caller's caller |
7471 +---------------------------------------+
7473 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7474 given. (But note below and in sysv4.h that we require only 8 and
7475 may round up the size of our stack frame anyways. The historical
7476 reason is early versions of powerpc-linux which didn't properly
7477 align the stack at program startup. A happy side-effect is that
7478 -mno-eabi libraries can be used with -meabi programs.)
7480 The EABI configuration defaults to the V.4 layout, unless
7481 -mcall-aix is used, in which case the AIX layout is used. However,
7482 the stack alignment requirements may differ. If -mno-eabi is not
7483 given, the required stack alignment is 8 bytes; if -mno-eabi is
7484 given, the required alignment is 16 bytes. (But see V.4 comment
7487 #ifndef ABI_STACK_BOUNDARY
7488 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7492 rs6000_stack_info ()
7494 static rs6000_stack_t info, zero_info;
7495 rs6000_stack_t *info_ptr = &info;
7496 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7497 enum rs6000_abi abi;
7501 /* Zero all fields portably. */
7504 /* Select which calling sequence. */
7505 info_ptr->abi = abi = DEFAULT_ABI;
7507 /* Calculate which registers need to be saved & save area size. */
7508 info_ptr->first_gp_reg_save = first_reg_to_save ();
7509 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7510 even if it currently looks like we won't. */
7511 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7512 || (flag_pic == 1 && abi == ABI_V4)
7513 || (flag_pic && abi == ABI_DARWIN))
7514 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7515 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7517 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7519 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7520 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7522 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7523 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7524 - info_ptr->first_altivec_reg_save);
7526 /* Does this function call anything? */
7527 info_ptr->calls_p = (! current_function_is_leaf
7528 || cfun->machine->ra_needs_full_frame);
7530 /* Determine if we need to save the link register. */
7531 if (rs6000_ra_ever_killed ()
7532 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7533 #ifdef TARGET_RELOCATABLE
7534 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7536 || (info_ptr->first_fp_reg_save != 64
7537 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7538 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7539 || (abi == ABI_V4 && current_function_calls_alloca)
7540 || (DEFAULT_ABI == ABI_DARWIN
7542 && current_function_uses_pic_offset_table)
7543 || info_ptr->calls_p)
7545 info_ptr->lr_save_p = 1;
7546 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7549 /* Determine if we need to save the condition code registers. */
7550 if (regs_ever_live[CR2_REGNO]
7551 || regs_ever_live[CR3_REGNO]
7552 || regs_ever_live[CR4_REGNO])
7554 info_ptr->cr_save_p = 1;
7556 info_ptr->cr_size = reg_size;
7559 /* If the current function calls __builtin_eh_return, then we need
7560 to allocate stack space for registers that will hold data for
7561 the exception handler. */
7562 if (current_function_calls_eh_return)
7565 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7567 ehrd_size = i * UNITS_PER_WORD;
7572 /* Determine various sizes. */
7573 info_ptr->reg_size = reg_size;
7574 info_ptr->fixed_size = RS6000_SAVE_AREA;
7575 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7576 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7577 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7580 if (TARGET_ALTIVEC_ABI)
7582 info_ptr->vrsave_mask = compute_vrsave_mask ();
7583 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7587 info_ptr->vrsave_mask = 0;
7588 info_ptr->vrsave_size = 0;
7591 /* Calculate the offsets. */
7599 case ABI_AIX_NODESC:
7601 info_ptr->fp_save_offset = - info_ptr->fp_size;
7602 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7604 if (TARGET_ALTIVEC_ABI)
7606 info_ptr->vrsave_save_offset
7607 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7609 /* Align stack so vector save area is on a quadword boundary. */
7610 if (info_ptr->altivec_size != 0)
7611 info_ptr->altivec_padding_size
7612 = 16 - (-info_ptr->vrsave_save_offset % 16);
7614 info_ptr->altivec_padding_size = 0;
7616 info_ptr->altivec_save_offset
7617 = info_ptr->vrsave_save_offset
7618 - info_ptr->altivec_padding_size
7619 - info_ptr->altivec_size;
7621 /* Adjust for AltiVec case. */
7622 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7625 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7626 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7627 info_ptr->lr_save_offset = 2*reg_size;
7631 info_ptr->fp_save_offset = - info_ptr->fp_size;
7632 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7633 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7635 if (TARGET_ALTIVEC_ABI)
7637 info_ptr->vrsave_save_offset
7638 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7640 /* Align stack so vector save area is on a quadword boundary. */
7641 if (info_ptr->altivec_size != 0)
7642 info_ptr->altivec_padding_size
7643 = 16 - (-info_ptr->vrsave_save_offset % 16);
7645 info_ptr->altivec_padding_size = 0;
7647 info_ptr->altivec_save_offset
7648 = info_ptr->vrsave_save_offset
7649 - info_ptr->altivec_padding_size
7650 - info_ptr->altivec_size;
7652 /* Adjust for AltiVec case. */
7653 info_ptr->toc_save_offset
7654 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7657 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7658 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7659 info_ptr->lr_save_offset = reg_size;
7663 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7665 + info_ptr->altivec_size
7666 + info_ptr->altivec_padding_size
7667 + info_ptr->vrsave_size
7671 + info_ptr->vrsave_size
7672 + info_ptr->toc_size,
7673 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7676 total_raw_size = (info_ptr->vars_size
7677 + info_ptr->parm_size
7678 + info_ptr->save_size
7679 + info_ptr->varargs_size
7680 + info_ptr->fixed_size);
7682 info_ptr->total_size =
7683 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7685 /* Determine if we need to allocate any stack frame:
7687 For AIX we need to push the stack if a frame pointer is needed
7688 (because the stack might be dynamically adjusted), if we are
7689 debugging, if we make calls, or if the sum of fp_save, gp_save,
7690 and local variables are more than the space needed to save all
7691 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7692 + 18*8 = 288 (GPR13 reserved).
7694 For V.4 we don't have the stack cushion that AIX uses, but assume
7695 that the debugger can handle stackless frames. */
7697 if (info_ptr->calls_p)
7698 info_ptr->push_p = 1;
7700 else if (abi == ABI_V4)
7701 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
7702 || info_ptr->calls_p);
7705 info_ptr->push_p = (frame_pointer_needed
7706 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7707 || ((total_raw_size - info_ptr->fixed_size)
7708 > (TARGET_32BIT ? 220 : 288)));
7710 /* Zero offsets if we're not saving those registers. */
7711 if (info_ptr->fp_size == 0)
7712 info_ptr->fp_save_offset = 0;
7714 if (info_ptr->gp_size == 0)
7715 info_ptr->gp_save_offset = 0;
7717 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7718 info_ptr->altivec_save_offset = 0;
7720 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7721 info_ptr->vrsave_save_offset = 0;
7723 if (! info_ptr->lr_save_p)
7724 info_ptr->lr_save_offset = 0;
7726 if (! info_ptr->cr_save_p)
7727 info_ptr->cr_save_offset = 0;
7729 if (! info_ptr->toc_save_p)
7730 info_ptr->toc_save_offset = 0;
7736 debug_stack_info (info)
7737 rs6000_stack_t *info;
7739 const char *abi_string;
7742 info = rs6000_stack_info ();
7744 fprintf (stderr, "\nStack information for function %s:\n",
7745 ((current_function_decl && DECL_NAME (current_function_decl))
7746 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7751 default: abi_string = "Unknown"; break;
7752 case ABI_NONE: abi_string = "NONE"; break;
7754 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7755 case ABI_DARWIN: abi_string = "Darwin"; break;
7756 case ABI_V4: abi_string = "V.4"; break;
7759 fprintf (stderr, "\tABI = %5s\n", abi_string);
7761 if (TARGET_ALTIVEC_ABI)
7762 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7764 if (info->first_gp_reg_save != 32)
7765 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7767 if (info->first_fp_reg_save != 64)
7768 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7770 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7771 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7772 info->first_altivec_reg_save);
7774 if (info->lr_save_p)
7775 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7777 if (info->cr_save_p)
7778 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7780 if (info->toc_save_p)
7781 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7783 if (info->vrsave_mask)
7784 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7787 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7790 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7792 if (info->gp_save_offset)
7793 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7795 if (info->fp_save_offset)
7796 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7798 if (info->altivec_save_offset)
7799 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7800 info->altivec_save_offset);
7802 if (info->vrsave_save_offset)
7803 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7804 info->vrsave_save_offset);
7806 if (info->lr_save_offset)
7807 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7809 if (info->cr_save_offset)
7810 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7812 if (info->toc_save_offset)
7813 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7815 if (info->varargs_save_offset)
7816 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7818 if (info->total_size)
7819 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7821 if (info->varargs_size)
7822 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7824 if (info->vars_size)
7825 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7827 if (info->parm_size)
7828 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7830 if (info->fixed_size)
7831 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7834 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7837 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7839 if (info->altivec_size)
7840 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7842 if (info->vrsave_size)
7843 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7845 if (info->altivec_padding_size)
7846 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7847 info->altivec_padding_size);
7850 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7853 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7856 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7858 if (info->save_size)
7859 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7861 if (info->reg_size != 4)
7862 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7864 fprintf (stderr, "\n");
7868 rs6000_return_addr (count, frame)
7872 /* Currently we don't optimize very well between prolog and body
7873 code and for PIC code the code can be actually quite bad, so
7874 don't try to be too clever here. */
7877 || DEFAULT_ABI == ABI_AIX
7878 || DEFAULT_ABI == ABI_AIX_NODESC)
7880 cfun->machine->ra_needs_full_frame = 1;
7887 plus_constant (copy_to_reg
7888 (gen_rtx_MEM (Pmode,
7889 memory_address (Pmode, frame))),
7890 RETURN_ADDRESS_OFFSET)));
7893 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7897 rs6000_ra_ever_killed ()
7901 #ifdef ASM_OUTPUT_MI_THUNK
7902 if (current_function_is_thunk)
7905 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7906 || cfun->machine->ra_needs_full_frame)
7907 return regs_ever_live[LINK_REGISTER_REGNUM];
7909 push_topmost_sequence ();
7911 pop_topmost_sequence ();
7913 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7917 /* Add a REG_MAYBE_DEAD note to the insn. */
7919 rs6000_maybe_dead (insn)
7922 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7927 /* Emit instructions needed to load the TOC register.
7928 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7929 a constant pool; or for SVR4 -fpic. */
7932 rs6000_emit_load_toc_table (fromprolog)
7936 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
7938 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
7940 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7942 rtx temp = (fromprolog
7943 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7944 : gen_reg_rtx (Pmode));
7945 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
7946 rs6000_maybe_dead (emit_move_insn (dest, temp));
7948 else if (flag_pic == 2)
7951 rtx tempLR = (fromprolog
7952 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7953 : gen_reg_rtx (Pmode));
7954 rtx temp0 = (fromprolog
7955 ? gen_rtx_REG (Pmode, 0)
7956 : gen_reg_rtx (Pmode));
7959 /* possibly create the toc section */
7960 if (! toc_initialized)
7963 function_section (current_function_decl);
7970 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
7971 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7973 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
7974 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7976 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
7978 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7979 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
7986 static int reload_toc_labelno = 0;
7988 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
7990 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
7991 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7993 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
7996 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7997 rs6000_maybe_dead (emit_move_insn (temp0,
7998 gen_rtx_MEM (Pmode, dest)));
8000 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8002 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8004 /* This is for AIX code running in non-PIC ELF. */
8007 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8008 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8010 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8011 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8019 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8021 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8026 get_TOC_alias_set ()
8028 static int set = -1;
8030 set = new_alias_set ();
8034 /* This retuns nonzero if the current function uses the TOC. This is
8035 determined by the presence of (unspec ... 7), which is generated by
8036 the various load_toc_* patterns. */
8043 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8046 rtx pat = PATTERN (insn);
8049 if (GET_CODE (pat) == PARALLEL)
8050 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8051 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8052 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8059 create_TOC_reference (symbol)
8062 return gen_rtx_PLUS (Pmode,
8063 gen_rtx_REG (Pmode, TOC_REGISTER),
8064 gen_rtx_CONST (Pmode,
8065 gen_rtx_MINUS (Pmode, symbol,
8066 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8070 /* __throw will restore its own return address to be the same as the
8071 return address of the function that the throw is being made to.
8072 This is unfortunate, because we want to check the original
8073 return address to see if we need to restore the TOC.
8074 So we have to squirrel it away here.
8075 This is used only in compiling __throw and __rethrow.
8077 Most of this code should be removed by CSE. */
8078 static rtx insn_after_throw;
8080 /* This does the saving... */
8082 rs6000_aix_emit_builtin_unwind_init ()
8085 rtx stack_top = gen_reg_rtx (Pmode);
8086 rtx opcode_addr = gen_reg_rtx (Pmode);
8088 insn_after_throw = gen_reg_rtx (SImode);
8090 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8091 emit_move_insn (stack_top, mem);
8093 mem = gen_rtx_MEM (Pmode,
8094 gen_rtx_PLUS (Pmode, stack_top,
8095 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8096 emit_move_insn (opcode_addr, mem);
8097 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8100 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8101 in _eh.o). Only used on AIX.
8103 The idea is that on AIX, function calls look like this:
8104 bl somefunction-trampoline
8108 somefunction-trampoline:
8110 ... load function address in the count register ...
8112 or like this, if the linker determines that this is not a cross-module call
8113 and so the TOC need not be restored:
8116 or like this, if the compiler could determine that this is not a
8119 now, the tricky bit here is that register 2 is saved and restored
8120 by the _linker_, so we can't readily generate debugging information
8121 for it. So we need to go back up the call chain looking at the
8122 insns at return addresses to see which calls saved the TOC register
8123 and so see where it gets restored from.
8125 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8126 just before the actual epilogue.
8128 On the bright side, this incurs no space or time overhead unless an
8129 exception is thrown, except for the extra code in libgcc.a.
8131 The parameter STACKSIZE is a register containing (at runtime)
8132 the amount to be popped off the stack in addition to the stack frame
8133 of this routine (which will be __throw or __rethrow, and so is
8134 guaranteed to have a stack frame). */
8137 rs6000_emit_eh_toc_restore (stacksize)
8141 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8142 rtx tocompare = gen_reg_rtx (SImode);
8143 rtx opcode = gen_reg_rtx (SImode);
8144 rtx opcode_addr = gen_reg_rtx (Pmode);
8146 rtx loop_start = gen_label_rtx ();
8147 rtx no_toc_restore_needed = gen_label_rtx ();
8148 rtx loop_exit = gen_label_rtx ();
8150 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8151 set_mem_alias_set (mem, rs6000_sr_alias_set);
8152 emit_move_insn (bottom_of_stack, mem);
8154 top_of_stack = expand_binop (Pmode, add_optab,
8155 bottom_of_stack, stacksize,
8156 NULL_RTX, 1, OPTAB_WIDEN);
8158 emit_move_insn (tocompare,
8159 GEN_INT (trunc_int_for_mode (TARGET_32BIT
8161 : 0xE8410028, SImode)));
8163 if (insn_after_throw == NULL_RTX)
8165 emit_move_insn (opcode, insn_after_throw);
8167 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8168 emit_label (loop_start);
8170 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8171 SImode, NULL_RTX, NULL_RTX,
8172 no_toc_restore_needed);
8174 mem = gen_rtx_MEM (Pmode,
8175 gen_rtx_PLUS (Pmode, bottom_of_stack,
8176 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8177 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8179 emit_label (no_toc_restore_needed);
8180 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8181 Pmode, NULL_RTX, NULL_RTX,
8184 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8185 set_mem_alias_set (mem, rs6000_sr_alias_set);
8186 emit_move_insn (bottom_of_stack, mem);
8188 mem = gen_rtx_MEM (Pmode,
8189 gen_rtx_PLUS (Pmode, bottom_of_stack,
8190 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8191 emit_move_insn (opcode_addr, mem);
8192 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8194 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8195 emit_jump (loop_start);
8196 emit_note (NULL, NOTE_INSN_LOOP_END);
8197 emit_label (loop_exit);
8199 #endif /* TARGET_AIX */
8201 /* This ties together stack memory (MEM with an alias set of
8202 rs6000_sr_alias_set) and the change to the stack pointer. */
8205 rs6000_emit_stack_tie ()
8207 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8209 set_mem_alias_set (mem, rs6000_sr_alias_set);
8210 emit_insn (gen_stack_tie (mem));
8213 /* Emit the correct code for allocating stack space, as insns.
8214 If COPY_R12, make sure a copy of the old frame is left in r12.
8215 The generated code may use hard register 0 as a temporary. */
8218 rs6000_emit_allocate_stack (size, copy_r12)
8223 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8224 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8225 rtx todec = GEN_INT (-size);
8227 if (current_function_limit_stack)
8229 if (REG_P (stack_limit_rtx)
8230 && REGNO (stack_limit_rtx) > 1
8231 && REGNO (stack_limit_rtx) <= 31)
8233 emit_insn (Pmode == SImode
8234 ? gen_addsi3 (tmp_reg,
8237 : gen_adddi3 (tmp_reg,
8241 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8244 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8246 && DEFAULT_ABI == ABI_V4)
8248 rtx toload = gen_rtx_CONST (VOIDmode,
8249 gen_rtx_PLUS (Pmode,
8253 emit_insn (gen_elf_high (tmp_reg, toload));
8254 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8255 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8259 warning ("stack limit expression is not supported");
8262 if (copy_r12 || ! TARGET_UPDATE)
8263 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8269 /* Need a note here so that try_split doesn't get confused. */
8270 if (get_last_insn() == NULL_RTX)
8271 emit_note (0, NOTE_INSN_DELETED);
8272 insn = emit_move_insn (tmp_reg, todec);
8273 try_split (PATTERN (insn), insn, 0);
8277 if (Pmode == SImode)
8278 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8281 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8286 if (Pmode == SImode)
8287 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8289 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8290 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8291 gen_rtx_REG (Pmode, 12));
8294 RTX_FRAME_RELATED_P (insn) = 1;
8296 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8297 gen_rtx_SET (VOIDmode, stack_reg,
8298 gen_rtx_PLUS (Pmode, stack_reg,
8303 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8306 (mem (plus (blah) (regXX)))
8310 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8313 altivec_frame_fixup (insn, reg, val)
8319 real = copy_rtx (PATTERN (insn));
8321 real = replace_rtx (real, reg, GEN_INT (val));
8323 RTX_FRAME_RELATED_P (insn) = 1;
8324 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8329 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8330 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8331 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8332 deduce these equivalences by itself so it wasn't necessary to hold
8333 its hand so much. */
8336 rs6000_frame_related (insn, reg, val, reg2, rreg)
8345 real = copy_rtx (PATTERN (insn));
8347 real = replace_rtx (real, reg,
8348 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8349 STACK_POINTER_REGNUM),
8352 /* We expect that 'real' is either a SET or a PARALLEL containing
8353 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8354 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8356 if (GET_CODE (real) == SET)
8360 temp = simplify_rtx (SET_SRC (set));
8362 SET_SRC (set) = temp;
8363 temp = simplify_rtx (SET_DEST (set));
8365 SET_DEST (set) = temp;
8366 if (GET_CODE (SET_DEST (set)) == MEM)
8368 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8370 XEXP (SET_DEST (set), 0) = temp;
8373 else if (GET_CODE (real) == PARALLEL)
8376 for (i = 0; i < XVECLEN (real, 0); i++)
8377 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8379 rtx set = XVECEXP (real, 0, i);
8381 temp = simplify_rtx (SET_SRC (set));
8383 SET_SRC (set) = temp;
8384 temp = simplify_rtx (SET_DEST (set));
8386 SET_DEST (set) = temp;
8387 if (GET_CODE (SET_DEST (set)) == MEM)
8389 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8391 XEXP (SET_DEST (set), 0) = temp;
8393 RTX_FRAME_RELATED_P (set) = 1;
8399 if (reg2 != NULL_RTX)
8400 real = replace_rtx (real, reg2, rreg);
8402 RTX_FRAME_RELATED_P (insn) = 1;
8403 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8408 /* Returns an insn that has a vrsave set operation with the
8409 appropriate CLOBBERs. */
8412 generate_set_vrsave (reg, info, epiloguep)
8414 rs6000_stack_t *info;
8418 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8419 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8422 = gen_rtx_SET (VOIDmode,
8424 gen_rtx_UNSPEC_VOLATILE (SImode,
8425 gen_rtvec (2, reg, vrsave),
8430 /* We need to clobber the registers in the mask so the scheduler
8431 does not move sets to VRSAVE before sets of AltiVec registers.
8433 However, if the function receives nonlocal gotos, reload will set
8434 all call saved registers live. We will end up with:
8436 (set (reg 999) (mem))
8437 (parallel [ (set (reg vrsave) (unspec blah))
8438 (clobber (reg 999))])
8440 The clobber will cause the store into reg 999 to be dead, and
8441 flow will attempt to delete an epilogue insn. In this case, we
8442 need an unspec use/set of the register. */
8444 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8445 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8447 if (!epiloguep || call_used_regs [i])
8448 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8449 gen_rtx_REG (V4SImode, i));
8452 rtx reg = gen_rtx_REG (V4SImode, i);
8455 = gen_rtx_SET (VOIDmode,
8457 gen_rtx_UNSPEC (V4SImode,
8458 gen_rtvec (1, reg), 27));
8462 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8464 for (i = 0; i < nclobs; ++i)
8465 XVECEXP (insn, 0, i) = clobs[i];
8470 /* Emit function prologue as insns. */
8473 rs6000_emit_prologue ()
8475 rs6000_stack_t *info = rs6000_stack_info ();
8476 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8477 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8478 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8479 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8480 rtx frame_reg_rtx = sp_reg_rtx;
8481 rtx cr_save_rtx = NULL;
8483 int saving_FPRs_inline;
8484 int using_store_multiple;
8485 HOST_WIDE_INT sp_offset = 0;
8487 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8488 && info->first_gp_reg_save < 31);
8489 saving_FPRs_inline = (info->first_fp_reg_save == 64
8490 || FP_SAVE_INLINE (info->first_fp_reg_save));
8492 /* For V.4, update stack before we do any saving and set back pointer. */
8493 if (info->push_p && DEFAULT_ABI == ABI_V4)
8495 if (info->total_size < 32767)
8496 sp_offset = info->total_size;
8498 frame_reg_rtx = frame_ptr_rtx;
8499 rs6000_emit_allocate_stack (info->total_size,
8500 (frame_reg_rtx != sp_reg_rtx
8503 || info->first_fp_reg_save < 64
8504 || info->first_gp_reg_save < 32
8506 if (frame_reg_rtx != sp_reg_rtx)
8507 rs6000_emit_stack_tie ();
8510 /* Save AltiVec registers if needed. */
8511 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8515 /* There should be a non inline version of this, for when we
8516 are saving lots of vector registers. */
8517 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8518 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8520 rtx areg, savereg, mem;
8523 offset = info->altivec_save_offset + sp_offset
8524 + 16 * (i - info->first_altivec_reg_save);
8526 savereg = gen_rtx_REG (V4SImode, i);
8528 areg = gen_rtx_REG (Pmode, 0);
8529 emit_move_insn (areg, GEN_INT (offset));
8531 /* AltiVec addressing mode is [reg+reg]. */
8532 mem = gen_rtx_MEM (V4SImode,
8533 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8535 set_mem_alias_set (mem, rs6000_sr_alias_set);
8537 insn = emit_move_insn (mem, savereg);
8539 altivec_frame_fixup (insn, areg, offset);
8543 /* VRSAVE is a bit vector representing which AltiVec registers
8544 are used. The OS uses this to determine which vector
8545 registers to save on a context switch. We need to save
8546 VRSAVE on the stack frame, add whatever AltiVec registers we
8547 used in this function, and do the corresponding magic in the
8550 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8552 rtx reg, mem, vrsave;
8555 /* Get VRSAVE onto a GPR. */
8556 reg = gen_rtx_REG (SImode, 12);
8557 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8558 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8561 offset = info->vrsave_save_offset + sp_offset;
8563 = gen_rtx_MEM (SImode,
8564 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8565 set_mem_alias_set (mem, rs6000_sr_alias_set);
8566 insn = emit_move_insn (mem, reg);
8568 /* Include the registers in the mask. */
8569 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8571 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8574 /* If we use the link register, get it into r0. */
8575 if (info->lr_save_p)
8576 emit_move_insn (gen_rtx_REG (Pmode, 0),
8577 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8579 /* If we need to save CR, put it into r12. */
8580 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8582 cr_save_rtx = gen_rtx_REG (SImode, 12);
8583 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8586 /* Do any required saving of fpr's. If only one or two to save, do
8587 it ourselves. Otherwise, call function. */
8588 if (saving_FPRs_inline)
8591 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8592 if ((regs_ever_live[info->first_fp_reg_save+i]
8593 && ! call_used_regs[info->first_fp_reg_save+i]))
8596 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8597 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8598 GEN_INT (info->fp_save_offset
8601 mem = gen_rtx_MEM (DFmode, addr);
8602 set_mem_alias_set (mem, rs6000_sr_alias_set);
8604 insn = emit_move_insn (mem, reg);
8605 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8606 NULL_RTX, NULL_RTX);
8609 else if (info->first_fp_reg_save != 64)
8613 const char *alloc_rname;
8615 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8617 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8619 LINK_REGISTER_REGNUM));
8620 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8621 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8622 alloc_rname = ggc_strdup (rname);
8623 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8624 gen_rtx_SYMBOL_REF (Pmode,
8626 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8629 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8630 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8631 GEN_INT (info->fp_save_offset
8632 + sp_offset + 8*i));
8633 mem = gen_rtx_MEM (DFmode, addr);
8634 set_mem_alias_set (mem, rs6000_sr_alias_set);
8636 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8638 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8639 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8640 NULL_RTX, NULL_RTX);
8643 /* Save GPRs. This is done as a PARALLEL if we are using
8644 the store-multiple instructions. */
8645 if (using_store_multiple)
8649 p = rtvec_alloc (32 - info->first_gp_reg_save);
8650 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8651 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8654 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8655 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8656 GEN_INT (info->gp_save_offset
8659 mem = gen_rtx_MEM (reg_mode, addr);
8660 set_mem_alias_set (mem, rs6000_sr_alias_set);
8662 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8664 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8665 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8666 NULL_RTX, NULL_RTX);
8671 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8672 if ((regs_ever_live[info->first_gp_reg_save+i]
8673 && ! call_used_regs[info->first_gp_reg_save+i])
8674 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8675 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8676 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8679 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8680 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8681 GEN_INT (info->gp_save_offset
8684 mem = gen_rtx_MEM (reg_mode, addr);
8685 set_mem_alias_set (mem, rs6000_sr_alias_set);
8687 insn = emit_move_insn (mem, reg);
8688 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8689 NULL_RTX, NULL_RTX);
8693 /* ??? There's no need to emit actual instructions here, but it's the
8694 easiest way to get the frame unwind information emitted. */
8695 if (current_function_calls_eh_return)
8697 unsigned int i, regno;
8703 regno = EH_RETURN_DATA_REGNO (i);
8704 if (regno == INVALID_REGNUM)
8707 reg = gen_rtx_REG (reg_mode, regno);
8708 addr = plus_constant (frame_reg_rtx,
8709 info->ehrd_offset + sp_offset
8710 + reg_size * (int) i);
8711 mem = gen_rtx_MEM (reg_mode, addr);
8712 set_mem_alias_set (mem, rs6000_sr_alias_set);
8714 insn = emit_move_insn (mem, reg);
8715 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8716 NULL_RTX, NULL_RTX);
8720 /* Save lr if we used it. */
8721 if (info->lr_save_p)
8723 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8724 GEN_INT (info->lr_save_offset + sp_offset));
8725 rtx reg = gen_rtx_REG (Pmode, 0);
8726 rtx mem = gen_rtx_MEM (Pmode, addr);
8727 /* This should not be of rs6000_sr_alias_set, because of
8728 __builtin_return_address. */
8730 insn = emit_move_insn (mem, reg);
8731 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8732 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8735 /* Save CR if we use any that must be preserved. */
8736 if (info->cr_save_p)
8738 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8739 GEN_INT (info->cr_save_offset + sp_offset));
8740 rtx mem = gen_rtx_MEM (SImode, addr);
8742 set_mem_alias_set (mem, rs6000_sr_alias_set);
8744 /* If r12 was used to hold the original sp, copy cr into r0 now
8746 if (REGNO (frame_reg_rtx) == 12)
8748 cr_save_rtx = gen_rtx_REG (SImode, 0);
8749 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8751 insn = emit_move_insn (mem, cr_save_rtx);
8753 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8754 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8755 OK. All we have to do is specify that _one_ condition code
8756 register is saved in this stack slot. The thrower's epilogue
8757 will then restore all the call-saved registers.
8758 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8759 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8760 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8763 /* Update stack and set back pointer unless this is V.4,
8764 for which it was done previously. */
8765 if (info->push_p && DEFAULT_ABI != ABI_V4)
8766 rs6000_emit_allocate_stack (info->total_size, FALSE);
8768 /* Set frame pointer, if needed. */
8769 if (frame_pointer_needed)
8771 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8773 RTX_FRAME_RELATED_P (insn) = 1;
8776 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8777 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8778 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8779 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8781 /* If emit_load_toc_table will use the link register, we need to save
8782 it. We use R11 for this purpose because emit_load_toc_table
8783 can use register 0. This allows us to use a plain 'blr' to return
8784 from the procedure more often. */
8785 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8786 && ! info->lr_save_p
8787 && EXIT_BLOCK_PTR->pred != NULL);
8788 if (save_LR_around_toc_setup)
8789 emit_move_insn (gen_rtx_REG (Pmode, 11),
8790 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8792 rs6000_emit_load_toc_table (TRUE);
8794 if (save_LR_around_toc_setup)
8795 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8796 gen_rtx_REG (Pmode, 11));
8799 if (DEFAULT_ABI == ABI_DARWIN
8800 && flag_pic && current_function_uses_pic_offset_table)
8802 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8804 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8807 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8808 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8812 /* Write function prologue. */
8815 rs6000_output_function_prologue (file, size)
8817 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8819 rs6000_stack_t *info = rs6000_stack_info ();
8821 if (TARGET_DEBUG_STACK)
8822 debug_stack_info (info);
8824 /* Write .extern for any function we will call to save and restore
8826 if (info->first_fp_reg_save < 64
8827 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8828 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8829 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8830 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8833 /* Write .extern for AIX common mode routines, if needed. */
8834 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8836 fputs ("\t.extern __mulh\n", file);
8837 fputs ("\t.extern __mull\n", file);
8838 fputs ("\t.extern __divss\n", file);
8839 fputs ("\t.extern __divus\n", file);
8840 fputs ("\t.extern __quoss\n", file);
8841 fputs ("\t.extern __quous\n", file);
8842 common_mode_defined = 1;
8845 if (! HAVE_prologue)
8849 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8850 the "toplevel" insn chain. */
8851 emit_note (0, NOTE_INSN_DELETED);
8852 rs6000_emit_prologue ();
8853 emit_note (0, NOTE_INSN_DELETED);
8855 if (TARGET_DEBUG_STACK)
8856 debug_rtx_list (get_insns (), 100);
8857 final (get_insns (), file, FALSE, FALSE);
8861 rs6000_pic_labelno++;
8864 /* Emit function epilogue as insns.
8866 At present, dwarf2out_frame_debug_expr doesn't understand
8867 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8868 anywhere in the epilogue. Most of the insns below would in any case
8869 need special notes to explain where r11 is in relation to the stack. */
8872 rs6000_emit_epilogue (sibcall)
8875 rs6000_stack_t *info;
8876 int restoring_FPRs_inline;
8877 int using_load_multiple;
8878 int using_mfcr_multiple;
8879 int use_backchain_to_restore_sp;
8881 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8882 rtx frame_reg_rtx = sp_reg_rtx;
8883 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8884 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8887 info = rs6000_stack_info ();
8888 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8889 && info->first_gp_reg_save < 31);
8890 restoring_FPRs_inline = (sibcall
8891 || current_function_calls_eh_return
8892 || info->first_fp_reg_save == 64
8893 || FP_SAVE_INLINE (info->first_fp_reg_save));
8894 use_backchain_to_restore_sp = (frame_pointer_needed
8895 || current_function_calls_alloca
8896 || info->total_size > 32767);
8897 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
8898 || rs6000_cpu == PROCESSOR_PPC603
8899 || rs6000_cpu == PROCESSOR_PPC750
8902 /* If we have a frame pointer, a call to alloca, or a large stack
8903 frame, restore the old stack pointer using the backchain. Otherwise,
8904 we know what size to update it with. */
8905 if (use_backchain_to_restore_sp)
8907 /* Under V.4, don't reset the stack pointer until after we're done
8908 loading the saved registers. */
8909 if (DEFAULT_ABI == ABI_V4)
8910 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
8912 emit_move_insn (frame_reg_rtx,
8913 gen_rtx_MEM (Pmode, sp_reg_rtx));
8916 else if (info->push_p)
8918 if (DEFAULT_ABI == ABI_V4)
8919 sp_offset = info->total_size;
8922 emit_insn (TARGET_32BIT
8923 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8924 GEN_INT (info->total_size))
8925 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8926 GEN_INT (info->total_size)));
8930 /* Restore AltiVec registers if needed. */
8931 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8935 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8936 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8938 rtx addr, areg, mem;
8940 areg = gen_rtx_REG (Pmode, 0);
8942 (areg, GEN_INT (info->altivec_save_offset
8944 + 16 * (i - info->first_altivec_reg_save)));
8946 /* AltiVec addressing mode is [reg+reg]. */
8947 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
8948 mem = gen_rtx_MEM (V4SImode, addr);
8949 set_mem_alias_set (mem, rs6000_sr_alias_set);
8951 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
8955 /* Restore VRSAVE if needed. */
8956 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
8960 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8961 GEN_INT (info->vrsave_save_offset + sp_offset));
8962 mem = gen_rtx_MEM (SImode, addr);
8963 set_mem_alias_set (mem, rs6000_sr_alias_set);
8964 reg = gen_rtx_REG (SImode, 12);
8965 emit_move_insn (reg, mem);
8967 emit_insn (generate_set_vrsave (reg, info, 1));
8970 /* Get the old lr if we saved it. */
8971 if (info->lr_save_p)
8973 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8974 GEN_INT (info->lr_save_offset + sp_offset));
8975 rtx mem = gen_rtx_MEM (Pmode, addr);
8977 set_mem_alias_set (mem, rs6000_sr_alias_set);
8979 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
8982 /* Get the old cr if we saved it. */
8983 if (info->cr_save_p)
8985 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8986 GEN_INT (info->cr_save_offset + sp_offset));
8987 rtx mem = gen_rtx_MEM (SImode, addr);
8989 set_mem_alias_set (mem, rs6000_sr_alias_set);
8991 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
8994 /* Set LR here to try to overlap restores below. */
8995 if (info->lr_save_p)
8996 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8997 gen_rtx_REG (Pmode, 0));
8999 /* Load exception handler data registers, if needed. */
9000 if (current_function_calls_eh_return)
9002 unsigned int i, regno;
9008 regno = EH_RETURN_DATA_REGNO (i);
9009 if (regno == INVALID_REGNUM)
9012 addr = plus_constant (frame_reg_rtx,
9013 info->ehrd_offset + sp_offset
9014 + reg_size * (int) i);
9015 mem = gen_rtx_MEM (reg_mode, addr);
9016 set_mem_alias_set (mem, rs6000_sr_alias_set);
9018 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9022 /* Restore GPRs. This is done as a PARALLEL if we are using
9023 the load-multiple instructions. */
9024 if (using_load_multiple)
9027 p = rtvec_alloc (32 - info->first_gp_reg_save);
9028 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9030 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9031 GEN_INT (info->gp_save_offset
9034 rtx mem = gen_rtx_MEM (reg_mode, addr);
9036 set_mem_alias_set (mem, rs6000_sr_alias_set);
9039 gen_rtx_SET (VOIDmode,
9040 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9043 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9046 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9047 if ((regs_ever_live[info->first_gp_reg_save+i]
9048 && ! call_used_regs[info->first_gp_reg_save+i])
9049 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
9050 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9051 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9053 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9054 GEN_INT (info->gp_save_offset
9057 rtx mem = gen_rtx_MEM (reg_mode, addr);
9059 set_mem_alias_set (mem, rs6000_sr_alias_set);
9061 emit_move_insn (gen_rtx_REG (reg_mode,
9062 info->first_gp_reg_save + i),
9066 /* Restore fpr's if we need to do it without calling a function. */
9067 if (restoring_FPRs_inline)
9068 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9069 if ((regs_ever_live[info->first_fp_reg_save+i]
9070 && ! call_used_regs[info->first_fp_reg_save+i]))
9073 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9074 GEN_INT (info->fp_save_offset
9077 mem = gen_rtx_MEM (DFmode, addr);
9078 set_mem_alias_set (mem, rs6000_sr_alias_set);
9080 emit_move_insn (gen_rtx_REG (DFmode,
9081 info->first_fp_reg_save + i),
9085 /* If we saved cr, restore it here. Just those that were used. */
9086 if (info->cr_save_p)
9088 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9091 if (using_mfcr_multiple)
9093 for (i = 0; i < 8; i++)
9094 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9100 if (using_mfcr_multiple && count > 1)
9105 p = rtvec_alloc (count);
9108 for (i = 0; i < 8; i++)
9109 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9111 rtvec r = rtvec_alloc (2);
9112 RTVEC_ELT (r, 0) = r12_rtx;
9113 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9114 RTVEC_ELT (p, ndx) =
9115 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9116 gen_rtx_UNSPEC (CCmode, r, 20));
9119 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9124 for (i = 0; i < 8; i++)
9125 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9127 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9133 /* If this is V.4, unwind the stack pointer after all of the loads
9134 have been done. We need to emit a block here so that sched
9135 doesn't decide to move the sp change before the register restores
9136 (which may not have any obvious dependency on the stack). This
9137 doesn't hurt performance, because there is no scheduling that can
9138 be done after this point. */
9139 if (DEFAULT_ABI == ABI_V4)
9141 if (frame_reg_rtx != sp_reg_rtx)
9142 rs6000_emit_stack_tie ();
9144 if (use_backchain_to_restore_sp)
9146 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9148 else if (sp_offset != 0)
9150 emit_insn (Pmode == SImode
9151 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9152 GEN_INT (sp_offset))
9153 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9154 GEN_INT (sp_offset)));
9158 if (current_function_calls_eh_return)
9160 rtx sa = EH_RETURN_STACKADJ_RTX;
9161 emit_insn (Pmode == SImode
9162 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9163 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9169 if (! restoring_FPRs_inline)
9170 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9172 p = rtvec_alloc (2);
9174 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9175 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9177 LINK_REGISTER_REGNUM));
9179 /* If we have to restore more than two FP registers, branch to the
9180 restore function. It will return to our caller. */
9181 if (! restoring_FPRs_inline)
9185 const char *alloc_rname;
9187 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9188 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9189 alloc_rname = ggc_strdup (rname);
9190 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9191 gen_rtx_SYMBOL_REF (Pmode,
9194 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9197 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9198 GEN_INT (info->fp_save_offset + 8*i));
9199 mem = gen_rtx_MEM (DFmode, addr);
9200 set_mem_alias_set (mem, rs6000_sr_alias_set);
9202 RTVEC_ELT (p, i+3) =
9203 gen_rtx_SET (VOIDmode,
9204 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9209 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9213 /* Write function epilogue. */
9216 rs6000_output_function_epilogue (file, size)
9218 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9220 rs6000_stack_t *info = rs6000_stack_info ();
9221 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9223 if (! HAVE_epilogue)
9225 rtx insn = get_last_insn ();
9226 /* If the last insn was a BARRIER, we don't have to write anything except
9228 if (GET_CODE (insn) == NOTE)
9229 insn = prev_nonnote_insn (insn);
9230 if (insn == 0 || GET_CODE (insn) != BARRIER)
9232 /* This is slightly ugly, but at least we don't have two
9233 copies of the epilogue-emitting code. */
9236 /* A NOTE_INSN_DELETED is supposed to be at the start
9237 and end of the "toplevel" insn chain. */
9238 emit_note (0, NOTE_INSN_DELETED);
9239 rs6000_emit_epilogue (FALSE);
9240 emit_note (0, NOTE_INSN_DELETED);
9242 if (TARGET_DEBUG_STACK)
9243 debug_rtx_list (get_insns (), 100);
9244 final (get_insns (), file, FALSE, FALSE);
9249 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9252 We don't output a traceback table if -finhibit-size-directive was
9253 used. The documentation for -finhibit-size-directive reads
9254 ``don't output a @code{.size} assembler directive, or anything
9255 else that would cause trouble if the function is split in the
9256 middle, and the two halves are placed at locations far apart in
9257 memory.'' The traceback table has this property, since it
9258 includes the offset from the start of the function to the
9259 traceback table itself.
9261 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9262 different traceback table. */
9263 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9265 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9266 const char *language_string = lang_hooks.name;
9267 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9270 while (*fname == '.') /* V.4 encodes . in the name */
9273 /* Need label immediately before tbtab, so we can compute its offset
9274 from the function start. */
9277 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9278 ASM_OUTPUT_LABEL (file, fname);
9280 /* The .tbtab pseudo-op can only be used for the first eight
9281 expressions, since it can't handle the possibly variable
9282 length fields that follow. However, if you omit the optional
9283 fields, the assembler outputs zeros for all optional fields
9284 anyways, giving each variable length field is minimum length
9285 (as defined in sys/debug.h). Thus we can not use the .tbtab
9286 pseudo-op at all. */
9288 /* An all-zero word flags the start of the tbtab, for debuggers
9289 that have to find it by searching forward from the entry
9290 point or from the current pc. */
9291 fputs ("\t.long 0\n", file);
9293 /* Tbtab format type. Use format type 0. */
9294 fputs ("\t.byte 0,", file);
9296 /* Language type. Unfortunately, there doesn't seem to be any
9297 official way to get this info, so we use language_string. C
9298 is 0. C++ is 9. No number defined for Obj-C, so use the
9299 value for C for now. There is no official value for Java,
9300 although IBM appears to be using 13. There is no official value
9301 for Chill, so we've chosen 44 pseudo-randomly. */
9302 if (! strcmp (language_string, "GNU C")
9303 || ! strcmp (language_string, "GNU Objective-C"))
9305 else if (! strcmp (language_string, "GNU F77"))
9307 else if (! strcmp (language_string, "GNU Ada"))
9309 else if (! strcmp (language_string, "GNU Pascal"))
9311 else if (! strcmp (language_string, "GNU C++"))
9313 else if (! strcmp (language_string, "GNU Java"))
9315 else if (! strcmp (language_string, "GNU CHILL"))
9319 fprintf (file, "%d,", i);
9321 /* 8 single bit fields: global linkage (not set for C extern linkage,
9322 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9323 from start of procedure stored in tbtab, internal function, function
9324 has controlled storage, function has no toc, function uses fp,
9325 function logs/aborts fp operations. */
9326 /* Assume that fp operations are used if any fp reg must be saved. */
9327 fprintf (file, "%d,",
9328 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9330 /* 6 bitfields: function is interrupt handler, name present in
9331 proc table, function calls alloca, on condition directives
9332 (controls stack walks, 3 bits), saves condition reg, saves
9334 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9335 set up as a frame pointer, even when there is no alloca call. */
9336 fprintf (file, "%d,",
9337 ((optional_tbtab << 6)
9338 | ((optional_tbtab & frame_pointer_needed) << 5)
9339 | (info->cr_save_p << 1)
9340 | (info->lr_save_p)));
9342 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9344 fprintf (file, "%d,",
9345 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9347 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9348 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9352 /* Compute the parameter info from the function decl argument
9355 int next_parm_info_bit = 31;
9357 for (decl = DECL_ARGUMENTS (current_function_decl);
9358 decl; decl = TREE_CHAIN (decl))
9360 rtx parameter = DECL_INCOMING_RTL (decl);
9361 enum machine_mode mode = GET_MODE (parameter);
9363 if (GET_CODE (parameter) == REG)
9365 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9373 else if (mode == DFmode)
9378 /* If only one bit will fit, don't or in this entry. */
9379 if (next_parm_info_bit > 0)
9380 parm_info |= (bits << (next_parm_info_bit - 1));
9381 next_parm_info_bit -= 2;
9385 fixed_parms += ((GET_MODE_SIZE (mode)
9386 + (UNITS_PER_WORD - 1))
9388 next_parm_info_bit -= 1;
9394 /* Number of fixed point parameters. */
9395 /* This is actually the number of words of fixed point parameters; thus
9396 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9397 fprintf (file, "%d,", fixed_parms);
9399 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9401 /* This is actually the number of fp registers that hold parameters;
9402 and thus the maximum value is 13. */
9403 /* Set parameters on stack bit if parameters are not in their original
9404 registers, regardless of whether they are on the stack? Xlc
9405 seems to set the bit when not optimizing. */
9406 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9408 if (! optional_tbtab)
9411 /* Optional fields follow. Some are variable length. */
9413 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9415 /* There is an entry for each parameter in a register, in the order that
9416 they occur in the parameter list. Any intervening arguments on the
9417 stack are ignored. If the list overflows a long (max possible length
9418 34 bits) then completely leave off all elements that don't fit. */
9419 /* Only emit this long if there was at least one parameter. */
9420 if (fixed_parms || float_parms)
9421 fprintf (file, "\t.long %d\n", parm_info);
9423 /* Offset from start of code to tb table. */
9424 fputs ("\t.long ", file);
9425 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9427 RS6000_OUTPUT_BASENAME (file, fname);
9429 assemble_name (file, fname);
9433 RS6000_OUTPUT_BASENAME (file, fname);
9435 assemble_name (file, fname);
9439 /* Interrupt handler mask. */
9440 /* Omit this long, since we never set the interrupt handler bit
9443 /* Number of CTL (controlled storage) anchors. */
9444 /* Omit this long, since the has_ctl bit is never set above. */
9446 /* Displacement into stack of each CTL anchor. */
9447 /* Omit this list of longs, because there are no CTL anchors. */
9449 /* Length of function name. */
9450 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9452 /* Function name. */
9453 assemble_string (fname, strlen (fname));
9455 /* Register for alloca automatic storage; this is always reg 31.
9456 Only emit this if the alloca bit was set above. */
9457 if (frame_pointer_needed)
9458 fputs ("\t.byte 31\n", file);
9463 /* A C compound statement that outputs the assembler code for a thunk
9464 function, used to implement C++ virtual function calls with
9465 multiple inheritance. The thunk acts as a wrapper around a virtual
9466 function, adjusting the implicit object parameter before handing
9467 control off to the real function.
9469 First, emit code to add the integer DELTA to the location that
9470 contains the incoming first argument. Assume that this argument
9471 contains a pointer, and is the one used to pass the `this' pointer
9472 in C++. This is the incoming argument *before* the function
9473 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9474 values of all other incoming arguments.
9476 After the addition, emit code to jump to FUNCTION, which is a
9477 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9478 not touch the return address. Hence returning from FUNCTION will
9479 return to whoever called the current `thunk'.
9481 The effect must be as if FUNCTION had been called directly with the
9482 adjusted first argument. This macro is responsible for emitting
9483 all of the code for a thunk function; output_function_prologue()
9484 and output_function_epilogue() are not invoked.
9486 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9487 been extracted from it.) It might possibly be useful on some
9488 targets, but probably not.
9490 If you do not define this macro, the target-independent code in the
9491 C++ frontend will generate a less efficient heavyweight thunk that
9492 calls FUNCTION instead of jumping to it. The generic approach does
9493 not support varargs. */
9496 output_mi_thunk (file, thunk_fndecl, delta, function)
9498 tree thunk_fndecl ATTRIBUTE_UNUSED;
9502 const char *this_reg =
9503 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9506 const char *r0 = reg_names[0];
9507 const char *toc = reg_names[2];
9508 const char *schain = reg_names[11];
9509 const char *r12 = reg_names[12];
9511 static int labelno = 0;
9513 /* Small constants that can be done by one add instruction. */
9514 if (delta >= -32768 && delta <= 32767)
9516 if (! TARGET_NEW_MNEMONICS)
9517 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9519 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9522 /* Large constants that can be done by one addis instruction. */
9523 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9524 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9527 /* 32-bit constants that can be done by an add and addis instruction. */
9528 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9530 /* Break into two pieces, propagating the sign bit from the low
9531 word to the upper word. */
9532 int delta_high = delta >> 16;
9533 int delta_low = delta & 0xffff;
9534 if ((delta_low & 0x8000) != 0)
9537 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9540 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9543 if (! TARGET_NEW_MNEMONICS)
9544 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9546 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9549 /* 64-bit constants, fixme */
9553 /* Get the prefix in front of the names. */
9554 switch (DEFAULT_ABI)
9564 case ABI_AIX_NODESC:
9569 /* If the function is compiled in this module, jump to it directly.
9570 Otherwise, load up its address and jump to it. */
9572 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9574 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9575 && ! lookup_attribute ("longcall",
9576 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9578 fprintf (file, "\tb %s", prefix);
9579 assemble_name (file, fname);
9580 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9586 switch (DEFAULT_ABI)
9592 /* Set up a TOC entry for the function. */
9593 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9595 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9598 if (TARGET_MINIMAL_TOC)
9599 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9602 fputs ("\t.tc ", file);
9603 assemble_name (file, fname);
9604 fputs ("[TC],", file);
9606 assemble_name (file, fname);
9609 if (TARGET_MINIMAL_TOC)
9610 asm_fprintf (file, (TARGET_32BIT)
9611 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9612 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9613 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9614 assemble_name (file, buf);
9615 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9616 fputs ("-(.LCTOC1)", file);
9617 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9619 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9623 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9626 asm_fprintf (file, "\tmtctr %s\n", r0);
9628 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9631 asm_fprintf (file, "\tbctr\n");
9634 case ABI_AIX_NODESC:
9636 fprintf (file, "\tb %s", prefix);
9637 assemble_name (file, fname);
9638 if (flag_pic) fputs ("@plt", file);
9644 fprintf (file, "\tb %s", prefix);
9645 if (flag_pic && !machopic_name_defined_p (fname))
9646 assemble_name (file, machopic_stub_name (fname));
9648 assemble_name (file, fname);
9657 /* A quick summary of the various types of 'constant-pool tables'
9660 Target Flags Name One table per
9661 AIX (none) AIX TOC object file
9662 AIX -mfull-toc AIX TOC object file
9663 AIX -mminimal-toc AIX minimal TOC translation unit
9664 SVR4/EABI (none) SVR4 SDATA object file
9665 SVR4/EABI -fpic SVR4 pic object file
9666 SVR4/EABI -fPIC SVR4 PIC translation unit
9667 SVR4/EABI -mrelocatable EABI TOC function
9668 SVR4/EABI -maix AIX TOC object file
9669 SVR4/EABI -maix -mminimal-toc
9670 AIX minimal TOC translation unit
9672 Name Reg. Set by entries contains:
9673 made by addrs? fp? sum?
9675 AIX TOC 2 crt0 as Y option option
9676 AIX minimal TOC 30 prolog gcc Y Y option
9677 SVR4 SDATA 13 crt0 gcc N Y N
9678 SVR4 pic 30 prolog ld Y not yet N
9679 SVR4 PIC 30 prolog gcc Y option option
9680 EABI TOC 30 prolog gcc Y option option
9684 /* Hash table stuff for keeping track of TOC entries. */
9686 struct toc_hash_struct
9688 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9689 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9691 enum machine_mode key_mode;
9695 static htab_t toc_hash_table;
9697 /* Hash functions for the hash table. */
9700 rs6000_hash_constant (k)
9703 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9704 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9705 int flen = strlen (format);
9708 if (GET_CODE (k) == LABEL_REF)
9709 return result * 1231 + X0INT (XEXP (k, 0), 3);
9711 if (GET_CODE (k) == CONST_DOUBLE)
9713 else if (GET_CODE (k) == CODE_LABEL)
9718 for (; fidx < flen; fidx++)
9719 switch (format[fidx])
9724 const char *str = XSTR (k, fidx);
9726 result = result * 613 + len;
9727 for (i = 0; i < len; i++)
9728 result = result * 613 + (unsigned) str[i];
9733 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9737 result = result * 613 + (unsigned) XINT (k, fidx);
9740 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9741 result = result * 613 + (unsigned) XWINT (k, fidx);
9745 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9746 result = result * 613 + (unsigned) (XWINT (k, fidx)
9757 toc_hash_function (hash_entry)
9758 const void * hash_entry;
9760 const struct toc_hash_struct *thc =
9761 (const struct toc_hash_struct *) hash_entry;
9762 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9765 /* Compare H1 and H2 for equivalence. */
9768 toc_hash_eq (h1, h2)
9772 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9773 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9775 if (((const struct toc_hash_struct *) h1)->key_mode
9776 != ((const struct toc_hash_struct *) h2)->key_mode)
9779 /* Gotcha: One of these const_doubles will be in memory.
9780 The other may be on the constant-pool chain.
9781 So rtx_equal_p will think they are different... */
9784 if (GET_CODE (r1) != GET_CODE (r2)
9785 || GET_MODE (r1) != GET_MODE (r2))
9787 if (GET_CODE (r1) == CONST_DOUBLE)
9789 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9791 for (i = 1; i < format_len; i++)
9792 if (XWINT (r1, i) != XWINT (r2, i))
9797 else if (GET_CODE (r1) == LABEL_REF)
9798 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9799 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9801 return rtx_equal_p (r1, r2);
9804 /* Mark the hash table-entry HASH_ENTRY. */
9807 toc_hash_mark_entry (hash_slot, unused)
9809 void * unused ATTRIBUTE_UNUSED;
9811 const struct toc_hash_struct * hash_entry =
9812 *(const struct toc_hash_struct **) hash_slot;
9813 rtx r = hash_entry->key;
9814 ggc_set_mark (hash_entry);
9815 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9816 if (GET_CODE (r) == LABEL_REF)
9819 ggc_set_mark (XEXP (r, 0));
9826 /* Mark all the elements of the TOC hash-table *HT. */
9829 toc_hash_mark_table (vht)
9834 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9837 /* These are the names given by the C++ front-end to vtables, and
9838 vtable-like objects. Ideally, this logic should not be here;
9839 instead, there should be some programmatic way of inquiring as
9840 to whether or not an object is a vtable. */
9842 #define VTABLE_NAME_P(NAME) \
9843 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
9844 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
9845 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
9846 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
9849 rs6000_output_symbol_ref (file, x)
9853 /* Currently C++ toc references to vtables can be emitted before it
9854 is decided whether the vtable is public or private. If this is
9855 the case, then the linker will eventually complain that there is
9856 a reference to an unknown section. Thus, for vtables only,
9857 we emit the TOC reference to reference the symbol and not the
9859 const char *name = XSTR (x, 0);
9861 if (VTABLE_NAME_P (name))
9863 RS6000_OUTPUT_BASENAME (file, name);
9866 assemble_name (file, name);
9869 /* Output a TOC entry. We derive the entry name from what is being
9873 output_toc (file, x, labelno, mode)
9877 enum machine_mode mode;
9880 const char *name = buf;
9881 const char *real_name;
9888 /* When the linker won't eliminate them, don't output duplicate
9889 TOC entries (this happens on AIX if there is any kind of TOC,
9890 and on SVR4 under -fPIC or -mrelocatable). */
9893 struct toc_hash_struct *h;
9896 h = ggc_alloc (sizeof (*h));
9899 h->labelno = labelno;
9901 found = htab_find_slot (toc_hash_table, h, 1);
9904 else /* This is indeed a duplicate.
9905 Set this label equal to that label. */
9907 fputs ("\t.set ", file);
9908 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9909 fprintf (file, "%d,", labelno);
9910 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9911 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9917 /* If we're going to put a double constant in the TOC, make sure it's
9918 aligned properly when strict alignment is on. */
9919 if (GET_CODE (x) == CONST_DOUBLE
9921 && GET_MODE_BITSIZE (mode) >= 64
9922 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
9923 ASM_OUTPUT_ALIGN (file, 3);
9926 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
9928 /* Handle FP constants specially. Note that if we have a minimal
9929 TOC, things we put here aren't actually in the TOC, so we can allow
9931 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9936 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9937 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
9941 if (TARGET_MINIMAL_TOC)
9942 fputs (DOUBLE_INT_ASM_OP, file);
9944 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9945 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
9950 if (TARGET_MINIMAL_TOC)
9951 fputs ("\t.long ", file);
9953 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9954 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
9958 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9963 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9964 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
9968 if (TARGET_MINIMAL_TOC)
9969 fputs (DOUBLE_INT_ASM_OP, file);
9971 fprintf (file, "\t.tc FS_%lx[TC],", l);
9972 fprintf (file, "0x%lx00000000\n", l);
9977 if (TARGET_MINIMAL_TOC)
9978 fputs ("\t.long ", file);
9980 fprintf (file, "\t.tc FS_%lx[TC],", l);
9981 fprintf (file, "0x%lx\n", l);
9985 else if (GET_MODE (x) == VOIDmode
9986 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
9988 unsigned HOST_WIDE_INT low;
9991 if (GET_CODE (x) == CONST_DOUBLE)
9993 low = CONST_DOUBLE_LOW (x);
9994 high = CONST_DOUBLE_HIGH (x);
9997 #if HOST_BITS_PER_WIDE_INT == 32
10000 high = (low & 0x80000000) ? ~0 : 0;
10004 low = INTVAL (x) & 0xffffffff;
10005 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10009 /* TOC entries are always Pmode-sized, but since this
10010 is a bigendian machine then if we're putting smaller
10011 integer constants in the TOC we have to pad them.
10012 (This is still a win over putting the constants in
10013 a separate constant pool, because then we'd have
10014 to have both a TOC entry _and_ the actual constant.)
10016 For a 32-bit target, CONST_INT values are loaded and shifted
10017 entirely within `low' and can be stored in one TOC entry. */
10019 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10020 abort ();/* It would be easy to make this work, but it doesn't now. */
10022 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10023 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10024 POINTER_SIZE, &low, &high, 0);
10028 if (TARGET_MINIMAL_TOC)
10029 fputs (DOUBLE_INT_ASM_OP, file);
10031 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
10032 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10037 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10039 if (TARGET_MINIMAL_TOC)
10040 fputs ("\t.long ", file);
10042 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10043 (long)high, (long)low);
10044 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10048 if (TARGET_MINIMAL_TOC)
10049 fputs ("\t.long ", file);
10051 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10052 fprintf (file, "0x%lx\n", (long) low);
10058 if (GET_CODE (x) == CONST)
10060 if (GET_CODE (XEXP (x, 0)) != PLUS)
10063 base = XEXP (XEXP (x, 0), 0);
10064 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10067 if (GET_CODE (base) == SYMBOL_REF)
10068 name = XSTR (base, 0);
10069 else if (GET_CODE (base) == LABEL_REF)
10070 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10071 else if (GET_CODE (base) == CODE_LABEL)
10072 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10076 STRIP_NAME_ENCODING (real_name, name);
10077 if (TARGET_MINIMAL_TOC)
10078 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10081 fprintf (file, "\t.tc %s", real_name);
10084 fprintf (file, ".N%d", - offset);
10086 fprintf (file, ".P%d", offset);
10088 fputs ("[TC],", file);
10091 /* Currently C++ toc references to vtables can be emitted before it
10092 is decided whether the vtable is public or private. If this is
10093 the case, then the linker will eventually complain that there is
10094 a TOC reference to an unknown section. Thus, for vtables only,
10095 we emit the TOC reference to reference the symbol and not the
10097 if (VTABLE_NAME_P (name))
10099 RS6000_OUTPUT_BASENAME (file, name);
10101 fprintf (file, "%d", offset);
10102 else if (offset > 0)
10103 fprintf (file, "+%d", offset);
10106 output_addr_const (file, x);
10110 /* Output an assembler pseudo-op to write an ASCII string of N characters
10111 starting at P to FILE.
10113 On the RS/6000, we have to do this using the .byte operation and
10114 write out special characters outside the quoted string.
10115 Also, the assembler is broken; very long strings are truncated,
10116 so we must artificially break them up early. */
10119 output_ascii (file, p, n)
10125 int i, count_string;
10126 const char *for_string = "\t.byte \"";
10127 const char *for_decimal = "\t.byte ";
10128 const char *to_close = NULL;
10131 for (i = 0; i < n; i++)
10134 if (c >= ' ' && c < 0177)
10137 fputs (for_string, file);
10140 /* Write two quotes to get one. */
10148 for_decimal = "\"\n\t.byte ";
10152 if (count_string >= 512)
10154 fputs (to_close, file);
10156 for_string = "\t.byte \"";
10157 for_decimal = "\t.byte ";
10165 fputs (for_decimal, file);
10166 fprintf (file, "%d", c);
10168 for_string = "\n\t.byte \"";
10169 for_decimal = ", ";
10175 /* Now close the string if we have written one. Then end the line. */
10177 fputs (to_close, file);
10180 /* Generate a unique section name for FILENAME for a section type
10181 represented by SECTION_DESC. Output goes into BUF.
10183 SECTION_DESC can be any string, as long as it is different for each
10184 possible section type.
10186 We name the section in the same manner as xlc. The name begins with an
10187 underscore followed by the filename (after stripping any leading directory
10188 names) with the last period replaced by the string SECTION_DESC. If
10189 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10193 rs6000_gen_section_name (buf, filename, section_desc)
10195 const char *filename;
10196 const char *section_desc;
10198 const char *q, *after_last_slash, *last_period = 0;
10202 after_last_slash = filename;
10203 for (q = filename; *q; q++)
10206 after_last_slash = q + 1;
10207 else if (*q == '.')
10211 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10212 *buf = (char *) permalloc (len);
10217 for (q = after_last_slash; *q; q++)
10219 if (q == last_period)
10221 strcpy (p, section_desc);
10222 p += strlen (section_desc);
10225 else if (ISALNUM (*q))
10229 if (last_period == 0)
10230 strcpy (p, section_desc);
10235 /* Emit profile function. */
10238 output_profile_hook (labelno)
10241 if (DEFAULT_ABI == ABI_AIX)
10244 const char *label_name;
10249 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10250 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10251 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10253 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10256 else if (DEFAULT_ABI == ABI_DARWIN)
10258 const char *mcount_name = RS6000_MCOUNT;
10259 int caller_addr_regno = LINK_REGISTER_REGNUM;
10261 /* Be conservative and always set this, at least for now. */
10262 current_function_uses_pic_offset_table = 1;
10265 /* For PIC code, set up a stub and collect the caller's address
10266 from r0, which is where the prologue puts it. */
10269 mcount_name = machopic_stub_name (mcount_name);
10270 if (current_function_uses_pic_offset_table)
10271 caller_addr_regno = 0;
10274 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10276 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10280 /* Write function profiler code. */
10283 output_function_profiler (file, labelno)
10289 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10290 switch (DEFAULT_ABI)
10296 case ABI_AIX_NODESC:
10297 fprintf (file, "\tmflr %s\n", reg_names[0]);
10300 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10301 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10302 reg_names[0], reg_names[1]);
10303 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10304 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10305 assemble_name (file, buf);
10306 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10308 else if (flag_pic > 1)
10310 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10311 reg_names[0], reg_names[1]);
10312 /* Now, we need to get the address of the label. */
10313 fputs ("\tbl 1f\n\t.long ", file);
10314 assemble_name (file, buf);
10315 fputs ("-.\n1:", file);
10316 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10317 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10318 reg_names[0], reg_names[11]);
10319 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10320 reg_names[0], reg_names[0], reg_names[11]);
10324 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10325 assemble_name (file, buf);
10326 fputs ("@ha\n", file);
10327 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10328 reg_names[0], reg_names[1]);
10329 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10330 assemble_name (file, buf);
10331 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10334 if (current_function_needs_context)
10335 asm_fprintf (file, "\tmr %s,%s\n",
10336 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10337 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10338 if (current_function_needs_context)
10339 asm_fprintf (file, "\tmr %s,%s\n",
10340 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10345 /* Don't do anything, done in output_profile_hook (). */
10351 /* Adjust the cost of a scheduling dependency. Return the new cost of
10352 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10355 rs6000_adjust_cost (insn, link, dep_insn, cost)
10358 rtx dep_insn ATTRIBUTE_UNUSED;
10361 if (! recog_memoized (insn))
10364 if (REG_NOTE_KIND (link) != 0)
10367 if (REG_NOTE_KIND (link) == 0)
10369 /* Data dependency; DEP_INSN writes a register that INSN reads
10370 some cycles later. */
10371 switch (get_attr_type (insn))
10374 /* Tell the first scheduling pass about the latency between
10375 a mtctr and bctr (and mtlr and br/blr). The first
10376 scheduling pass will not know about this latency since
10377 the mtctr instruction, which has the latency associated
10378 to it, will be generated by reload. */
10379 return TARGET_POWER ? 5 : 4;
10381 /* Leave some extra cycles between a compare and its
10382 dependent branch, to inhibit expensive mispredicts. */
10383 if ((rs6000_cpu_attr == CPU_PPC750
10384 || rs6000_cpu_attr == CPU_PPC7400
10385 || rs6000_cpu_attr == CPU_PPC7450)
10386 && recog_memoized (dep_insn)
10387 && (INSN_CODE (dep_insn) >= 0)
10388 && (get_attr_type (dep_insn) == TYPE_COMPARE
10389 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10390 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10391 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10396 /* Fall out to return default cost. */
10402 /* A C statement (sans semicolon) to update the integer scheduling
10403 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10404 INSN earlier, increase the priority to execute INSN later. Do not
10405 define this macro if you do not need to adjust the scheduling
10406 priorities of insns. */
10409 rs6000_adjust_priority (insn, priority)
10410 rtx insn ATTRIBUTE_UNUSED;
10413 /* On machines (like the 750) which have asymmetric integer units,
10414 where one integer unit can do multiply and divides and the other
10415 can't, reduce the priority of multiply/divide so it is scheduled
10416 before other integer operations. */
10419 if (! INSN_P (insn))
10422 if (GET_CODE (PATTERN (insn)) == USE)
10425 switch (rs6000_cpu_attr) {
10427 switch (get_attr_type (insn))
10434 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10435 priority, priority);
10436 if (priority >= 0 && priority < 0x01000000)
10446 /* Return how many instructions the machine can issue per cycle. */
10449 rs6000_issue_rate ()
10451 switch (rs6000_cpu_attr) {
10452 case CPU_RIOS1: /* ? */
10454 case CPU_PPC601: /* ? */
10473 /* Length in units of the trampoline for entering a nested function. */
10476 rs6000_trampoline_size ()
10480 switch (DEFAULT_ABI)
10486 ret = (TARGET_32BIT) ? 12 : 24;
10491 case ABI_AIX_NODESC:
10492 ret = (TARGET_32BIT) ? 40 : 48;
10499 /* Emit RTL insns to initialize the variable parts of a trampoline.
10500 FNADDR is an RTX for the address of the function's pure code.
10501 CXT is an RTX for the static chain value for the function. */
10504 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10509 enum machine_mode pmode = Pmode;
10510 int regsize = (TARGET_32BIT) ? 4 : 8;
10511 rtx ctx_reg = force_reg (pmode, cxt);
10513 switch (DEFAULT_ABI)
10518 /* Macros to shorten the code expansions below. */
10519 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10520 #define MEM_PLUS(addr,offset) \
10521 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10523 /* Under AIX, just build the 3 word function descriptor */
10526 rtx fn_reg = gen_reg_rtx (pmode);
10527 rtx toc_reg = gen_reg_rtx (pmode);
10528 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10529 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10530 emit_move_insn (MEM_DEREF (addr), fn_reg);
10531 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10532 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10536 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10539 case ABI_AIX_NODESC:
10540 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10541 FALSE, VOIDmode, 4,
10543 GEN_INT (rs6000_trampoline_size ()), SImode,
10553 /* Table of valid machine attributes. */
10555 const struct attribute_spec rs6000_attribute_table[] =
10557 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10558 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10559 { NULL, 0, 0, false, false, false, NULL }
10562 /* Handle a "longcall" attribute; arguments as in struct
10563 attribute_spec.handler. */
10566 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10569 tree args ATTRIBUTE_UNUSED;
10570 int flags ATTRIBUTE_UNUSED;
10571 bool *no_add_attrs;
10573 if (TREE_CODE (*node) != FUNCTION_TYPE
10574 && TREE_CODE (*node) != FIELD_DECL
10575 && TREE_CODE (*node) != TYPE_DECL)
10577 warning ("`%s' attribute only applies to functions",
10578 IDENTIFIER_POINTER (name));
10579 *no_add_attrs = true;
10585 /* Return a reference suitable for calling a function with the
10586 longcall attribute. */
10589 rs6000_longcall_ref (call_ref)
10592 const char *call_name;
10595 if (GET_CODE (call_ref) != SYMBOL_REF)
10598 /* System V adds '.' to the internal name, so skip them. */
10599 call_name = XSTR (call_ref, 0);
10600 if (*call_name == '.')
10602 while (*call_name == '.')
10605 node = get_identifier (call_name);
10606 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10609 return force_reg (Pmode, call_ref);
10613 /* A C statement or statements to switch to the appropriate section
10614 for output of RTX in mode MODE. You can assume that RTX is some
10615 kind of constant in RTL. The argument MODE is redundant except in
10616 the case of a `const_int' rtx. Select the section by calling
10617 `text_section' or one of the alternatives for other sections.
10619 Do not define this macro if you put all constants in the read-only
10622 #ifdef USING_ELFOS_H
10625 rs6000_select_rtx_section (mode, x)
10626 enum machine_mode mode;
10629 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10635 /* A C statement or statements to switch to the appropriate
10636 section for output of DECL. DECL is either a `VAR_DECL' node
10637 or a constant of some sort. RELOC indicates whether forming
10638 the initial value of DECL requires link-time relocations. */
10641 rs6000_select_section (decl, reloc)
10645 int size = int_size_in_bytes (TREE_TYPE (decl));
10648 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10655 needs_sdata = (size > 0
10656 && size <= g_switch_value
10657 && rs6000_sdata != SDATA_NONE
10658 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10660 if (TREE_CODE (decl) == STRING_CST)
10661 readonly = ! flag_writable_strings;
10662 else if (TREE_CODE (decl) == VAR_DECL)
10663 readonly = (! (flag_pic && reloc)
10664 && TREE_READONLY (decl)
10665 && ! TREE_SIDE_EFFECTS (decl)
10666 && DECL_INITIAL (decl)
10667 && DECL_INITIAL (decl) != error_mark_node
10668 && TREE_CONSTANT (DECL_INITIAL (decl)));
10669 else if (TREE_CODE (decl) == CONSTRUCTOR)
10670 readonly = (! (flag_pic && reloc)
10671 && ! TREE_SIDE_EFFECTS (decl)
10672 && TREE_CONSTANT (decl));
10675 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10678 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10681 /* A C statement to build up a unique section name, expressed as a
10682 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10683 RELOC indicates whether the initial value of EXP requires
10684 link-time relocations. If you do not define this macro, GCC will use
10685 the symbol name prefixed by `.' as the section name. Note - this
10686 macro can now be called for uninitialized data items as well as
10687 initialised data and functions. */
10690 rs6000_unique_section (decl, reloc)
10698 const char *prefix;
10700 static const char *const prefixes[7][2] =
10702 { ".rodata.", ".gnu.linkonce.r." },
10703 { ".sdata2.", ".gnu.linkonce.s2." },
10704 { ".data.", ".gnu.linkonce.d." },
10705 { ".sdata.", ".gnu.linkonce.s." },
10706 { ".bss.", ".gnu.linkonce.b." },
10707 { ".sbss.", ".gnu.linkonce.sb." },
10708 { ".text.", ".gnu.linkonce.t." }
10711 if (TREE_CODE (decl) == FUNCTION_DECL)
10720 if (TREE_CODE (decl) == STRING_CST)
10721 readonly = ! flag_writable_strings;
10722 else if (TREE_CODE (decl) == VAR_DECL)
10723 readonly = (! (flag_pic && reloc)
10724 && TREE_READONLY (decl)
10725 && ! TREE_SIDE_EFFECTS (decl)
10726 && TREE_CONSTANT (DECL_INITIAL (decl)));
10728 size = int_size_in_bytes (TREE_TYPE (decl));
10729 needs_sdata = (size > 0
10730 && size <= g_switch_value
10731 && rs6000_sdata != SDATA_NONE
10732 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10734 if (DECL_INITIAL (decl) == 0
10735 || DECL_INITIAL (decl) == error_mark_node)
10737 else if (! readonly)
10744 /* .sdata2 is only for EABI. */
10745 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10751 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10752 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10753 len = strlen (name) + strlen (prefix);
10754 string = alloca (len + 1);
10756 sprintf (string, "%s%s", prefix, name);
10758 DECL_SECTION_NAME (decl) = build_string (len, string);
10762 /* If we are referencing a function that is static or is known to be
10763 in this file, make the SYMBOL_REF special. We can use this to indicate
10764 that we can branch to this function without emitting a no-op after the
10765 call. For real AIX calling sequences, we also replace the
10766 function name with the real name (1 or 2 leading .'s), rather than
10767 the function descriptor name. This saves a lot of overriding code
10768 to read the prefixes. */
10771 rs6000_encode_section_info (decl)
10774 if (TREE_CODE (decl) == FUNCTION_DECL)
10776 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10777 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10778 && ! DECL_WEAK (decl))
10779 SYMBOL_REF_FLAG (sym_ref) = 1;
10781 if (DEFAULT_ABI == ABI_AIX)
10783 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10784 size_t len2 = strlen (XSTR (sym_ref, 0));
10785 char *str = alloca (len1 + len2 + 1);
10788 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10790 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10793 else if (rs6000_sdata != SDATA_NONE
10794 && DEFAULT_ABI == ABI_V4
10795 && TREE_CODE (decl) == VAR_DECL)
10797 int size = int_size_in_bytes (TREE_TYPE (decl));
10798 tree section_name = DECL_SECTION_NAME (decl);
10799 const char *name = (char *)0;
10804 if (TREE_CODE (section_name) == STRING_CST)
10806 name = TREE_STRING_POINTER (section_name);
10807 len = TREE_STRING_LENGTH (section_name);
10813 if ((size > 0 && size <= g_switch_value)
10815 && ((len == sizeof (".sdata") - 1
10816 && strcmp (name, ".sdata") == 0)
10817 || (len == sizeof (".sdata2") - 1
10818 && strcmp (name, ".sdata2") == 0)
10819 || (len == sizeof (".sbss") - 1
10820 && strcmp (name, ".sbss") == 0)
10821 || (len == sizeof (".sbss2") - 1
10822 && strcmp (name, ".sbss2") == 0)
10823 || (len == sizeof (".PPC.EMB.sdata0") - 1
10824 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10825 || (len == sizeof (".PPC.EMB.sbss0") - 1
10826 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10828 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10829 size_t len = strlen (XSTR (sym_ref, 0));
10830 char *str = alloca (len + 2);
10833 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10834 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10839 #endif /* USING_ELFOS_H */
10842 /* Return a REG that occurs in ADDR with coefficient 1.
10843 ADDR can be effectively incremented by incrementing REG.
10845 r0 is special and we must not select it as an address
10846 register by this routine since our caller will try to
10847 increment the returned register via an "la" instruction. */
10850 find_addr_reg (addr)
10853 while (GET_CODE (addr) == PLUS)
10855 if (GET_CODE (XEXP (addr, 0)) == REG
10856 && REGNO (XEXP (addr, 0)) != 0)
10857 addr = XEXP (addr, 0);
10858 else if (GET_CODE (XEXP (addr, 1)) == REG
10859 && REGNO (XEXP (addr, 1)) != 0)
10860 addr = XEXP (addr, 1);
10861 else if (CONSTANT_P (XEXP (addr, 0)))
10862 addr = XEXP (addr, 1);
10863 else if (CONSTANT_P (XEXP (addr, 1)))
10864 addr = XEXP (addr, 0);
10868 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10874 rs6000_fatal_bad_address (op)
10877 fatal_insn ("bad address", op);
10880 /* Called to register all of our global variables with the garbage
10884 rs6000_add_gc_roots ()
10886 ggc_add_rtx_root (&rs6000_compare_op0, 1);
10887 ggc_add_rtx_root (&rs6000_compare_op1, 1);
10889 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
10890 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
10891 toc_hash_mark_table);
10894 machopic_add_gc_roots ();
10901 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
10902 reference and a constant. */
10905 symbolic_operand (op)
10908 switch (GET_CODE (op))
10915 return (GET_CODE (op) == SYMBOL_REF ||
10916 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
10917 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
10918 && GET_CODE (XEXP (op, 1)) == CONST_INT);
10925 #ifdef RS6000_LONG_BRANCH
10927 static tree stub_list = 0;
10929 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
10930 procedure calls to the linked list. */
10933 add_compiler_stub (label_name, function_name, line_number)
10935 tree function_name;
10938 tree stub = build_tree_list (function_name, label_name);
10939 TREE_TYPE (stub) = build_int_2 (line_number, 0);
10940 TREE_CHAIN (stub) = stub_list;
10944 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
10945 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
10946 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
10948 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10949 handling procedure calls from the linked list and initializes the
10953 output_compiler_stub ()
10956 char label_buf[256];
10958 tree tmp_stub, stub;
10961 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10963 fprintf (asm_out_file,
10964 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10966 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10967 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10968 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10969 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10971 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10973 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10976 label_buf[0] = '_';
10977 strcpy (label_buf+1,
10978 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10981 strcpy (tmp_buf, "lis r12,hi16(");
10982 strcat (tmp_buf, label_buf);
10983 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
10984 strcat (tmp_buf, label_buf);
10985 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
10986 output_asm_insn (tmp_buf, 0);
10988 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10989 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10990 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
10991 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10997 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
10998 already there or not. */
11001 no_previous_def (function_name)
11002 tree function_name;
11005 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11006 if (function_name == STUB_FUNCTION_NAME (stub))
11011 /* GET_PREV_LABEL gets the label name from the previous definition of
11015 get_prev_label (function_name)
11016 tree function_name;
11019 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11020 if (function_name == STUB_FUNCTION_NAME (stub))
11021 return STUB_LABEL_NAME (stub);
11025 /* INSN is either a function call or a millicode call. It may have an
11026 unconditional jump in its delay slot.
11028 CALL_DEST is the routine we are calling. */
11031 output_call (insn, call_dest, operand_number)
11034 int operand_number;
11036 static char buf[256];
11037 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11040 tree funname = get_identifier (XSTR (call_dest, 0));
11042 if (no_previous_def (funname))
11045 rtx label_rtx = gen_label_rtx ();
11046 char *label_buf, temp_buf[256];
11047 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11048 CODE_LABEL_NUMBER (label_rtx));
11049 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11050 labelname = get_identifier (label_buf);
11051 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11053 line_number = NOTE_LINE_NUMBER (insn);
11054 add_compiler_stub (labelname, funname, line_number);
11057 labelname = get_prev_label (funname);
11059 sprintf (buf, "jbsr %%z%d,%.246s",
11060 operand_number, IDENTIFIER_POINTER (labelname));
11065 sprintf (buf, "bl %%z%d", operand_number);
11070 #endif /* RS6000_LONG_BRANCH */
11072 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11074 const char *const symbol_ = (SYMBOL); \
11075 char *buffer_ = (BUF); \
11076 if (symbol_[0] == '"') \
11078 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11080 else if (name_needs_quotes(symbol_)) \
11082 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11086 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11091 /* Generate PIC and indirect symbol stubs. */
11094 machopic_output_stub (file, symb, stub)
11096 const char *symb, *stub;
11098 unsigned int length;
11099 char *symbol_name, *lazy_ptr_name;
11100 char *local_label_0;
11101 static int label = 0;
11103 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11104 STRIP_NAME_ENCODING (symb, symb);
11108 length = strlen (symb);
11109 symbol_name = alloca (length + 32);
11110 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11112 lazy_ptr_name = alloca (length + 32);
11113 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11115 local_label_0 = alloca (length + 32);
11116 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11119 machopic_picsymbol_stub_section ();
11121 machopic_symbol_stub_section ();
11123 fprintf (file, "%s:\n", stub);
11124 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11128 fprintf (file, "\tmflr r0\n");
11129 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11130 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11131 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11132 lazy_ptr_name, local_label_0);
11133 fprintf (file, "\tmtlr r0\n");
11134 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11135 lazy_ptr_name, local_label_0);
11136 fprintf (file, "\tmtctr r12\n");
11137 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11138 lazy_ptr_name, local_label_0);
11139 fprintf (file, "\tbctr\n");
11142 fprintf (file, "non-pure not supported\n");
11144 machopic_lazy_symbol_ptr_section ();
11145 fprintf (file, "%s:\n", lazy_ptr_name);
11146 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11147 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11150 /* Legitimize PIC addresses. If the address is already
11151 position-independent, we return ORIG. Newly generated
11152 position-independent addresses go into a reg. This is REG if non
11153 zero, otherwise we allocate register(s) as necessary. */
11155 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11158 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11160 enum machine_mode mode;
11165 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11166 reg = gen_reg_rtx (Pmode);
11168 if (GET_CODE (orig) == CONST)
11170 if (GET_CODE (XEXP (orig, 0)) == PLUS
11171 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11174 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11177 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11180 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11186 if (GET_CODE (offset) == CONST_INT)
11188 if (SMALL_INT (offset))
11189 return plus_constant (base, INTVAL (offset));
11190 else if (! reload_in_progress && ! reload_completed)
11191 offset = force_reg (Pmode, offset);
11194 rtx mem = force_const_mem (Pmode, orig);
11195 return machopic_legitimize_pic_address (mem, Pmode, reg);
11198 return gen_rtx (PLUS, Pmode, base, offset);
11201 /* Fall back on generic machopic code. */
11202 return machopic_legitimize_pic_address (orig, mode, reg);
11205 /* This is just a placeholder to make linking work without having to
11206 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11207 ever needed for Darwin (not too likely!) this would have to get a
11208 real definition. */
11215 #endif /* TARGET_MACHO */
11218 static unsigned int
11219 rs6000_elf_section_type_flags (decl, name, reloc)
11224 unsigned int flags = default_section_type_flags (decl, name, reloc);
11226 if (TARGET_RELOCATABLE)
11227 flags |= SECTION_WRITE;
11232 /* Record an element in the table of global constructors. SYMBOL is
11233 a SYMBOL_REF of the function to be called; PRIORITY is a number
11234 between 0 and MAX_INIT_PRIORITY.
11236 This differs from default_named_section_asm_out_constructor in
11237 that we have special handling for -mrelocatable. */
11240 rs6000_elf_asm_out_constructor (symbol, priority)
11244 const char *section = ".ctors";
11247 if (priority != DEFAULT_INIT_PRIORITY)
11249 sprintf (buf, ".ctors.%.5u",
11250 /* Invert the numbering so the linker puts us in the proper
11251 order; constructors are run from right to left, and the
11252 linker sorts in increasing order. */
11253 MAX_INIT_PRIORITY - priority);
11257 named_section_flags (section, SECTION_WRITE);
11258 assemble_align (POINTER_SIZE);
11260 if (TARGET_RELOCATABLE)
11262 fputs ("\t.long (", asm_out_file);
11263 output_addr_const (asm_out_file, symbol);
11264 fputs (")@fixup\n", asm_out_file);
11267 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11271 rs6000_elf_asm_out_destructor (symbol, priority)
11275 const char *section = ".dtors";
11278 if (priority != DEFAULT_INIT_PRIORITY)
11280 sprintf (buf, ".dtors.%.5u",
11281 /* Invert the numbering so the linker puts us in the proper
11282 order; constructors are run from right to left, and the
11283 linker sorts in increasing order. */
11284 MAX_INIT_PRIORITY - priority);
11288 named_section_flags (section, SECTION_WRITE);
11289 assemble_align (POINTER_SIZE);
11291 if (TARGET_RELOCATABLE)
11293 fputs ("\t.long (", asm_out_file);
11294 output_addr_const (asm_out_file, symbol);
11295 fputs (")@fixup\n", asm_out_file);
11298 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11302 #ifdef OBJECT_FORMAT_COFF
11304 xcoff_asm_named_section (name, flags)
11306 unsigned int flags ATTRIBUTE_UNUSED;
11308 fprintf (asm_out_file, "\t.csect %s\n", name);