1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
167 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
168 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
169 static void rs6000_parse_abi_options PARAMS ((void));
170 static int first_altivec_reg_to_save PARAMS ((void));
171 static unsigned int compute_vrsave_mask PARAMS ((void));
172 static void is_altivec_return_reg PARAMS ((rtx, void *));
173 int vrsave_operation PARAMS ((rtx, enum machine_mode));
174 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
175 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 /* Default register names. */
178 char rs6000_reg_names[][8] =
180 "0", "1", "2", "3", "4", "5", "6", "7",
181 "8", "9", "10", "11", "12", "13", "14", "15",
182 "16", "17", "18", "19", "20", "21", "22", "23",
183 "24", "25", "26", "27", "28", "29", "30", "31",
184 "0", "1", "2", "3", "4", "5", "6", "7",
185 "8", "9", "10", "11", "12", "13", "14", "15",
186 "16", "17", "18", "19", "20", "21", "22", "23",
187 "24", "25", "26", "27", "28", "29", "30", "31",
188 "mq", "lr", "ctr","ap",
189 "0", "1", "2", "3", "4", "5", "6", "7",
191 /* AltiVec registers. */
192 "0", "1", "2", "3", "4", "5", "6", "7",
193 "8", "9", "10", "11", "12", "13", "14", "15",
194 "16", "17", "18", "19", "20", "21", "22", "23",
195 "24", "25", "26", "27", "28", "29", "30", "31",
199 #ifdef TARGET_REGNAMES
200 static const char alt_reg_names[][8] =
202 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
203 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
204 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
205 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
206 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
207 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
208 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
209 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
210 "mq", "lr", "ctr", "ap",
211 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
213 /* AltiVec registers. */
214 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
215 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
216 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
217 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
222 #ifndef MASK_STRICT_ALIGN
223 #define MASK_STRICT_ALIGN 0
226 /* Initialize the GCC target structure. */
227 #undef TARGET_ATTRIBUTE_TABLE
228 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
230 #undef TARGET_ASM_ALIGNED_DI_OP
231 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
233 /* Default unaligned ops are only provided for ELF. Find the ops needed
234 for non-ELF systems. */
235 #ifndef OBJECT_FORMAT_ELF
236 #ifdef OBJECT_FORMAT_COFF
237 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
239 #undef TARGET_ASM_UNALIGNED_HI_OP
240 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
241 #undef TARGET_ASM_UNALIGNED_SI_OP
242 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
243 #undef TARGET_ASM_UNALIGNED_DI_OP
244 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
247 #undef TARGET_ASM_UNALIGNED_HI_OP
248 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
249 #undef TARGET_ASM_UNALIGNED_SI_OP
250 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
254 /* This hook deals with fixups for relocatable code and DI-mode objects
256 #undef TARGET_ASM_INTEGER
257 #define TARGET_ASM_INTEGER rs6000_assemble_integer
259 #undef TARGET_ASM_FUNCTION_PROLOGUE
260 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
261 #undef TARGET_ASM_FUNCTION_EPILOGUE
262 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
265 #undef TARGET_SECTION_TYPE_FLAGS
266 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
269 #undef TARGET_SCHED_ISSUE_RATE
270 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
271 #undef TARGET_SCHED_ADJUST_COST
272 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
273 #undef TARGET_SCHED_ADJUST_PRIORITY
274 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
276 #undef TARGET_INIT_BUILTINS
277 #define TARGET_INIT_BUILTINS rs6000_init_builtins
279 #undef TARGET_EXPAND_BUILTIN
280 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
282 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
283 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
285 struct gcc_target targetm = TARGET_INITIALIZER;
287 /* Override command line options. Mostly we process the processor
288 type and sometimes adjust other TARGET_ options. */
291 rs6000_override_options (default_cpu)
292 const char *default_cpu;
295 struct rs6000_cpu_select *ptr;
297 /* Simplify the entries below by making a mask for any POWER
298 variant and any PowerPC variant. */
300 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
301 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
302 | MASK_PPC_GFXOPT | MASK_POWERPC64)
303 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
307 const char *const name; /* Canonical processor name. */
308 const enum processor_type processor; /* Processor type enum value. */
309 const int target_enable; /* Target flags to enable. */
310 const int target_disable; /* Target flags to disable. */
311 } const processor_target_table[]
312 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
313 POWER_MASKS | POWERPC_MASKS},
314 {"power", PROCESSOR_POWER,
315 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
316 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
317 {"power2", PROCESSOR_POWER,
318 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
319 POWERPC_MASKS | MASK_NEW_MNEMONICS},
320 {"power3", PROCESSOR_PPC630,
321 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
322 POWER_MASKS | MASK_PPC_GPOPT},
323 {"powerpc", PROCESSOR_POWERPC,
324 MASK_POWERPC | MASK_NEW_MNEMONICS,
325 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
326 {"powerpc64", PROCESSOR_POWERPC64,
327 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
328 POWER_MASKS | POWERPC_OPT_MASKS},
329 {"rios", PROCESSOR_RIOS1,
330 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
331 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
332 {"rios1", PROCESSOR_RIOS1,
333 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
334 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
335 {"rsc", PROCESSOR_PPC601,
336 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
337 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
338 {"rsc1", PROCESSOR_PPC601,
339 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
340 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
341 {"rios2", PROCESSOR_RIOS2,
342 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
343 POWERPC_MASKS | MASK_NEW_MNEMONICS},
344 {"rs64a", PROCESSOR_RS64A,
345 MASK_POWERPC | MASK_NEW_MNEMONICS,
346 POWER_MASKS | POWERPC_OPT_MASKS},
347 {"401", PROCESSOR_PPC403,
348 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
349 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
350 {"403", PROCESSOR_PPC403,
351 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
352 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
353 {"405", PROCESSOR_PPC405,
354 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
355 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
356 {"505", PROCESSOR_MPCCORE,
357 MASK_POWERPC | MASK_NEW_MNEMONICS,
358 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
359 {"601", PROCESSOR_PPC601,
360 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
361 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
362 {"602", PROCESSOR_PPC603,
363 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
364 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
365 {"603", PROCESSOR_PPC603,
366 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
367 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
368 {"603e", PROCESSOR_PPC603,
369 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
370 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
371 {"ec603e", PROCESSOR_PPC603,
372 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
373 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
374 {"604", PROCESSOR_PPC604,
375 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
376 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
377 {"604e", PROCESSOR_PPC604e,
378 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
379 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
380 {"620", PROCESSOR_PPC620,
381 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
382 POWER_MASKS | MASK_PPC_GPOPT},
383 {"630", PROCESSOR_PPC630,
384 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
385 POWER_MASKS | MASK_PPC_GPOPT},
386 {"740", PROCESSOR_PPC750,
387 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
389 {"750", PROCESSOR_PPC750,
390 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
391 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
392 {"7400", PROCESSOR_PPC7400,
393 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
395 {"7450", PROCESSOR_PPC7450,
396 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
397 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
398 {"801", PROCESSOR_MPCCORE,
399 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
400 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
401 {"821", PROCESSOR_MPCCORE,
402 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
404 {"823", PROCESSOR_MPCCORE,
405 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
407 {"860", PROCESSOR_MPCCORE,
408 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
409 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
411 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
413 /* Save current -mmultiple/-mno-multiple status. */
414 int multiple = TARGET_MULTIPLE;
415 /* Save current -mstring/-mno-string status. */
416 int string = TARGET_STRING;
418 /* Identify the processor type. */
419 rs6000_select[0].string = default_cpu;
420 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
422 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
424 ptr = &rs6000_select[i];
425 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
427 for (j = 0; j < ptt_size; j++)
428 if (! strcmp (ptr->string, processor_target_table[j].name))
431 rs6000_cpu = processor_target_table[j].processor;
435 target_flags |= processor_target_table[j].target_enable;
436 target_flags &= ~processor_target_table[j].target_disable;
442 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
446 /* If we are optimizing big endian systems for space, use the store
447 multiple instructions. */
448 if (BYTES_BIG_ENDIAN && optimize_size)
449 target_flags |= MASK_MULTIPLE;
451 /* If -mmultiple or -mno-multiple was explicitly used, don't
452 override with the processor default */
453 if (TARGET_MULTIPLE_SET)
454 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
456 /* If -mstring or -mno-string was explicitly used, don't override
457 with the processor default. */
458 if (TARGET_STRING_SET)
459 target_flags = (target_flags & ~MASK_STRING) | string;
461 /* Don't allow -mmultiple or -mstring on little endian systems
462 unless the cpu is a 750, because the hardware doesn't support the
463 instructions used in little endian mode, and causes an alignment
464 trap. The 750 does not cause an alignment trap (except when the
465 target is unaligned). */
467 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
471 target_flags &= ~MASK_MULTIPLE;
472 if (TARGET_MULTIPLE_SET)
473 warning ("-mmultiple is not supported on little endian systems");
478 target_flags &= ~MASK_STRING;
479 if (TARGET_STRING_SET)
480 warning ("-mstring is not supported on little endian systems");
484 if (flag_pic && DEFAULT_ABI == ABI_AIX)
486 warning ("-f%s ignored (all code is position independent)",
487 (flag_pic > 1) ? "PIC" : "pic");
491 #ifdef XCOFF_DEBUGGING_INFO
492 if (flag_function_sections && (write_symbols != NO_DEBUG)
493 && DEFAULT_ABI == ABI_AIX)
495 warning ("-ffunction-sections disabled on AIX when debugging");
496 flag_function_sections = 0;
499 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
501 warning ("-fdata-sections not supported on AIX");
502 flag_data_sections = 0;
506 /* Set debug flags */
507 if (rs6000_debug_name)
509 if (! strcmp (rs6000_debug_name, "all"))
510 rs6000_debug_stack = rs6000_debug_arg = 1;
511 else if (! strcmp (rs6000_debug_name, "stack"))
512 rs6000_debug_stack = 1;
513 else if (! strcmp (rs6000_debug_name, "arg"))
514 rs6000_debug_arg = 1;
516 error ("unknown -mdebug-%s switch", rs6000_debug_name);
519 /* Set size of long double */
520 rs6000_long_double_type_size = 64;
521 if (rs6000_long_double_size_string)
524 int size = strtol (rs6000_long_double_size_string, &tail, 10);
525 if (*tail != '\0' || (size != 64 && size != 128))
526 error ("Unknown switch -mlong-double-%s",
527 rs6000_long_double_size_string);
529 rs6000_long_double_type_size = size;
532 /* Handle -mabi= options. */
533 rs6000_parse_abi_options ();
535 #ifdef TARGET_REGNAMES
536 /* If the user desires alternate register names, copy in the
537 alternate names now. */
539 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
542 #ifdef SUBTARGET_OVERRIDE_OPTIONS
543 SUBTARGET_OVERRIDE_OPTIONS;
545 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
546 SUBSUBTARGET_OVERRIDE_OPTIONS;
549 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
550 If -maix-struct-return or -msvr4-struct-return was explicitly
551 used, don't override with the ABI default. */
552 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
554 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
555 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
557 target_flags |= MASK_AIX_STRUCT_RET;
560 /* Register global variables with the garbage collector. */
561 rs6000_add_gc_roots ();
563 /* Allocate an alias set for register saves & restores from stack. */
564 rs6000_sr_alias_set = new_alias_set ();
567 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
569 /* We can only guarantee the availability of DI pseudo-ops when
570 assembling for 64-bit targets. */
573 targetm.asm_out.aligned_op.di = NULL;
574 targetm.asm_out.unaligned_op.di = NULL;
577 /* Arrange to save and restore machine status around nested functions. */
578 init_machine_status = rs6000_init_machine_status;
579 free_machine_status = rs6000_free_machine_status;
582 /* Handle -mabi= options. */
584 rs6000_parse_abi_options ()
586 if (rs6000_abi_string == 0)
588 else if (! strcmp (rs6000_abi_string, "altivec"))
589 rs6000_altivec_abi = 1;
591 error ("unknown ABI specified: '%s'", rs6000_abi_string);
595 optimization_options (level, size)
596 int level ATTRIBUTE_UNUSED;
597 int size ATTRIBUTE_UNUSED;
601 /* Do anything needed at the start of the asm file. */
604 rs6000_file_start (file, default_cpu)
606 const char *default_cpu;
610 const char *start = buffer;
611 struct rs6000_cpu_select *ptr;
613 if (flag_verbose_asm)
615 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
616 rs6000_select[0].string = default_cpu;
618 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
620 ptr = &rs6000_select[i];
621 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
623 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
629 switch (rs6000_sdata)
631 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
632 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
633 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
634 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
637 if (rs6000_sdata && g_switch_value)
639 fprintf (file, "%s -G %d", start, g_switch_value);
650 /* Create a CONST_DOUBLE from a string. */
653 rs6000_float_const (string, mode)
655 enum machine_mode mode;
657 REAL_VALUE_TYPE value;
658 value = REAL_VALUE_ATOF (string, mode);
659 return immed_real_const_1 (value, mode);
662 /* Return non-zero if this function is known to have a null epilogue. */
667 if (reload_completed)
669 rs6000_stack_t *info = rs6000_stack_info ();
671 if (info->first_gp_reg_save == 32
672 && info->first_fp_reg_save == 64
673 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
676 && info->vrsave_mask == 0
684 /* Returns 1 always. */
687 any_operand (op, mode)
688 rtx op ATTRIBUTE_UNUSED;
689 enum machine_mode mode ATTRIBUTE_UNUSED;
694 /* Returns 1 if op is the count register. */
696 count_register_operand (op, mode)
698 enum machine_mode mode ATTRIBUTE_UNUSED;
700 if (GET_CODE (op) != REG)
703 if (REGNO (op) == COUNT_REGISTER_REGNUM)
706 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
713 xer_operand (op, mode)
715 enum machine_mode mode ATTRIBUTE_UNUSED;
717 if (GET_CODE (op) != REG)
720 if (XER_REGNO_P (REGNO (op)))
726 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
727 by such constants completes more quickly. */
730 s8bit_cint_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 return ( GET_CODE (op) == CONST_INT
735 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
738 /* Return 1 if OP is a constant that can fit in a D field. */
741 short_cint_operand (op, mode)
743 enum machine_mode mode ATTRIBUTE_UNUSED;
745 return (GET_CODE (op) == CONST_INT
746 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
749 /* Similar for an unsigned D field. */
752 u_short_cint_operand (op, mode)
754 enum machine_mode mode ATTRIBUTE_UNUSED;
756 return (GET_CODE (op) == CONST_INT
757 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
760 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
763 non_short_cint_operand (op, mode)
765 enum machine_mode mode ATTRIBUTE_UNUSED;
767 return (GET_CODE (op) == CONST_INT
768 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
771 /* Returns 1 if OP is a CONST_INT that is a positive value
772 and an exact power of 2. */
775 exact_log2_cint_operand (op, mode)
777 enum machine_mode mode ATTRIBUTE_UNUSED;
779 return (GET_CODE (op) == CONST_INT
781 && exact_log2 (INTVAL (op)) >= 0);
784 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
788 gpc_reg_operand (op, mode)
790 enum machine_mode mode;
792 return (register_operand (op, mode)
793 && (GET_CODE (op) != REG
794 || (REGNO (op) >= ARG_POINTER_REGNUM
795 && !XER_REGNO_P (REGNO (op)))
796 || REGNO (op) < MQ_REGNO));
799 /* Returns 1 if OP is either a pseudo-register or a register denoting a
803 cc_reg_operand (op, mode)
805 enum machine_mode mode;
807 return (register_operand (op, mode)
808 && (GET_CODE (op) != REG
809 || REGNO (op) >= FIRST_PSEUDO_REGISTER
810 || CR_REGNO_P (REGNO (op))));
813 /* Returns 1 if OP is either a pseudo-register or a register denoting a
814 CR field that isn't CR0. */
817 cc_reg_not_cr0_operand (op, mode)
819 enum machine_mode mode;
821 return (register_operand (op, mode)
822 && (GET_CODE (op) != REG
823 || REGNO (op) >= FIRST_PSEUDO_REGISTER
824 || CR_REGNO_NOT_CR0_P (REGNO (op))));
827 /* Returns 1 if OP is either a constant integer valid for a D-field or
828 a non-special register. If a register, it must be in the proper
829 mode unless MODE is VOIDmode. */
832 reg_or_short_operand (op, mode)
834 enum machine_mode mode;
836 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
839 /* Similar, except check if the negation of the constant would be
840 valid for a D-field. */
843 reg_or_neg_short_operand (op, mode)
845 enum machine_mode mode;
847 if (GET_CODE (op) == CONST_INT)
848 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
850 return gpc_reg_operand (op, mode);
853 /* Returns 1 if OP is either a constant integer valid for a DS-field or
854 a non-special register. If a register, it must be in the proper
855 mode unless MODE is VOIDmode. */
858 reg_or_aligned_short_operand (op, mode)
860 enum machine_mode mode;
862 if (gpc_reg_operand (op, mode))
864 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
871 /* Return 1 if the operand is either a register or an integer whose
872 high-order 16 bits are zero. */
875 reg_or_u_short_operand (op, mode)
877 enum machine_mode mode;
879 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
882 /* Return 1 is the operand is either a non-special register or ANY
886 reg_or_cint_operand (op, mode)
888 enum machine_mode mode;
890 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
893 /* Return 1 is the operand is either a non-special register or ANY
894 32-bit signed constant integer. */
897 reg_or_arith_cint_operand (op, mode)
899 enum machine_mode mode;
901 return (gpc_reg_operand (op, mode)
902 || (GET_CODE (op) == CONST_INT
903 #if HOST_BITS_PER_WIDE_INT != 32
904 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
905 < (unsigned HOST_WIDE_INT) 0x100000000ll)
910 /* Return 1 is the operand is either a non-special register or a 32-bit
911 signed constant integer valid for 64-bit addition. */
914 reg_or_add_cint64_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 && INTVAL (op) < 0x7fff8000
921 #if HOST_BITS_PER_WIDE_INT != 32
922 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
928 /* Return 1 is the operand is either a non-special register or a 32-bit
929 signed constant integer valid for 64-bit subtraction. */
932 reg_or_sub_cint64_operand (op, mode)
934 enum machine_mode mode;
936 return (gpc_reg_operand (op, mode)
937 || (GET_CODE (op) == CONST_INT
938 && (- INTVAL (op)) < 0x7fff8000
939 #if HOST_BITS_PER_WIDE_INT != 32
940 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
946 /* Return 1 is the operand is either a non-special register or ANY
947 32-bit unsigned constant integer. */
950 reg_or_logical_cint_operand (op, mode)
952 enum machine_mode mode;
954 if (GET_CODE (op) == CONST_INT)
956 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
958 if (GET_MODE_BITSIZE (mode) <= 32)
965 return ((INTVAL (op) & GET_MODE_MASK (mode)
966 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
968 else if (GET_CODE (op) == CONST_DOUBLE)
970 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
974 return CONST_DOUBLE_HIGH (op) == 0;
977 return gpc_reg_operand (op, mode);
980 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
983 got_operand (op, mode)
985 enum machine_mode mode ATTRIBUTE_UNUSED;
987 return (GET_CODE (op) == SYMBOL_REF
988 || GET_CODE (op) == CONST
989 || GET_CODE (op) == LABEL_REF);
992 /* Return 1 if the operand is a simple references that can be loaded via
993 the GOT (labels involving addition aren't allowed). */
996 got_no_const_operand (op, mode)
998 enum machine_mode mode ATTRIBUTE_UNUSED;
1000 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1003 /* Return the number of instructions it takes to form a constant in an
1004 integer register. */
1007 num_insns_constant_wide (value)
1008 HOST_WIDE_INT value;
1010 /* signed constant loadable with {cal|addi} */
1011 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1014 /* constant loadable with {cau|addis} */
1015 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1018 #if HOST_BITS_PER_WIDE_INT == 64
1019 else if (TARGET_POWERPC64)
1021 HOST_WIDE_INT low = value & 0xffffffff;
1022 HOST_WIDE_INT high = value >> 32;
1024 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
1026 if (high == 0 && (low & 0x80000000) == 0)
1029 else if (high == -1 && (low & 0x80000000) != 0)
1033 return num_insns_constant_wide (high) + 1;
1036 return (num_insns_constant_wide (high)
1037 + num_insns_constant_wide (low) + 1);
1046 num_insns_constant (op, mode)
1048 enum machine_mode mode;
1050 if (GET_CODE (op) == CONST_INT)
1052 #if HOST_BITS_PER_WIDE_INT == 64
1053 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1054 && mask64_operand (op, mode))
1058 return num_insns_constant_wide (INTVAL (op));
1061 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1066 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1067 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1068 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1071 else if (GET_CODE (op) == CONST_DOUBLE)
1077 int endian = (WORDS_BIG_ENDIAN == 0);
1079 if (mode == VOIDmode || mode == DImode)
1081 high = CONST_DOUBLE_HIGH (op);
1082 low = CONST_DOUBLE_LOW (op);
1086 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1087 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1089 low = l[1 - endian];
1093 return (num_insns_constant_wide (low)
1094 + num_insns_constant_wide (high));
1098 if (high == 0 && (low & 0x80000000) == 0)
1099 return num_insns_constant_wide (low);
1101 else if (high == -1 && (low & 0x80000000) != 0)
1102 return num_insns_constant_wide (low);
1104 else if (mask64_operand (op, mode))
1108 return num_insns_constant_wide (high) + 1;
1111 return (num_insns_constant_wide (high)
1112 + num_insns_constant_wide (low) + 1);
1120 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1121 register with one instruction per word. We only do this if we can
1122 safely read CONST_DOUBLE_{LOW,HIGH}. */
1125 easy_fp_constant (op, mode)
1127 enum machine_mode mode;
1129 if (GET_CODE (op) != CONST_DOUBLE
1130 || GET_MODE (op) != mode
1131 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1134 /* Consider all constants with -msoft-float to be easy. */
1135 if (TARGET_SOFT_FLOAT && mode != DImode)
1138 /* If we are using V.4 style PIC, consider all constants to be hard. */
1139 if (flag_pic && DEFAULT_ABI == ABI_V4)
1142 #ifdef TARGET_RELOCATABLE
1143 /* Similarly if we are using -mrelocatable, consider all constants
1145 if (TARGET_RELOCATABLE)
1154 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1155 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1157 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1158 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1161 else if (mode == SFmode)
1166 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1167 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1169 return num_insns_constant_wide (l) == 1;
1172 else if (mode == DImode)
1173 return ((TARGET_POWERPC64
1174 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1175 || (num_insns_constant (op, DImode) <= 2));
1177 else if (mode == SImode)
1183 /* Return 1 if the operand is 0.0. */
1185 zero_fp_constant (op, mode)
1187 enum machine_mode mode;
1189 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1192 /* Return 1 if the operand is in volatile memory. Note that during
1193 the RTL generation phase, memory_operand does not return TRUE for
1194 volatile memory references. So this function allows us to
1195 recognize volatile references where its safe. */
1198 volatile_mem_operand (op, mode)
1200 enum machine_mode mode;
1202 if (GET_CODE (op) != MEM)
1205 if (!MEM_VOLATILE_P (op))
1208 if (mode != GET_MODE (op))
1211 if (reload_completed)
1212 return memory_operand (op, mode);
1214 if (reload_in_progress)
1215 return strict_memory_address_p (mode, XEXP (op, 0));
1217 return memory_address_p (mode, XEXP (op, 0));
1220 /* Return 1 if the operand is an offsettable memory operand. */
1223 offsettable_mem_operand (op, mode)
1225 enum machine_mode mode;
1227 return ((GET_CODE (op) == MEM)
1228 && offsettable_address_p (reload_completed || reload_in_progress,
1229 mode, XEXP (op, 0)));
1232 /* Return 1 if the operand is either an easy FP constant (see above) or
1236 mem_or_easy_const_operand (op, mode)
1238 enum machine_mode mode;
1240 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1243 /* Return 1 if the operand is either a non-special register or an item
1244 that can be used as the operand of a `mode' add insn. */
1247 add_operand (op, mode)
1249 enum machine_mode mode;
1251 if (GET_CODE (op) == CONST_INT)
1252 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1253 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1255 return gpc_reg_operand (op, mode);
1258 /* Return 1 if OP is a constant but not a valid add_operand. */
1261 non_add_cint_operand (op, mode)
1263 enum machine_mode mode ATTRIBUTE_UNUSED;
1265 return (GET_CODE (op) == CONST_INT
1266 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1267 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1270 /* Return 1 if the operand is a non-special register or a constant that
1271 can be used as the operand of an OR or XOR insn on the RS/6000. */
1274 logical_operand (op, mode)
1276 enum machine_mode mode;
1278 HOST_WIDE_INT opl, oph;
1280 if (gpc_reg_operand (op, mode))
1283 if (GET_CODE (op) == CONST_INT)
1285 opl = INTVAL (op) & GET_MODE_MASK (mode);
1287 #if HOST_BITS_PER_WIDE_INT <= 32
1288 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1292 else if (GET_CODE (op) == CONST_DOUBLE)
1294 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1297 opl = CONST_DOUBLE_LOW (op);
1298 oph = CONST_DOUBLE_HIGH (op);
1305 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1306 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1309 /* Return 1 if C is a constant that is not a logical operand (as
1310 above), but could be split into one. */
1313 non_logical_cint_operand (op, mode)
1315 enum machine_mode mode;
1317 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1318 && ! logical_operand (op, mode)
1319 && reg_or_logical_cint_operand (op, mode));
1322 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1323 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1324 Reject all ones and all zeros, since these should have been optimized
1325 away and confuse the making of MB and ME. */
1328 mask_operand (op, mode)
1330 enum machine_mode mode ATTRIBUTE_UNUSED;
1332 HOST_WIDE_INT c, lsb;
1334 if (GET_CODE (op) != CONST_INT)
1339 /* We don't change the number of transitions by inverting,
1340 so make sure we start with the LS bit zero. */
1344 /* Reject all zeros or all ones. */
1348 /* Find the first transition. */
1351 /* Invert to look for a second transition. */
1354 /* Erase first transition. */
1357 /* Find the second transition (if any). */
1360 /* Match if all the bits above are 1's (or c is zero). */
1364 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1365 It is if there are no more than one 1->0 or 0->1 transitions.
1366 Reject all ones and all zeros, since these should have been optimized
1367 away and confuse the making of MB and ME. */
1370 mask64_operand (op, mode)
1372 enum machine_mode mode;
1374 if (GET_CODE (op) == CONST_INT)
1376 HOST_WIDE_INT c, lsb;
1378 /* We don't change the number of transitions by inverting,
1379 so make sure we start with the LS bit zero. */
1384 /* Reject all zeros or all ones. */
1388 /* Find the transition, and check that all bits above are 1's. */
1392 else if (GET_CODE (op) == CONST_DOUBLE
1393 && (mode == VOIDmode || mode == DImode))
1395 HOST_WIDE_INT low, high, lsb;
1397 if (HOST_BITS_PER_WIDE_INT < 64)
1398 high = CONST_DOUBLE_HIGH (op);
1400 low = CONST_DOUBLE_LOW (op);
1403 if (HOST_BITS_PER_WIDE_INT < 64)
1410 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1414 return high == -lsb;
1418 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1424 /* Return 1 if the operand is either a non-special register or a constant
1425 that can be used as the operand of a PowerPC64 logical AND insn. */
1428 and64_operand (op, mode)
1430 enum machine_mode mode;
1432 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1433 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1435 return (logical_operand (op, mode) || mask64_operand (op, mode));
1438 /* Return 1 if the operand is either a non-special register or a
1439 constant that can be used as the operand of an RS/6000 logical AND insn. */
1442 and_operand (op, mode)
1444 enum machine_mode mode;
1446 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1447 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1449 return (logical_operand (op, mode) || mask_operand (op, mode));
1452 /* Return 1 if the operand is a general register or memory operand. */
1455 reg_or_mem_operand (op, mode)
1457 enum machine_mode mode;
1459 return (gpc_reg_operand (op, mode)
1460 || memory_operand (op, mode)
1461 || volatile_mem_operand (op, mode));
1464 /* Return 1 if the operand is a general register or memory operand without
1465 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1469 lwa_operand (op, mode)
1471 enum machine_mode mode;
1475 if (reload_completed && GET_CODE (inner) == SUBREG)
1476 inner = SUBREG_REG (inner);
1478 return gpc_reg_operand (inner, mode)
1479 || (memory_operand (inner, mode)
1480 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1481 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1482 && (GET_CODE (XEXP (inner, 0)) != PLUS
1483 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1484 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1487 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1488 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1492 call_operand (op, mode)
1494 enum machine_mode mode;
1496 if (mode != VOIDmode && GET_MODE (op) != mode)
1499 return (GET_CODE (op) == SYMBOL_REF
1500 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1503 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1504 this file and the function is not weakly defined. */
1507 current_file_function_operand (op, mode)
1509 enum machine_mode mode ATTRIBUTE_UNUSED;
1511 return (GET_CODE (op) == SYMBOL_REF
1512 && (SYMBOL_REF_FLAG (op)
1513 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1514 && ! DECL_WEAK (current_function_decl))));
1517 /* Return 1 if this operand is a valid input for a move insn. */
1520 input_operand (op, mode)
1522 enum machine_mode mode;
1524 /* Memory is always valid. */
1525 if (memory_operand (op, mode))
1528 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1529 if (GET_CODE (op) == CONSTANT_P_RTX)
1532 /* For floating-point, easy constants are valid. */
1533 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1535 && easy_fp_constant (op, mode))
1538 /* Allow any integer constant. */
1539 if (GET_MODE_CLASS (mode) == MODE_INT
1540 && (GET_CODE (op) == CONST_INT
1541 || GET_CODE (op) == CONST_DOUBLE))
1544 /* For floating-point or multi-word mode, the only remaining valid type
1546 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1547 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1548 return register_operand (op, mode);
1550 /* The only cases left are integral modes one word or smaller (we
1551 do not get called for MODE_CC values). These can be in any
1553 if (register_operand (op, mode))
1556 /* A SYMBOL_REF referring to the TOC is valid. */
1557 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1560 /* A constant pool expression (relative to the TOC) is valid */
1561 if (TOC_RELATIVE_EXPR_P (op))
1564 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1566 if (DEFAULT_ABI == ABI_V4
1567 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1568 && small_data_operand (op, Pmode))
1574 /* Return 1 for an operand in small memory on V.4/eabi. */
1577 small_data_operand (op, mode)
1578 rtx op ATTRIBUTE_UNUSED;
1579 enum machine_mode mode ATTRIBUTE_UNUSED;
1584 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1587 if (DEFAULT_ABI != ABI_V4)
1590 if (GET_CODE (op) == SYMBOL_REF)
1593 else if (GET_CODE (op) != CONST
1594 || GET_CODE (XEXP (op, 0)) != PLUS
1595 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1596 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1601 rtx sum = XEXP (op, 0);
1602 HOST_WIDE_INT summand;
1604 /* We have to be careful here, because it is the referenced address
1605 that must be 32k from _SDA_BASE_, not just the symbol. */
1606 summand = INTVAL (XEXP (sum, 1));
1607 if (summand < 0 || summand > g_switch_value)
1610 sym_ref = XEXP (sum, 0);
1613 if (*XSTR (sym_ref, 0) != '@')
1624 constant_pool_expr_1 (op, have_sym, have_toc)
1629 switch (GET_CODE(op))
1632 if (CONSTANT_POOL_ADDRESS_P (op))
1634 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1642 else if (! strcmp (XSTR (op, 0), toc_label_name))
1651 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1652 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1654 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1663 constant_pool_expr_p (op)
1668 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1672 toc_relative_expr_p (op)
1677 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1680 /* Try machine-dependent ways of modifying an illegitimate address
1681 to be legitimate. If we find one, return the new, valid address.
1682 This is used from only one place: `memory_address' in explow.c.
1684 OLDX is the address as it was before break_out_memory_refs was
1685 called. In some cases it is useful to look at this to decide what
1688 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1690 It is always safe for this function to do nothing. It exists to
1691 recognize opportunities to optimize the output.
1693 On RS/6000, first check for the sum of a register with a constant
1694 integer that is out of range. If so, generate code to add the
1695 constant with the low-order 16 bits masked to the register and force
1696 this result into another register (this can be done with `cau').
1697 Then generate an address of REG+(CONST&0xffff), allowing for the
1698 possibility of bit 16 being a one.
1700 Then check for the sum of a register and something not constant, try to
1701 load the other things into a register and return the sum. */
1703 rs6000_legitimize_address (x, oldx, mode)
1705 rtx oldx ATTRIBUTE_UNUSED;
1706 enum machine_mode mode;
1708 if (GET_CODE (x) == PLUS
1709 && GET_CODE (XEXP (x, 0)) == REG
1710 && GET_CODE (XEXP (x, 1)) == CONST_INT
1711 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1713 HOST_WIDE_INT high_int, low_int;
1715 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1716 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1717 if (low_int & 0x8000)
1718 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1719 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1720 GEN_INT (high_int)), 0);
1721 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1723 else if (GET_CODE (x) == PLUS
1724 && GET_CODE (XEXP (x, 0)) == REG
1725 && GET_CODE (XEXP (x, 1)) != CONST_INT
1726 && GET_MODE_NUNITS (mode) == 1
1727 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1728 && (TARGET_POWERPC64 || mode != DImode)
1731 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1732 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1734 else if (ALTIVEC_VECTOR_MODE (mode))
1738 /* Make sure both operands are registers. */
1739 if (GET_CODE (x) == PLUS)
1740 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1741 force_reg (Pmode, XEXP (x, 1)));
1743 reg = force_reg (Pmode, x);
1746 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1747 && GET_CODE (x) != CONST_INT
1748 && GET_CODE (x) != CONST_DOUBLE
1750 && GET_MODE_NUNITS (mode) == 1
1751 && (GET_MODE_BITSIZE (mode) <= 32
1752 || (TARGET_HARD_FLOAT && mode == DFmode)))
1754 rtx reg = gen_reg_rtx (Pmode);
1755 emit_insn (gen_elf_high (reg, (x)));
1756 return gen_rtx_LO_SUM (Pmode, reg, (x));
1758 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1760 && GET_CODE (x) != CONST_INT
1761 && GET_CODE (x) != CONST_DOUBLE
1763 && (TARGET_HARD_FLOAT || mode != DFmode)
1767 rtx reg = gen_reg_rtx (Pmode);
1768 emit_insn (gen_macho_high (reg, (x)));
1769 return gen_rtx_LO_SUM (Pmode, reg, (x));
1772 && CONSTANT_POOL_EXPR_P (x)
1773 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1775 return create_TOC_reference (x);
1781 /* The convention appears to be to define this wherever it is used.
1782 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1783 is now used here. */
1784 #ifndef REG_MODE_OK_FOR_BASE_P
1785 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1788 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1789 replace the input X, or the original X if no replacement is called for.
1790 The output parameter *WIN is 1 if the calling macro should goto WIN,
1793 For RS/6000, we wish to handle large displacements off a base
1794 register by splitting the addend across an addiu/addis and the mem insn.
1795 This cuts number of extra insns needed from 3 to 1.
1797 On Darwin, we use this to generate code for floating point constants.
1798 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1799 The Darwin code is inside #if TARGET_MACHO because only then is
1800 machopic_function_base_name() defined. */
1802 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1804 enum machine_mode mode;
1807 int ind_levels ATTRIBUTE_UNUSED;
1810 /* We must recognize output that we have already generated ourselves. */
1811 if (GET_CODE (x) == PLUS
1812 && GET_CODE (XEXP (x, 0)) == PLUS
1813 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1814 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1815 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1817 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1818 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1819 opnum, (enum reload_type)type);
1825 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1826 && GET_CODE (x) == LO_SUM
1827 && GET_CODE (XEXP (x, 0)) == PLUS
1828 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1829 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1830 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1831 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1832 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1833 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1834 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1836 /* Result of previous invocation of this function on Darwin
1837 floating point constant. */
1838 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1839 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1840 opnum, (enum reload_type)type);
1845 if (GET_CODE (x) == PLUS
1846 && GET_CODE (XEXP (x, 0)) == REG
1847 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1848 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1849 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1851 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1852 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1854 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1856 /* Check for 32-bit overflow. */
1857 if (high + low != val)
1863 /* Reload the high part into a base reg; leave the low part
1864 in the mem directly. */
1866 x = gen_rtx_PLUS (GET_MODE (x),
1867 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1871 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1872 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1873 opnum, (enum reload_type)type);
1878 if (GET_CODE (x) == SYMBOL_REF
1879 && DEFAULT_ABI == ABI_DARWIN
1882 /* Darwin load of floating point constant. */
1883 rtx offset = gen_rtx (CONST, Pmode,
1884 gen_rtx (MINUS, Pmode, x,
1885 gen_rtx (SYMBOL_REF, Pmode,
1886 machopic_function_base_name ())));
1887 x = gen_rtx (LO_SUM, GET_MODE (x),
1888 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1889 gen_rtx (HIGH, Pmode, offset)), offset);
1890 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1891 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1892 opnum, (enum reload_type)type);
1898 && CONSTANT_POOL_EXPR_P (x)
1899 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1901 (x) = create_TOC_reference (x);
1909 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1910 that is a valid memory address for an instruction.
1911 The MODE argument is the machine mode for the MEM expression
1912 that wants to use this address.
1914 On the RS/6000, there are four valid address: a SYMBOL_REF that
1915 refers to a constant pool entry of an address (or the sum of it
1916 plus a constant), a short (16-bit signed) constant plus a register,
1917 the sum of two registers, or a register indirect, possibly with an
1918 auto-increment. For DFmode and DImode with an constant plus register,
1919 we must ensure that both words are addressable or PowerPC64 with offset
1922 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1923 32-bit DImode, TImode), indexed addressing cannot be used because
1924 adjacent memory cells are accessed by adding word-sized offsets
1925 during assembly output. */
1927 rs6000_legitimate_address (mode, x, reg_ok_strict)
1928 enum machine_mode mode;
1932 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1934 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1936 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1938 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1940 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1942 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1944 && GET_CODE (x) == PLUS
1945 && GET_CODE (XEXP (x, 0)) == REG
1946 && XEXP (x, 0) == virtual_stack_vars_rtx
1947 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1949 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1952 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1953 && (TARGET_POWERPC64 || mode != DImode)
1954 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1956 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1961 /* Try to output insns to set TARGET equal to the constant C if it can
1962 be done in less than N insns. Do all computations in MODE.
1963 Returns the place where the output has been placed if it can be
1964 done and the insns have been emitted. If it would take more than N
1965 insns, zero is returned and no insns and emitted. */
1968 rs6000_emit_set_const (dest, mode, source, n)
1970 enum machine_mode mode;
1971 int n ATTRIBUTE_UNUSED;
1973 HOST_WIDE_INT c0, c1;
1975 if (mode == QImode || mode == HImode || mode == SImode)
1978 dest = gen_reg_rtx (mode);
1979 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1983 if (GET_CODE (source) == CONST_INT)
1985 c0 = INTVAL (source);
1988 else if (GET_CODE (source) == CONST_DOUBLE)
1990 #if HOST_BITS_PER_WIDE_INT >= 64
1991 c0 = CONST_DOUBLE_LOW (source);
1994 c0 = CONST_DOUBLE_LOW (source);
1995 c1 = CONST_DOUBLE_HIGH (source);
2001 return rs6000_emit_set_long_const (dest, c0, c1);
2004 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2005 fall back to a straight forward decomposition. We do this to avoid
2006 exponential run times encountered when looking for longer sequences
2007 with rs6000_emit_set_const. */
2009 rs6000_emit_set_long_const (dest, c1, c2)
2011 HOST_WIDE_INT c1, c2;
2013 if (!TARGET_POWERPC64)
2015 rtx operand1, operand2;
2017 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2019 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2021 emit_move_insn (operand1, GEN_INT (c1));
2022 emit_move_insn (operand2, GEN_INT (c2));
2026 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2029 ud2 = (c1 & 0xffff0000) >> 16;
2030 #if HOST_BITS_PER_WIDE_INT >= 64
2034 ud4 = (c2 & 0xffff0000) >> 16;
2036 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2037 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2040 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2042 emit_move_insn (dest, GEN_INT (ud1));
2045 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2046 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2049 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2052 emit_move_insn (dest, GEN_INT (ud2 << 16));
2054 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2056 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2057 || (ud4 == 0 && ! (ud3 & 0x8000)))
2060 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2063 emit_move_insn (dest, GEN_INT (ud3 << 16));
2066 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2067 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2069 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2074 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2077 emit_move_insn (dest, GEN_INT (ud4 << 16));
2080 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2082 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2084 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2085 GEN_INT (ud2 << 16)));
2087 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2093 /* Emit a move from SOURCE to DEST in mode MODE. */
2095 rs6000_emit_move (dest, source, mode)
2098 enum machine_mode mode;
2102 operands[1] = source;
2104 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2105 if (GET_CODE (operands[1]) == CONST_DOUBLE
2106 && ! FLOAT_MODE_P (mode)
2107 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2109 /* FIXME. This should never happen. */
2110 /* Since it seems that it does, do the safe thing and convert
2113 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
2115 if (GET_CODE (operands[1]) == CONST_DOUBLE
2116 && ! FLOAT_MODE_P (mode)
2117 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2118 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2119 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2120 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2123 /* Check if GCC is setting up a block move that will end up using FP
2124 registers as temporaries. We must make sure this is acceptable. */
2125 if (GET_CODE (operands[0]) == MEM
2126 && GET_CODE (operands[1]) == MEM
2128 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2129 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2130 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2131 ? 32 : MEM_ALIGN (operands[0])))
2132 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2134 : MEM_ALIGN (operands[1]))))
2135 && ! MEM_VOLATILE_P (operands [0])
2136 && ! MEM_VOLATILE_P (operands [1]))
2138 emit_move_insn (adjust_address (operands[0], SImode, 0),
2139 adjust_address (operands[1], SImode, 0));
2140 emit_move_insn (adjust_address (operands[0], SImode, 4),
2141 adjust_address (operands[1], SImode, 4));
2145 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2146 operands[1] = force_reg (mode, operands[1]);
2148 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2149 && GET_CODE (operands[0]) == MEM)
2153 if (reload_in_progress || reload_completed)
2154 regnum = true_regnum (operands[1]);
2155 else if (GET_CODE (operands[1]) == REG)
2156 regnum = REGNO (operands[1]);
2160 /* If operands[1] is a register, on POWER it may have
2161 double-precision data in it, so truncate it to single
2163 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2166 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2167 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2168 operands[1] = newreg;
2172 /* Handle the case where reload calls us with an invalid address;
2173 and the case of CONSTANT_P_RTX. */
2174 if (! general_operand (operands[1], mode)
2175 || ! nonimmediate_operand (operands[0], mode)
2176 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
2178 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2182 /* FIXME: In the long term, this switch statement should go away
2183 and be replaced by a sequence of tests based on things like
2189 if (CONSTANT_P (operands[1])
2190 && GET_CODE (operands[1]) != CONST_INT)
2191 operands[1] = force_const_mem (mode, operands[1]);
2197 if (CONSTANT_P (operands[1])
2198 && ! easy_fp_constant (operands[1], mode))
2199 operands[1] = force_const_mem (mode, operands[1]);
2206 /* fixme: aldyh -- allow vector constants when they are implemented. */
2207 if (CONSTANT_P (operands[1]))
2208 operands[1] = force_const_mem (mode, operands[1]);
2213 /* Use default pattern for address of ELF small data */
2216 && DEFAULT_ABI == ABI_V4
2217 && (GET_CODE (operands[1]) == SYMBOL_REF
2218 || GET_CODE (operands[1]) == CONST)
2219 && small_data_operand (operands[1], mode))
2221 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2225 if (DEFAULT_ABI == ABI_V4
2226 && mode == Pmode && mode == SImode
2227 && flag_pic == 1 && got_operand (operands[1], mode))
2229 emit_insn (gen_movsi_got (operands[0], operands[1]));
2233 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2234 && TARGET_NO_TOC && ! flag_pic
2236 && CONSTANT_P (operands[1])
2237 && GET_CODE (operands[1]) != HIGH
2238 && GET_CODE (operands[1]) != CONST_INT)
2240 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2242 /* If this is a function address on -mcall-aixdesc,
2243 convert it to the address of the descriptor. */
2244 if (DEFAULT_ABI == ABI_AIX
2245 && GET_CODE (operands[1]) == SYMBOL_REF
2246 && XSTR (operands[1], 0)[0] == '.')
2248 const char *name = XSTR (operands[1], 0);
2250 while (*name == '.')
2252 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2253 CONSTANT_POOL_ADDRESS_P (new_ref)
2254 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2255 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2256 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2257 operands[1] = new_ref;
2260 if (DEFAULT_ABI == ABI_DARWIN)
2262 emit_insn (gen_macho_high (target, operands[1]));
2263 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2267 emit_insn (gen_elf_high (target, operands[1]));
2268 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2272 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2273 and we have put it in the TOC, we just need to make a TOC-relative
2276 && GET_CODE (operands[1]) == SYMBOL_REF
2277 && CONSTANT_POOL_EXPR_P (operands[1])
2278 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2279 get_pool_mode (operands[1])))
2281 operands[1] = create_TOC_reference (operands[1]);
2283 else if (mode == Pmode
2284 && CONSTANT_P (operands[1])
2285 && ((GET_CODE (operands[1]) != CONST_INT
2286 && ! easy_fp_constant (operands[1], mode))
2287 || (GET_CODE (operands[1]) == CONST_INT
2288 && num_insns_constant (operands[1], mode) > 2)
2289 || (GET_CODE (operands[0]) == REG
2290 && FP_REGNO_P (REGNO (operands[0]))))
2291 && GET_CODE (operands[1]) != HIGH
2292 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2293 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2295 /* Emit a USE operation so that the constant isn't deleted if
2296 expensive optimizations are turned on because nobody
2297 references it. This should only be done for operands that
2298 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2299 This should not be done for operands that contain LABEL_REFs.
2300 For now, we just handle the obvious case. */
2301 if (GET_CODE (operands[1]) != LABEL_REF)
2302 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2305 /* Darwin uses a special PIC legitimizer. */
2306 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2309 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2311 if (operands[0] != operands[1])
2312 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2317 /* If we are to limit the number of things we put in the TOC and
2318 this is a symbol plus a constant we can add in one insn,
2319 just put the symbol in the TOC and add the constant. Don't do
2320 this if reload is in progress. */
2321 if (GET_CODE (operands[1]) == CONST
2322 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2323 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2324 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2325 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2326 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2327 && ! side_effects_p (operands[0]))
2330 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2331 rtx other = XEXP (XEXP (operands[1], 0), 1);
2333 sym = force_reg (mode, sym);
2335 emit_insn (gen_addsi3 (operands[0], sym, other));
2337 emit_insn (gen_adddi3 (operands[0], sym, other));
2341 operands[1] = force_const_mem (mode, operands[1]);
2344 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2345 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2346 get_pool_constant (XEXP (operands[1], 0)),
2347 get_pool_mode (XEXP (operands[1], 0))))
2350 = gen_rtx_MEM (mode,
2351 create_TOC_reference (XEXP (operands[1], 0)));
2352 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2353 RTX_UNCHANGING_P (operands[1]) = 1;
2359 if (GET_CODE (operands[0]) == MEM
2360 && GET_CODE (XEXP (operands[0], 0)) != REG
2361 && ! reload_in_progress)
2363 = replace_equiv_address (operands[0],
2364 copy_addr_to_reg (XEXP (operands[0], 0)));
2366 if (GET_CODE (operands[1]) == MEM
2367 && GET_CODE (XEXP (operands[1], 0)) != REG
2368 && ! reload_in_progress)
2370 = replace_equiv_address (operands[1],
2371 copy_addr_to_reg (XEXP (operands[1], 0)));
2378 /* Above, we may have called force_const_mem which may have returned
2379 an invalid address. If we can, fix this up; otherwise, reload will
2380 have to deal with it. */
2381 if (GET_CODE (operands[1]) == MEM
2382 && ! memory_address_p (mode, XEXP (operands[1], 0))
2383 && ! reload_in_progress)
2384 operands[1] = adjust_address (operands[1], mode, 0);
2386 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2390 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2391 for a call to a function whose data type is FNTYPE.
2392 For a library call, FNTYPE is 0.
2394 For incoming args we set the number of arguments in the prototype large
2395 so we never return a PARALLEL. */
2398 init_cumulative_args (cum, fntype, libname, incoming)
2399 CUMULATIVE_ARGS *cum;
2401 rtx libname ATTRIBUTE_UNUSED;
2404 static CUMULATIVE_ARGS zero_cumulative;
2406 *cum = zero_cumulative;
2408 cum->fregno = FP_ARG_MIN_REG;
2409 cum->vregno = ALTIVEC_ARG_MIN_REG;
2410 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2411 cum->call_cookie = CALL_NORMAL;
2412 cum->sysv_gregno = GP_ARG_MIN_REG;
2415 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2417 else if (cum->prototype)
2418 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2419 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2420 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2423 cum->nargs_prototype = 0;
2425 cum->orig_nargs = cum->nargs_prototype;
2427 /* Check for longcall's */
2428 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2429 cum->call_cookie = CALL_LONG;
2431 if (TARGET_DEBUG_ARG)
2433 fprintf (stderr, "\ninit_cumulative_args:");
2436 tree ret_type = TREE_TYPE (fntype);
2437 fprintf (stderr, " ret code = %s,",
2438 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2441 if (cum->call_cookie & CALL_LONG)
2442 fprintf (stderr, " longcall,");
2444 fprintf (stderr, " proto = %d, nargs = %d\n",
2445 cum->prototype, cum->nargs_prototype);
2449 /* If defined, a C expression which determines whether, and in which
2450 direction, to pad out an argument with extra space. The value
2451 should be of type `enum direction': either `upward' to pad above
2452 the argument, `downward' to pad below, or `none' to inhibit
2455 For the AIX ABI structs are always stored left shifted in their
2459 function_arg_padding (mode, type)
2460 enum machine_mode mode;
2463 if (type != 0 && AGGREGATE_TYPE_P (type))
2466 /* This is the default definition. */
2467 return (! BYTES_BIG_ENDIAN
2470 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2471 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2472 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2473 ? downward : upward));
2476 /* If defined, a C expression that gives the alignment boundary, in bits,
2477 of an argument with the specified mode and type. If it is not defined,
2478 PARM_BOUNDARY is used for all arguments.
2480 V.4 wants long longs to be double word aligned. */
2483 function_arg_boundary (mode, type)
2484 enum machine_mode mode;
2485 tree type ATTRIBUTE_UNUSED;
2487 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2489 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2492 return PARM_BOUNDARY;
2495 /* Update the data in CUM to advance over an argument
2496 of mode MODE and data type TYPE.
2497 (TYPE is null for libcalls where that information may not be available.) */
2500 function_arg_advance (cum, mode, type, named)
2501 CUMULATIVE_ARGS *cum;
2502 enum machine_mode mode;
2506 cum->nargs_prototype--;
2508 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2510 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2513 cum->words += RS6000_ARG_SIZE (mode, type);
2515 else if (DEFAULT_ABI == ABI_V4)
2517 if (TARGET_HARD_FLOAT
2518 && (mode == SFmode || mode == DFmode))
2520 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2525 cum->words += cum->words & 1;
2526 cum->words += RS6000_ARG_SIZE (mode, type);
2532 int gregno = cum->sysv_gregno;
2534 /* Aggregates and IEEE quad get passed by reference. */
2535 if ((type && AGGREGATE_TYPE_P (type))
2539 n_words = RS6000_ARG_SIZE (mode, type);
2541 /* Long long is put in odd registers. */
2542 if (n_words == 2 && (gregno & 1) == 0)
2545 /* Long long is not split between registers and stack. */
2546 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2548 /* Long long is aligned on the stack. */
2550 cum->words += cum->words & 1;
2551 cum->words += n_words;
2554 /* Note: continuing to accumulate gregno past when we've started
2555 spilling to the stack indicates the fact that we've started
2556 spilling to the stack to expand_builtin_saveregs. */
2557 cum->sysv_gregno = gregno + n_words;
2560 if (TARGET_DEBUG_ARG)
2562 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2563 cum->words, cum->fregno);
2564 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2565 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2566 fprintf (stderr, "mode = %4s, named = %d\n",
2567 GET_MODE_NAME (mode), named);
2572 int align = (TARGET_32BIT && (cum->words & 1) != 0
2573 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2575 cum->words += align + RS6000_ARG_SIZE (mode, type);
2577 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2580 if (TARGET_DEBUG_ARG)
2582 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2583 cum->words, cum->fregno);
2584 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2585 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2586 fprintf (stderr, "named = %d, align = %d\n", named, align);
2591 /* Determine where to put an argument to a function.
2592 Value is zero to push the argument on the stack,
2593 or a hard register in which to store the argument.
2595 MODE is the argument's machine mode.
2596 TYPE is the data type of the argument (as a tree).
2597 This is null for libcalls where that information may
2599 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2600 the preceding args and about the function being called.
2601 NAMED is nonzero if this argument is a named parameter
2602 (otherwise it is an extra parameter matching an ellipsis).
2604 On RS/6000 the first eight words of non-FP are normally in registers
2605 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2606 Under V.4, the first 8 FP args are in registers.
2608 If this is floating-point and no prototype is specified, we use
2609 both an FP and integer register (or possibly FP reg and stack). Library
2610 functions (when TYPE is zero) always have the proper types for args,
2611 so we can pass the FP value just in one register. emit_library_function
2612 doesn't support PARALLEL anyway. */
2615 function_arg (cum, mode, type, named)
2616 CUMULATIVE_ARGS *cum;
2617 enum machine_mode mode;
2621 enum rs6000_abi abi = DEFAULT_ABI;
2623 /* Return a marker to indicate whether CR1 needs to set or clear the
2624 bit that V.4 uses to say fp args were passed in registers.
2625 Assume that we don't need the marker for software floating point,
2626 or compiler generated library calls. */
2627 if (mode == VOIDmode)
2630 && TARGET_HARD_FLOAT
2631 && cum->nargs_prototype < 0
2632 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2634 return GEN_INT (cum->call_cookie
2635 | ((cum->fregno == FP_ARG_MIN_REG)
2636 ? CALL_V4_SET_FP_ARGS
2637 : CALL_V4_CLEAR_FP_ARGS));
2640 return GEN_INT (cum->call_cookie);
2643 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2645 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2646 return gen_rtx_REG (mode, cum->vregno);
2650 else if (abi == ABI_V4)
2652 if (TARGET_HARD_FLOAT
2653 && (mode == SFmode || mode == DFmode))
2655 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2656 return gen_rtx_REG (mode, cum->fregno);
2663 int gregno = cum->sysv_gregno;
2665 /* Aggregates and IEEE quad get passed by reference. */
2666 if ((type && AGGREGATE_TYPE_P (type))
2670 n_words = RS6000_ARG_SIZE (mode, type);
2672 /* Long long is put in odd registers. */
2673 if (n_words == 2 && (gregno & 1) == 0)
2676 /* Long long is not split between registers and stack. */
2677 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2678 return gen_rtx_REG (mode, gregno);
2685 int align = (TARGET_32BIT && (cum->words & 1) != 0
2686 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2687 int align_words = cum->words + align;
2689 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2692 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2695 || ((cum->nargs_prototype > 0)
2696 /* IBM AIX extended its linkage convention definition always
2697 to require FP args after register save area hole on the
2699 && (DEFAULT_ABI != ABI_AIX
2701 || (align_words < GP_ARG_NUM_REG))))
2702 return gen_rtx_REG (mode, cum->fregno);
2704 return gen_rtx_PARALLEL (mode,
2706 gen_rtx_EXPR_LIST (VOIDmode,
2707 ((align_words >= GP_ARG_NUM_REG)
2710 + RS6000_ARG_SIZE (mode, type)
2712 /* If this is partially on the stack, then
2713 we only include the portion actually
2714 in registers here. */
2715 ? gen_rtx_REG (SImode,
2716 GP_ARG_MIN_REG + align_words)
2717 : gen_rtx_REG (mode,
2718 GP_ARG_MIN_REG + align_words))),
2720 gen_rtx_EXPR_LIST (VOIDmode,
2721 gen_rtx_REG (mode, cum->fregno),
2724 else if (align_words < GP_ARG_NUM_REG)
2725 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2731 /* For an arg passed partly in registers and partly in memory,
2732 this is the number of registers used.
2733 For args passed entirely in registers or entirely in memory, zero. */
2736 function_arg_partial_nregs (cum, mode, type, named)
2737 CUMULATIVE_ARGS *cum;
2738 enum machine_mode mode;
2740 int named ATTRIBUTE_UNUSED;
2742 if (DEFAULT_ABI == ABI_V4)
2745 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2746 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2748 if (cum->nargs_prototype >= 0)
2752 if (cum->words < GP_ARG_NUM_REG
2753 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2755 int ret = GP_ARG_NUM_REG - cum->words;
2756 if (ret && TARGET_DEBUG_ARG)
2757 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2765 /* A C expression that indicates when an argument must be passed by
2766 reference. If nonzero for an argument, a copy of that argument is
2767 made in memory and a pointer to the argument is passed instead of
2768 the argument itself. The pointer is passed in whatever way is
2769 appropriate for passing a pointer to that type.
2771 Under V.4, structures and unions are passed by reference. */
2774 function_arg_pass_by_reference (cum, mode, type, named)
2775 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2776 enum machine_mode mode ATTRIBUTE_UNUSED;
2778 int named ATTRIBUTE_UNUSED;
2780 if (DEFAULT_ABI == ABI_V4
2781 && ((type && AGGREGATE_TYPE_P (type))
2784 if (TARGET_DEBUG_ARG)
2785 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2793 /* Perform any needed actions needed for a function that is receiving a
2794 variable number of arguments.
2798 MODE and TYPE are the mode and type of the current parameter.
2800 PRETEND_SIZE is a variable that should be set to the amount of stack
2801 that must be pushed by the prolog to pretend that our caller pushed
2804 Normally, this macro will push all remaining incoming registers on the
2805 stack and set PRETEND_SIZE to the length of the registers pushed. */
2808 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2809 CUMULATIVE_ARGS *cum;
2810 enum machine_mode mode;
2816 CUMULATIVE_ARGS next_cum;
2817 int reg_size = TARGET_32BIT ? 4 : 8;
2818 rtx save_area = NULL_RTX, mem;
2819 int first_reg_offset, set;
2823 fntype = TREE_TYPE (current_function_decl);
2824 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2825 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2826 != void_type_node));
2828 /* For varargs, we do not want to skip the dummy va_dcl argument.
2829 For stdargs, we do want to skip the last named argument. */
2832 function_arg_advance (&next_cum, mode, type, 1);
2834 if (DEFAULT_ABI == ABI_V4)
2836 /* Indicate to allocate space on the stack for varargs save area. */
2837 /* ??? Does this really have to be located at a magic spot on the
2838 stack, or can we allocate this with assign_stack_local instead. */
2839 cfun->machine->sysv_varargs_p = 1;
2841 save_area = plus_constant (virtual_stack_vars_rtx,
2842 - RS6000_VARARGS_SIZE);
2844 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2848 first_reg_offset = next_cum.words;
2849 save_area = virtual_incoming_args_rtx;
2850 cfun->machine->sysv_varargs_p = 0;
2852 if (MUST_PASS_IN_STACK (mode, type))
2853 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2856 set = get_varargs_alias_set ();
2857 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2859 mem = gen_rtx_MEM (BLKmode,
2860 plus_constant (save_area,
2861 first_reg_offset * reg_size)),
2862 set_mem_alias_set (mem, set);
2863 set_mem_align (mem, BITS_PER_WORD);
2866 (GP_ARG_MIN_REG + first_reg_offset, mem,
2867 GP_ARG_NUM_REG - first_reg_offset,
2868 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2870 /* ??? Does ABI_V4 need this at all? */
2871 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2874 /* Save FP registers if needed. */
2875 if (DEFAULT_ABI == ABI_V4
2876 && TARGET_HARD_FLOAT && ! no_rtl
2877 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2879 int fregno = next_cum.fregno;
2880 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2881 rtx lab = gen_label_rtx ();
2882 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2884 emit_jump_insn (gen_rtx_SET (VOIDmode,
2886 gen_rtx_IF_THEN_ELSE (VOIDmode,
2887 gen_rtx_NE (VOIDmode, cr1,
2889 gen_rtx_LABEL_REF (VOIDmode, lab),
2892 while (fregno <= FP_ARG_V4_MAX_REG)
2894 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2895 set_mem_alias_set (mem, set);
2896 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2905 /* Create the va_list data type. */
2908 rs6000_build_va_list ()
2910 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2912 /* For AIX, prefer 'char *' because that's what the system
2913 header files like. */
2914 if (DEFAULT_ABI != ABI_V4)
2915 return build_pointer_type (char_type_node);
2917 record = make_lang_type (RECORD_TYPE);
2918 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2920 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2921 unsigned_char_type_node);
2922 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2923 unsigned_char_type_node);
2924 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2926 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2929 DECL_FIELD_CONTEXT (f_gpr) = record;
2930 DECL_FIELD_CONTEXT (f_fpr) = record;
2931 DECL_FIELD_CONTEXT (f_ovf) = record;
2932 DECL_FIELD_CONTEXT (f_sav) = record;
2934 TREE_CHAIN (record) = type_decl;
2935 TYPE_NAME (record) = type_decl;
2936 TYPE_FIELDS (record) = f_gpr;
2937 TREE_CHAIN (f_gpr) = f_fpr;
2938 TREE_CHAIN (f_fpr) = f_ovf;
2939 TREE_CHAIN (f_ovf) = f_sav;
2941 layout_type (record);
2943 /* The correct type is an array type of one element. */
2944 return build_array_type (record, build_index_type (size_zero_node));
2947 /* Implement va_start. */
2950 rs6000_va_start (stdarg_p, valist, nextarg)
2955 HOST_WIDE_INT words, n_gpr, n_fpr;
2956 tree f_gpr, f_fpr, f_ovf, f_sav;
2957 tree gpr, fpr, ovf, sav, t;
2959 /* Only SVR4 needs something special. */
2960 if (DEFAULT_ABI != ABI_V4)
2962 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2966 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2967 f_fpr = TREE_CHAIN (f_gpr);
2968 f_ovf = TREE_CHAIN (f_fpr);
2969 f_sav = TREE_CHAIN (f_ovf);
2971 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2972 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2973 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2974 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2975 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2977 /* Count number of gp and fp argument registers used. */
2978 words = current_function_args_info.words;
2979 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2980 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2982 if (TARGET_DEBUG_ARG)
2984 fputs ("va_start: words = ", stderr);
2985 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2986 fputs (", n_gpr = ", stderr);
2987 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2988 fputs (", n_fpr = ", stderr);
2989 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2990 putc ('\n', stderr);
2993 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2994 TREE_SIDE_EFFECTS (t) = 1;
2995 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2997 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2998 TREE_SIDE_EFFECTS (t) = 1;
2999 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3001 /* Find the overflow area. */
3002 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3004 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3005 build_int_2 (words * UNITS_PER_WORD, 0));
3006 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3007 TREE_SIDE_EFFECTS (t) = 1;
3008 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3010 /* Find the register save area. */
3011 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3012 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3013 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3014 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3015 TREE_SIDE_EFFECTS (t) = 1;
3016 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3019 /* Implement va_arg. */
3022 rs6000_va_arg (valist, type)
3025 tree f_gpr, f_fpr, f_ovf, f_sav;
3026 tree gpr, fpr, ovf, sav, reg, t, u;
3027 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3028 rtx lab_false, lab_over, addr_rtx, r;
3030 /* For AIX, the rule is that structures are passed left-aligned in
3031 their stack slot. However, GCC does not presently do this:
3032 structures which are the same size as integer types are passed
3033 right-aligned, as if they were in fact integers. This only
3034 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3035 if (DEFAULT_ABI != ABI_V4)
3037 HOST_WIDE_INT align, rounded_size;
3038 enum machine_mode mode;
3041 /* Compute the rounded size of the type. */
3042 align = PARM_BOUNDARY / BITS_PER_UNIT;
3043 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
3048 mode = TYPE_MODE (type);
3049 if (mode != BLKmode)
3052 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
3053 if (rounded_size > align)
3056 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3057 build_int_2 (rounded_size - adj, 0));
3060 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3061 addr_rtx = copy_to_reg (addr_rtx);
3063 /* Compute new value for AP. */
3064 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3065 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3066 build_int_2 (rounded_size, 0)));
3067 TREE_SIDE_EFFECTS (t) = 1;
3068 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3073 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3074 f_fpr = TREE_CHAIN (f_gpr);
3075 f_ovf = TREE_CHAIN (f_fpr);
3076 f_sav = TREE_CHAIN (f_ovf);
3078 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3079 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3080 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3081 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3082 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3084 size = int_size_in_bytes (type);
3085 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3087 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3089 /* Aggregates and long doubles are passed by reference. */
3095 size = rsize = UNITS_PER_WORD;
3097 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3099 /* FP args go in FP registers, if present. */
3108 /* Otherwise into GP registers. */
3116 /* Pull the value out of the saved registers ... */
3118 lab_false = gen_label_rtx ();
3119 lab_over = gen_label_rtx ();
3120 addr_rtx = gen_reg_rtx (Pmode);
3122 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3123 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3126 /* Long long is aligned in the registers. */
3129 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3130 build_int_2 (n_reg - 1, 0));
3131 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3132 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3133 TREE_SIDE_EFFECTS (u) = 1;
3134 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3138 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3142 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
3143 TREE_SIDE_EFFECTS (u) = 1;
3145 u = build1 (CONVERT_EXPR, integer_type_node, u);
3146 TREE_SIDE_EFFECTS (u) = 1;
3148 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3149 TREE_SIDE_EFFECTS (u) = 1;
3151 t = build (PLUS_EXPR, ptr_type_node, t, u);
3152 TREE_SIDE_EFFECTS (t) = 1;
3154 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3156 emit_move_insn (addr_rtx, r);
3158 emit_jump_insn (gen_jump (lab_over));
3160 emit_label (lab_false);
3162 /* ... otherwise out of the overflow area. */
3164 /* Make sure we don't find reg 7 for the next int arg. */
3167 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3168 TREE_SIDE_EFFECTS (t) = 1;
3169 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3172 /* Care for on-stack alignment if needed. */
3177 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
3178 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
3182 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3184 emit_move_insn (addr_rtx, r);
3186 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3187 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3188 TREE_SIDE_EFFECTS (t) = 1;
3189 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3191 emit_label (lab_over);
3195 r = gen_rtx_MEM (Pmode, addr_rtx);
3196 set_mem_alias_set (r, get_varargs_alias_set ());
3197 emit_move_insn (addr_rtx, r);
3205 #define def_builtin(MASK, NAME, TYPE, CODE) \
3207 if ((MASK) & target_flags) \
3208 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3211 struct builtin_description
3213 const unsigned int mask;
3214 const enum insn_code icode;
3215 const char *const name;
3216 const enum rs6000_builtins code;
3219 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3221 static const struct builtin_description bdesc_3arg[] =
3223 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3224 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3225 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3226 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3227 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3228 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3229 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3230 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3231 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3232 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3233 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3234 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3235 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3236 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3237 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3238 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3239 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3240 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3241 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3242 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3243 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3244 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3245 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3248 /* DST operations: void foo (void *, const int, const char). */
3250 static const struct builtin_description bdesc_dst[] =
3252 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3253 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3254 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3255 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3258 /* Simple binary operations: VECc = foo (VECa, VECb). */
3260 static const struct builtin_description bdesc_2arg[] =
3262 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3263 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3264 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3265 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3266 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3267 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3268 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3269 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3273 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3274 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3280 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3281 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3282 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3283 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3284 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3285 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3286 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3287 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3288 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3289 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3296 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3297 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3298 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3299 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3300 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3301 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3302 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3303 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3304 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3305 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3306 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3307 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3308 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3309 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3310 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3311 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3312 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3313 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3314 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3315 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3316 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3317 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3327 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3358 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3359 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3360 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3361 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3374 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3377 /* AltiVec predicates. */
3379 struct builtin_description_predicates
3381 const unsigned int mask;
3382 const enum insn_code icode;
3384 const char *const name;
3385 const enum rs6000_builtins code;
3388 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3390 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3405 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3408 static const struct builtin_description bdesc_1arg[] =
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3416 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3430 altivec_expand_unop_builtin (icode, arglist, target)
3431 enum insn_code icode;
3436 tree arg0 = TREE_VALUE (arglist);
3437 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3438 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3439 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3441 /* If we got invalid arguments bail out before generating bad rtl. */
3442 if (arg0 == error_mark_node)
3446 || GET_MODE (target) != tmode
3447 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3448 target = gen_reg_rtx (tmode);
3450 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3451 op0 = copy_to_mode_reg (mode0, op0);
3453 pat = GEN_FCN (icode) (target, op0);
3462 altivec_expand_binop_builtin (icode, arglist, target)
3463 enum insn_code icode;
3468 tree arg0 = TREE_VALUE (arglist);
3469 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3470 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3471 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3472 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3473 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3474 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3476 /* If we got invalid arguments bail out before generating bad rtl. */
3477 if (arg0 == error_mark_node || arg1 == error_mark_node)
3481 || GET_MODE (target) != tmode
3482 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3483 target = gen_reg_rtx (tmode);
3485 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3486 op0 = copy_to_mode_reg (mode0, op0);
3487 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3488 op1 = copy_to_mode_reg (mode1, op1);
3490 pat = GEN_FCN (icode) (target, op0, op1);
3499 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3500 enum insn_code icode;
3506 tree cr6_form = TREE_VALUE (arglist);
3507 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3508 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3509 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3510 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3511 enum machine_mode tmode = SImode;
3512 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3513 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3516 if (TREE_CODE (cr6_form) != INTEGER_CST)
3518 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3522 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3527 /* If we have invalid arguments, bail out before generating bad rtl. */
3528 if (arg0 == error_mark_node || arg1 == error_mark_node)
3532 || GET_MODE (target) != tmode
3533 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3534 target = gen_reg_rtx (tmode);
3536 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3537 op0 = copy_to_mode_reg (mode0, op0);
3538 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3539 op1 = copy_to_mode_reg (mode1, op1);
3541 scratch = gen_reg_rtx (mode0);
3543 pat = GEN_FCN (icode) (scratch, op0, op1,
3544 gen_rtx (SYMBOL_REF, Pmode, opcode));
3549 /* The vec_any* and vec_all* predicates use the same opcodes for two
3550 different operations, but the bits in CR6 will be different
3551 depending on what information we want. So we have to play tricks
3552 with CR6 to get the right bits out.
3554 If you think this is disgusting, look at the specs for the
3555 AltiVec predicates. */
3557 switch (cr6_form_int)
3560 emit_insn (gen_cr6_test_for_zero (target));
3563 emit_insn (gen_cr6_test_for_zero_reverse (target));
3566 emit_insn (gen_cr6_test_for_lt (target));
3569 emit_insn (gen_cr6_test_for_lt_reverse (target));
3572 error ("argument 1 of __builtin_altivec_predicate is out of range");
3580 altivec_expand_stv_builtin (icode, arglist)
3581 enum insn_code icode;
3584 tree arg0 = TREE_VALUE (arglist);
3585 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3586 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3587 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3588 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3589 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3591 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3592 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3593 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3595 /* Invalid arguments. Bail before doing anything stoopid! */
3596 if (arg0 == error_mark_node
3597 || arg1 == error_mark_node
3598 || arg2 == error_mark_node)
3601 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3602 op0 = copy_to_mode_reg (mode2, op0);
3603 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3604 op1 = copy_to_mode_reg (mode0, op1);
3605 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3606 op2 = copy_to_mode_reg (mode1, op2);
3608 pat = GEN_FCN (icode) (op1, op2, op0);
3615 altivec_expand_ternop_builtin (icode, arglist, target)
3616 enum insn_code icode;
3621 tree arg0 = TREE_VALUE (arglist);
3622 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3623 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3624 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3625 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3626 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3627 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3628 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3629 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3630 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3632 /* If we got invalid arguments bail out before generating bad rtl. */
3633 if (arg0 == error_mark_node
3634 || arg1 == error_mark_node
3635 || arg2 == error_mark_node)
3639 || GET_MODE (target) != tmode
3640 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3641 target = gen_reg_rtx (tmode);
3643 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3644 op0 = copy_to_mode_reg (mode0, op0);
3645 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3646 op1 = copy_to_mode_reg (mode1, op1);
3647 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3648 op2 = copy_to_mode_reg (mode2, op2);
3650 pat = GEN_FCN (icode) (target, op0, op1, op2);
3658 altivec_expand_builtin (exp, target)
3662 struct builtin_description *d;
3663 struct builtin_description_predicates *dp;
3665 enum insn_code icode;
3666 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3667 tree arglist = TREE_OPERAND (exp, 1);
3668 tree arg0, arg1, arg2;
3669 rtx op0, op1, op2, pat;
3670 enum machine_mode tmode, mode0, mode1, mode2;
3671 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3675 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3676 icode = CODE_FOR_altivec_lvx_16qi;
3677 arg0 = TREE_VALUE (arglist);
3678 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3679 tmode = insn_data[icode].operand[0].mode;
3680 mode0 = insn_data[icode].operand[1].mode;
3683 || GET_MODE (target) != tmode
3684 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3685 target = gen_reg_rtx (tmode);
3687 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3688 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3690 pat = GEN_FCN (icode) (target, op0);
3696 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3697 icode = CODE_FOR_altivec_lvx_8hi;
3698 arg0 = TREE_VALUE (arglist);
3699 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3700 tmode = insn_data[icode].operand[0].mode;
3701 mode0 = insn_data[icode].operand[1].mode;
3704 || GET_MODE (target) != tmode
3705 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3706 target = gen_reg_rtx (tmode);
3708 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3709 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3711 pat = GEN_FCN (icode) (target, op0);
3717 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3718 icode = CODE_FOR_altivec_lvx_4si;
3719 arg0 = TREE_VALUE (arglist);
3720 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3721 tmode = insn_data[icode].operand[0].mode;
3722 mode0 = insn_data[icode].operand[1].mode;
3725 || GET_MODE (target) != tmode
3726 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3727 target = gen_reg_rtx (tmode);
3729 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3730 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3732 pat = GEN_FCN (icode) (target, op0);
3738 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3739 icode = CODE_FOR_altivec_lvx_4sf;
3740 arg0 = TREE_VALUE (arglist);
3741 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3742 tmode = insn_data[icode].operand[0].mode;
3743 mode0 = insn_data[icode].operand[1].mode;
3746 || GET_MODE (target) != tmode
3747 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3748 target = gen_reg_rtx (tmode);
3750 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3751 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3753 pat = GEN_FCN (icode) (target, op0);
3759 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3760 icode = CODE_FOR_altivec_stvx_16qi;
3761 arg0 = TREE_VALUE (arglist);
3762 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3763 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3764 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3765 mode0 = insn_data[icode].operand[0].mode;
3766 mode1 = insn_data[icode].operand[1].mode;
3768 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3769 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3770 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3771 op1 = copy_to_mode_reg (mode1, op1);
3773 pat = GEN_FCN (icode) (op0, op1);
3778 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3779 icode = CODE_FOR_altivec_stvx_8hi;
3780 arg0 = TREE_VALUE (arglist);
3781 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3782 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3783 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3784 mode0 = insn_data[icode].operand[0].mode;
3785 mode1 = insn_data[icode].operand[1].mode;
3787 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3788 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3789 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3790 op1 = copy_to_mode_reg (mode1, op1);
3792 pat = GEN_FCN (icode) (op0, op1);
3797 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3798 icode = CODE_FOR_altivec_stvx_4si;
3799 arg0 = TREE_VALUE (arglist);
3800 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3801 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3802 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3803 mode0 = insn_data[icode].operand[0].mode;
3804 mode1 = insn_data[icode].operand[1].mode;
3806 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3807 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3808 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3809 op1 = copy_to_mode_reg (mode1, op1);
3811 pat = GEN_FCN (icode) (op0, op1);
3816 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3817 icode = CODE_FOR_altivec_stvx_4sf;
3818 arg0 = TREE_VALUE (arglist);
3819 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3820 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3821 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3822 mode0 = insn_data[icode].operand[0].mode;
3823 mode1 = insn_data[icode].operand[1].mode;
3825 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3826 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3827 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3828 op1 = copy_to_mode_reg (mode1, op1);
3830 pat = GEN_FCN (icode) (op0, op1);
3835 case ALTIVEC_BUILTIN_STVX:
3836 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3837 case ALTIVEC_BUILTIN_STVEBX:
3838 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3839 case ALTIVEC_BUILTIN_STVEHX:
3840 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3841 case ALTIVEC_BUILTIN_STVEWX:
3842 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3843 case ALTIVEC_BUILTIN_STVXL:
3844 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3846 case ALTIVEC_BUILTIN_MFVSCR:
3847 icode = CODE_FOR_altivec_mfvscr;
3848 tmode = insn_data[icode].operand[0].mode;
3851 || GET_MODE (target) != tmode
3852 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3853 target = gen_reg_rtx (tmode);
3855 pat = GEN_FCN (icode) (target);
3861 case ALTIVEC_BUILTIN_MTVSCR:
3862 icode = CODE_FOR_altivec_mtvscr;
3863 arg0 = TREE_VALUE (arglist);
3864 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3865 mode0 = insn_data[icode].operand[0].mode;
3867 /* If we got invalid arguments bail out before generating bad rtl. */
3868 if (arg0 == error_mark_node)
3871 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3872 op0 = copy_to_mode_reg (mode0, op0);
3874 pat = GEN_FCN (icode) (op0);
3879 case ALTIVEC_BUILTIN_DSSALL:
3880 emit_insn (gen_altivec_dssall ());
3883 case ALTIVEC_BUILTIN_DSS:
3884 icode = CODE_FOR_altivec_dss;
3885 arg0 = TREE_VALUE (arglist);
3886 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3887 mode0 = insn_data[icode].operand[0].mode;
3889 /* If we got invalid arguments bail out before generating bad rtl. */
3890 if (arg0 == error_mark_node)
3893 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3894 op0 = copy_to_mode_reg (mode0, op0);
3896 emit_insn (gen_altivec_dss (op0));
3900 /* Handle DST variants. */
3901 d = (struct builtin_description *) bdesc_dst;
3902 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
3903 if (d->code == fcode)
3905 arg0 = TREE_VALUE (arglist);
3906 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3907 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3908 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3909 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3910 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3911 mode0 = insn_data[d->icode].operand[0].mode;
3912 mode1 = insn_data[d->icode].operand[1].mode;
3913 mode2 = insn_data[d->icode].operand[2].mode;
3915 /* Invalid arguments, bail out before generating bad rtl. */
3916 if (arg0 == error_mark_node
3917 || arg1 == error_mark_node
3918 || arg2 == error_mark_node)
3921 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
3922 op0 = copy_to_mode_reg (mode0, op0);
3923 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
3924 op1 = copy_to_mode_reg (mode1, op1);
3926 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
3928 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
3932 pat = GEN_FCN (d->icode) (op0, op1, op2);
3939 /* Handle simple unary operations. */
3940 d = (struct builtin_description *) bdesc_1arg;
3941 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3942 if (d->code == fcode)
3943 return altivec_expand_unop_builtin (d->icode, arglist, target);
3945 /* Handle simple binary operations. */
3946 d = (struct builtin_description *) bdesc_2arg;
3947 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3948 if (d->code == fcode)
3949 return altivec_expand_binop_builtin (d->icode, arglist, target);
3951 /* Expand the AltiVec predicates. */
3952 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
3953 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
3954 if (dp->code == fcode)
3955 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
3957 /* LV* are funky. We initialized them differently. */
3960 case ALTIVEC_BUILTIN_LVSL:
3961 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
3963 case ALTIVEC_BUILTIN_LVSR:
3964 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
3966 case ALTIVEC_BUILTIN_LVEBX:
3967 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
3969 case ALTIVEC_BUILTIN_LVEHX:
3970 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
3972 case ALTIVEC_BUILTIN_LVEWX:
3973 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
3975 case ALTIVEC_BUILTIN_LVXL:
3976 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
3978 case ALTIVEC_BUILTIN_LVX:
3979 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
3986 /* Handle simple ternary operations. */
3987 d = (struct builtin_description *) bdesc_3arg;
3988 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3989 if (d->code == fcode)
3990 return altivec_expand_ternop_builtin (d->icode, arglist, target);
3996 /* Expand an expression EXP that calls a built-in function,
3997 with result going to TARGET if that's convenient
3998 (and in mode MODE if that's convenient).
3999 SUBTARGET may be used as the target for computing one of EXP's operands.
4000 IGNORE is nonzero if the value is to be ignored. */
4003 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4006 rtx subtarget ATTRIBUTE_UNUSED;
4007 enum machine_mode mode ATTRIBUTE_UNUSED;
4008 int ignore ATTRIBUTE_UNUSED;
4011 return altivec_expand_builtin (exp, target);
4017 rs6000_init_builtins ()
4020 altivec_init_builtins ();
4024 altivec_init_builtins (void)
4026 struct builtin_description *d;
4027 struct builtin_description_predicates *dp;
4030 tree endlink = void_list_node;
4032 tree pint_type_node = build_pointer_type (integer_type_node);
4033 tree pvoid_type_node = build_pointer_type (void_type_node);
4034 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4035 tree pchar_type_node = build_pointer_type (char_type_node);
4036 tree pfloat_type_node = build_pointer_type (float_type_node);
4038 tree v4sf_ftype_v4sf_v4sf_v16qi
4039 = build_function_type (V4SF_type_node,
4040 tree_cons (NULL_TREE, V4SF_type_node,
4041 tree_cons (NULL_TREE, V4SF_type_node,
4042 tree_cons (NULL_TREE,
4045 tree v4si_ftype_v4si_v4si_v16qi
4046 = build_function_type (V4SI_type_node,
4047 tree_cons (NULL_TREE, V4SI_type_node,
4048 tree_cons (NULL_TREE, V4SI_type_node,
4049 tree_cons (NULL_TREE,
4052 tree v8hi_ftype_v8hi_v8hi_v16qi
4053 = build_function_type (V8HI_type_node,
4054 tree_cons (NULL_TREE, V8HI_type_node,
4055 tree_cons (NULL_TREE, V8HI_type_node,
4056 tree_cons (NULL_TREE,
4059 tree v16qi_ftype_v16qi_v16qi_v16qi
4060 = build_function_type (V16QI_type_node,
4061 tree_cons (NULL_TREE, V16QI_type_node,
4062 tree_cons (NULL_TREE, V16QI_type_node,
4063 tree_cons (NULL_TREE,
4067 /* V4SI foo (char). */
4068 tree v4si_ftype_char
4069 = build_function_type (V4SI_type_node,
4070 tree_cons (NULL_TREE, char_type_node, endlink));
4072 /* V8HI foo (char). */
4073 tree v8hi_ftype_char
4074 = build_function_type (V8HI_type_node,
4075 tree_cons (NULL_TREE, char_type_node, endlink));
4077 /* V16QI foo (char). */
4078 tree v16qi_ftype_char
4079 = build_function_type (V16QI_type_node,
4080 tree_cons (NULL_TREE, char_type_node, endlink));
4081 /* V4SF foo (V4SF). */
4082 tree v4sf_ftype_v4sf
4083 = build_function_type (V4SF_type_node,
4084 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4086 /* V4SI foo (int *). */
4087 tree v4si_ftype_pint
4088 = build_function_type (V4SI_type_node,
4089 tree_cons (NULL_TREE, pint_type_node, endlink));
4090 /* V8HI foo (short *). */
4091 tree v8hi_ftype_pshort
4092 = build_function_type (V8HI_type_node,
4093 tree_cons (NULL_TREE, pshort_type_node, endlink));
4094 /* V16QI foo (char *). */
4095 tree v16qi_ftype_pchar
4096 = build_function_type (V16QI_type_node,
4097 tree_cons (NULL_TREE, pchar_type_node, endlink));
4098 /* V4SF foo (float *). */
4099 tree v4sf_ftype_pfloat
4100 = build_function_type (V4SF_type_node,
4101 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4103 /* V8HI foo (V16QI). */
4104 tree v8hi_ftype_v16qi
4105 = build_function_type (V8HI_type_node,
4106 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4108 /* void foo (void *, int, char/literal). */
4109 tree void_ftype_pvoid_int_char
4110 = build_function_type (void_type_node,
4111 tree_cons (NULL_TREE, pvoid_type_node,
4112 tree_cons (NULL_TREE, integer_type_node,
4113 tree_cons (NULL_TREE,
4117 /* void foo (int *, V4SI). */
4118 tree void_ftype_pint_v4si
4119 = build_function_type (void_type_node,
4120 tree_cons (NULL_TREE, pint_type_node,
4121 tree_cons (NULL_TREE, V4SI_type_node,
4123 /* void foo (short *, V8HI). */
4124 tree void_ftype_pshort_v8hi
4125 = build_function_type (void_type_node,
4126 tree_cons (NULL_TREE, pshort_type_node,
4127 tree_cons (NULL_TREE, V8HI_type_node,
4129 /* void foo (char *, V16QI). */
4130 tree void_ftype_pchar_v16qi
4131 = build_function_type (void_type_node,
4132 tree_cons (NULL_TREE, pchar_type_node,
4133 tree_cons (NULL_TREE, V16QI_type_node,
4135 /* void foo (float *, V4SF). */
4136 tree void_ftype_pfloat_v4sf
4137 = build_function_type (void_type_node,
4138 tree_cons (NULL_TREE, pfloat_type_node,
4139 tree_cons (NULL_TREE, V4SF_type_node,
4142 /* void foo (V4SI). */
4143 tree void_ftype_v4si
4144 = build_function_type (void_type_node,
4145 tree_cons (NULL_TREE, V4SI_type_node,
4148 /* void foo (vint, int, void *). */
4149 tree void_ftype_v4si_int_pvoid
4150 = build_function_type (void_type_node,
4151 tree_cons (NULL_TREE, V4SI_type_node,
4152 tree_cons (NULL_TREE, integer_type_node,
4153 tree_cons (NULL_TREE,
4157 /* void foo (vchar, int, void *). */
4158 tree void_ftype_v16qi_int_pvoid
4159 = build_function_type (void_type_node,
4160 tree_cons (NULL_TREE, V16QI_type_node,
4161 tree_cons (NULL_TREE, integer_type_node,
4162 tree_cons (NULL_TREE,
4166 /* void foo (vshort, int, void *). */
4167 tree void_ftype_v8hi_int_pvoid
4168 = build_function_type (void_type_node,
4169 tree_cons (NULL_TREE, V8HI_type_node,
4170 tree_cons (NULL_TREE, integer_type_node,
4171 tree_cons (NULL_TREE,
4175 /* void foo (char). */
4177 = build_function_type (void_type_node,
4178 tree_cons (NULL_TREE, char_type_node,
4181 /* void foo (void). */
4182 tree void_ftype_void
4183 = build_function_type (void_type_node, void_list_node);
4185 /* vshort foo (void). */
4186 tree v8hi_ftype_void
4187 = build_function_type (V8HI_type_node, void_list_node);
4189 tree v4si_ftype_v4si_v4si
4190 = build_function_type (V4SI_type_node,
4191 tree_cons (NULL_TREE, V4SI_type_node,
4192 tree_cons (NULL_TREE, V4SI_type_node,
4195 /* These are for the unsigned 5 bit literals. */
4197 tree v4sf_ftype_v4si_char
4198 = build_function_type (V4SF_type_node,
4199 tree_cons (NULL_TREE, V4SI_type_node,
4200 tree_cons (NULL_TREE, char_type_node,
4202 tree v4si_ftype_v4sf_char
4203 = build_function_type (V4SI_type_node,
4204 tree_cons (NULL_TREE, V4SF_type_node,
4205 tree_cons (NULL_TREE, char_type_node,
4207 tree v4si_ftype_v4si_char
4208 = build_function_type (V4SI_type_node,
4209 tree_cons (NULL_TREE, V4SI_type_node,
4210 tree_cons (NULL_TREE, char_type_node,
4212 tree v8hi_ftype_v8hi_char
4213 = build_function_type (V8HI_type_node,
4214 tree_cons (NULL_TREE, V8HI_type_node,
4215 tree_cons (NULL_TREE, char_type_node,
4217 tree v16qi_ftype_v16qi_char
4218 = build_function_type (V16QI_type_node,
4219 tree_cons (NULL_TREE, V16QI_type_node,
4220 tree_cons (NULL_TREE, char_type_node,
4223 /* These are for the unsigned 4 bit literals. */
4225 tree v16qi_ftype_v16qi_v16qi_char
4226 = build_function_type (V16QI_type_node,
4227 tree_cons (NULL_TREE, V16QI_type_node,
4228 tree_cons (NULL_TREE, V16QI_type_node,
4229 tree_cons (NULL_TREE,
4233 tree v8hi_ftype_v8hi_v8hi_char
4234 = build_function_type (V8HI_type_node,
4235 tree_cons (NULL_TREE, V8HI_type_node,
4236 tree_cons (NULL_TREE, V8HI_type_node,
4237 tree_cons (NULL_TREE,
4241 tree v4si_ftype_v4si_v4si_char
4242 = build_function_type (V4SI_type_node,
4243 tree_cons (NULL_TREE, V4SI_type_node,
4244 tree_cons (NULL_TREE, V4SI_type_node,
4245 tree_cons (NULL_TREE,
4249 tree v4sf_ftype_v4sf_v4sf_char
4250 = build_function_type (V4SF_type_node,
4251 tree_cons (NULL_TREE, V4SF_type_node,
4252 tree_cons (NULL_TREE, V4SF_type_node,
4253 tree_cons (NULL_TREE,
4257 /* End of 4 bit literals. */
4259 tree v4sf_ftype_v4sf_v4sf
4260 = build_function_type (V4SF_type_node,
4261 tree_cons (NULL_TREE, V4SF_type_node,
4262 tree_cons (NULL_TREE, V4SF_type_node,
4264 tree v4sf_ftype_v4sf_v4sf_v4si
4265 = build_function_type (V4SF_type_node,
4266 tree_cons (NULL_TREE, V4SF_type_node,
4267 tree_cons (NULL_TREE, V4SF_type_node,
4268 tree_cons (NULL_TREE,
4271 tree v4sf_ftype_v4sf_v4sf_v4sf
4272 = build_function_type (V4SF_type_node,
4273 tree_cons (NULL_TREE, V4SF_type_node,
4274 tree_cons (NULL_TREE, V4SF_type_node,
4275 tree_cons (NULL_TREE,
4278 tree v4si_ftype_v4si_v4si_v4si
4279 = build_function_type (V4SI_type_node,
4280 tree_cons (NULL_TREE, V4SI_type_node,
4281 tree_cons (NULL_TREE, V4SI_type_node,
4282 tree_cons (NULL_TREE,
4286 tree v8hi_ftype_v8hi_v8hi
4287 = build_function_type (V8HI_type_node,
4288 tree_cons (NULL_TREE, V8HI_type_node,
4289 tree_cons (NULL_TREE, V8HI_type_node,
4291 tree v8hi_ftype_v8hi_v8hi_v8hi
4292 = build_function_type (V8HI_type_node,
4293 tree_cons (NULL_TREE, V8HI_type_node,
4294 tree_cons (NULL_TREE, V8HI_type_node,
4295 tree_cons (NULL_TREE,
4298 tree v4si_ftype_v8hi_v8hi_v4si
4299 = build_function_type (V4SI_type_node,
4300 tree_cons (NULL_TREE, V8HI_type_node,
4301 tree_cons (NULL_TREE, V8HI_type_node,
4302 tree_cons (NULL_TREE,
4305 tree v4si_ftype_v16qi_v16qi_v4si
4306 = build_function_type (V4SI_type_node,
4307 tree_cons (NULL_TREE, V16QI_type_node,
4308 tree_cons (NULL_TREE, V16QI_type_node,
4309 tree_cons (NULL_TREE,
4313 tree v16qi_ftype_v16qi_v16qi
4314 = build_function_type (V16QI_type_node,
4315 tree_cons (NULL_TREE, V16QI_type_node,
4316 tree_cons (NULL_TREE, V16QI_type_node,
4319 tree v4si_ftype_v4sf_v4sf
4320 = build_function_type (V4SI_type_node,
4321 tree_cons (NULL_TREE, V4SF_type_node,
4322 tree_cons (NULL_TREE, V4SF_type_node,
4325 tree v8hi_ftype_v16qi_v16qi
4326 = build_function_type (V8HI_type_node,
4327 tree_cons (NULL_TREE, V16QI_type_node,
4328 tree_cons (NULL_TREE, V16QI_type_node,
4331 tree v4si_ftype_v8hi_v8hi
4332 = build_function_type (V4SI_type_node,
4333 tree_cons (NULL_TREE, V8HI_type_node,
4334 tree_cons (NULL_TREE, V8HI_type_node,
4337 tree v8hi_ftype_v4si_v4si
4338 = build_function_type (V8HI_type_node,
4339 tree_cons (NULL_TREE, V4SI_type_node,
4340 tree_cons (NULL_TREE, V4SI_type_node,
4343 tree v16qi_ftype_v8hi_v8hi
4344 = build_function_type (V16QI_type_node,
4345 tree_cons (NULL_TREE, V8HI_type_node,
4346 tree_cons (NULL_TREE, V8HI_type_node,
4349 tree v4si_ftype_v16qi_v4si
4350 = build_function_type (V4SI_type_node,
4351 tree_cons (NULL_TREE, V16QI_type_node,
4352 tree_cons (NULL_TREE, V4SI_type_node,
4355 tree v4si_ftype_v16qi_v16qi
4356 = build_function_type (V4SI_type_node,
4357 tree_cons (NULL_TREE, V16QI_type_node,
4358 tree_cons (NULL_TREE, V16QI_type_node,
4361 tree v4si_ftype_v8hi_v4si
4362 = build_function_type (V4SI_type_node,
4363 tree_cons (NULL_TREE, V8HI_type_node,
4364 tree_cons (NULL_TREE, V4SI_type_node,
4367 tree v4si_ftype_v8hi
4368 = build_function_type (V4SI_type_node,
4369 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4371 tree int_ftype_v4si_v4si
4372 = build_function_type (integer_type_node,
4373 tree_cons (NULL_TREE, V4SI_type_node,
4374 tree_cons (NULL_TREE, V4SI_type_node,
4377 tree int_ftype_v4sf_v4sf
4378 = build_function_type (integer_type_node,
4379 tree_cons (NULL_TREE, V4SF_type_node,
4380 tree_cons (NULL_TREE, V4SF_type_node,
4383 tree int_ftype_v16qi_v16qi
4384 = build_function_type (integer_type_node,
4385 tree_cons (NULL_TREE, V16QI_type_node,
4386 tree_cons (NULL_TREE, V16QI_type_node,
4389 tree int_ftype_int_v4si_v4si
4390 = build_function_type
4392 tree_cons (NULL_TREE, integer_type_node,
4393 tree_cons (NULL_TREE, V4SI_type_node,
4394 tree_cons (NULL_TREE, V4SI_type_node,
4397 tree int_ftype_int_v4sf_v4sf
4398 = build_function_type
4400 tree_cons (NULL_TREE, integer_type_node,
4401 tree_cons (NULL_TREE, V4SF_type_node,
4402 tree_cons (NULL_TREE, V4SF_type_node,
4405 tree int_ftype_int_v8hi_v8hi
4406 = build_function_type
4408 tree_cons (NULL_TREE, integer_type_node,
4409 tree_cons (NULL_TREE, V8HI_type_node,
4410 tree_cons (NULL_TREE, V8HI_type_node,
4413 tree int_ftype_int_v16qi_v16qi
4414 = build_function_type
4416 tree_cons (NULL_TREE, integer_type_node,
4417 tree_cons (NULL_TREE, V16QI_type_node,
4418 tree_cons (NULL_TREE, V16QI_type_node,
4421 tree v16qi_ftype_int_pvoid
4422 = build_function_type (V16QI_type_node,
4423 tree_cons (NULL_TREE, integer_type_node,
4424 tree_cons (NULL_TREE, pvoid_type_node,
4427 tree v4si_ftype_int_pvoid
4428 = build_function_type (V4SI_type_node,
4429 tree_cons (NULL_TREE, integer_type_node,
4430 tree_cons (NULL_TREE, pvoid_type_node,
4433 tree v8hi_ftype_int_pvoid
4434 = build_function_type (V8HI_type_node,
4435 tree_cons (NULL_TREE, integer_type_node,
4436 tree_cons (NULL_TREE, pvoid_type_node,
4439 tree int_ftype_v8hi_v8hi
4440 = build_function_type (integer_type_node,
4441 tree_cons (NULL_TREE, V8HI_type_node,
4442 tree_cons (NULL_TREE, V8HI_type_node,
4445 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4446 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4447 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4448 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4449 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4450 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4451 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4452 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4453 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4454 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4455 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4456 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4457 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4458 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4459 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4460 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4461 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4462 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4463 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4464 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4465 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4466 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4467 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4468 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4470 /* Add the simple ternary operators. */
4471 d = (struct builtin_description *) bdesc_3arg;
4472 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4475 enum machine_mode mode0, mode1, mode2, mode3;
4481 mode0 = insn_data[d->icode].operand[0].mode;
4482 mode1 = insn_data[d->icode].operand[1].mode;
4483 mode2 = insn_data[d->icode].operand[2].mode;
4484 mode3 = insn_data[d->icode].operand[3].mode;
4486 /* When all four are of the same mode. */
4487 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4492 type = v4si_ftype_v4si_v4si_v4si;
4495 type = v4sf_ftype_v4sf_v4sf_v4sf;
4498 type = v8hi_ftype_v8hi_v8hi_v8hi;
4501 type = v16qi_ftype_v16qi_v16qi_v16qi;
4507 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4512 type = v4si_ftype_v4si_v4si_v16qi;
4515 type = v4sf_ftype_v4sf_v4sf_v16qi;
4518 type = v8hi_ftype_v8hi_v8hi_v16qi;
4521 type = v16qi_ftype_v16qi_v16qi_v16qi;
4527 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4528 && mode3 == V4SImode)
4529 type = v4si_ftype_v16qi_v16qi_v4si;
4530 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4531 && mode3 == V4SImode)
4532 type = v4si_ftype_v8hi_v8hi_v4si;
4533 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4534 && mode3 == V4SImode)
4535 type = v4sf_ftype_v4sf_v4sf_v4si;
4537 /* vchar, vchar, vchar, 4 bit literal. */
4538 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4540 type = v16qi_ftype_v16qi_v16qi_char;
4542 /* vshort, vshort, vshort, 4 bit literal. */
4543 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4545 type = v8hi_ftype_v8hi_v8hi_char;
4547 /* vint, vint, vint, 4 bit literal. */
4548 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4550 type = v4si_ftype_v4si_v4si_char;
4552 /* vfloat, vfloat, vfloat, 4 bit literal. */
4553 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4555 type = v4sf_ftype_v4sf_v4sf_char;
4560 def_builtin (d->mask, d->name, type, d->code);
4563 /* Add the DST variants. */
4564 d = (struct builtin_description *) bdesc_dst;
4565 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4566 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4568 /* Initialize the predicates. */
4569 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4570 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4572 enum machine_mode mode1;
4575 mode1 = insn_data[dp->icode].operand[1].mode;
4580 type = int_ftype_int_v4si_v4si;
4583 type = int_ftype_int_v8hi_v8hi;
4586 type = int_ftype_int_v16qi_v16qi;
4589 type = int_ftype_int_v4sf_v4sf;
4595 def_builtin (dp->mask, dp->name, type, dp->code);
4598 /* Add the simple binary operators. */
4599 d = (struct builtin_description *) bdesc_2arg;
4600 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4602 enum machine_mode mode0, mode1, mode2;
4608 mode0 = insn_data[d->icode].operand[0].mode;
4609 mode1 = insn_data[d->icode].operand[1].mode;
4610 mode2 = insn_data[d->icode].operand[2].mode;
4612 /* When all three operands are of the same mode. */
4613 if (mode0 == mode1 && mode1 == mode2)
4618 type = v4sf_ftype_v4sf_v4sf;
4621 type = v4si_ftype_v4si_v4si;
4624 type = v16qi_ftype_v16qi_v16qi;
4627 type = v8hi_ftype_v8hi_v8hi;
4634 /* A few other combos we really don't want to do manually. */
4636 /* vint, vfloat, vfloat. */
4637 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4638 type = v4si_ftype_v4sf_v4sf;
4640 /* vshort, vchar, vchar. */
4641 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4642 type = v8hi_ftype_v16qi_v16qi;
4644 /* vint, vshort, vshort. */
4645 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4646 type = v4si_ftype_v8hi_v8hi;
4648 /* vshort, vint, vint. */
4649 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4650 type = v8hi_ftype_v4si_v4si;
4652 /* vchar, vshort, vshort. */
4653 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4654 type = v16qi_ftype_v8hi_v8hi;
4656 /* vint, vchar, vint. */
4657 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4658 type = v4si_ftype_v16qi_v4si;
4660 /* vint, vchar, vchar. */
4661 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4662 type = v4si_ftype_v16qi_v16qi;
4664 /* vint, vshort, vint. */
4665 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4666 type = v4si_ftype_v8hi_v4si;
4668 /* vint, vint, 5 bit literal. */
4669 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4670 type = v4si_ftype_v4si_char;
4672 /* vshort, vshort, 5 bit literal. */
4673 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4674 type = v8hi_ftype_v8hi_char;
4676 /* vchar, vchar, 5 bit literal. */
4677 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4678 type = v16qi_ftype_v16qi_char;
4680 /* vfloat, vint, 5 bit literal. */
4681 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4682 type = v4sf_ftype_v4si_char;
4684 /* vint, vfloat, 5 bit literal. */
4685 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4686 type = v4si_ftype_v4sf_char;
4689 else if (mode0 == SImode)
4694 type = int_ftype_v4si_v4si;
4697 type = int_ftype_v4sf_v4sf;
4700 type = int_ftype_v16qi_v16qi;
4703 type = int_ftype_v8hi_v8hi;
4713 def_builtin (d->mask, d->name, type, d->code);
4716 /* Add the simple unary operators. */
4717 d = (struct builtin_description *) bdesc_1arg;
4718 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4720 enum machine_mode mode0, mode1;
4726 mode0 = insn_data[d->icode].operand[0].mode;
4727 mode1 = insn_data[d->icode].operand[1].mode;
4729 if (mode0 == V4SImode && mode1 == QImode)
4730 type = v4si_ftype_char;
4731 else if (mode0 == V8HImode && mode1 == QImode)
4732 type = v8hi_ftype_char;
4733 else if (mode0 == V16QImode && mode1 == QImode)
4734 type = v16qi_ftype_char;
4735 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4736 type = v4sf_ftype_v4sf;
4737 else if (mode0 == V8HImode && mode1 == V16QImode)
4738 type = v8hi_ftype_v16qi;
4739 else if (mode0 == V4SImode && mode1 == V8HImode)
4740 type = v4si_ftype_v8hi;
4744 def_builtin (d->mask, d->name, type, d->code);
4749 /* Generate a memory reference for expand_block_move, copying volatile,
4750 and other bits from an original memory reference. */
4753 expand_block_move_mem (mode, addr, orig_mem)
4754 enum machine_mode mode;
4758 rtx mem = gen_rtx_MEM (mode, addr);
4760 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4764 /* Expand a block move operation, and return 1 if successful. Return 0
4765 if we should let the compiler generate normal code.
4767 operands[0] is the destination
4768 operands[1] is the source
4769 operands[2] is the length
4770 operands[3] is the alignment */
4772 #define MAX_MOVE_REG 4
4775 expand_block_move (operands)
4778 rtx orig_dest = operands[0];
4779 rtx orig_src = operands[1];
4780 rtx bytes_rtx = operands[2];
4781 rtx align_rtx = operands[3];
4782 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4793 rtx stores[MAX_MOVE_REG];
4796 /* If this is not a fixed size move, just call memcpy */
4800 /* If this is not a fixed size alignment, abort */
4801 if (GET_CODE (align_rtx) != CONST_INT)
4803 align = INTVAL (align_rtx);
4805 /* Anything to move? */
4806 bytes = INTVAL (bytes_rtx);
4810 /* store_one_arg depends on expand_block_move to handle at least the size of
4811 reg_parm_stack_space. */
4812 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4815 /* Move the address into scratch registers. */
4816 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4817 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4819 if (TARGET_STRING) /* string instructions are available */
4821 for ( ; bytes > 0; bytes -= move_bytes)
4823 if (bytes > 24 /* move up to 32 bytes at a time */
4831 && ! fixed_regs[12])
4833 move_bytes = (bytes > 32) ? 32 : bytes;
4834 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4837 expand_block_move_mem (BLKmode,
4840 GEN_INT ((move_bytes == 32)
4844 else if (bytes > 16 /* move up to 24 bytes at a time */
4850 && ! fixed_regs[10])
4852 move_bytes = (bytes > 24) ? 24 : bytes;
4853 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4856 expand_block_move_mem (BLKmode,
4859 GEN_INT (move_bytes),
4862 else if (bytes > 8 /* move up to 16 bytes at a time */
4868 move_bytes = (bytes > 16) ? 16 : bytes;
4869 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4872 expand_block_move_mem (BLKmode,
4875 GEN_INT (move_bytes),
4878 else if (bytes >= 8 && TARGET_POWERPC64
4879 /* 64-bit loads and stores require word-aligned
4881 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4884 tmp_reg = gen_reg_rtx (DImode);
4885 emit_move_insn (tmp_reg,
4886 expand_block_move_mem (DImode,
4887 src_reg, orig_src));
4888 emit_move_insn (expand_block_move_mem (DImode,
4889 dest_reg, orig_dest),
4892 else if (bytes > 4 && !TARGET_POWERPC64)
4893 { /* move up to 8 bytes at a time */
4894 move_bytes = (bytes > 8) ? 8 : bytes;
4895 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4898 expand_block_move_mem (BLKmode,
4901 GEN_INT (move_bytes),
4904 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4905 { /* move 4 bytes */
4907 tmp_reg = gen_reg_rtx (SImode);
4908 emit_move_insn (tmp_reg,
4909 expand_block_move_mem (SImode,
4910 src_reg, orig_src));
4911 emit_move_insn (expand_block_move_mem (SImode,
4912 dest_reg, orig_dest),
4915 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4916 { /* move 2 bytes */
4918 tmp_reg = gen_reg_rtx (HImode);
4919 emit_move_insn (tmp_reg,
4920 expand_block_move_mem (HImode,
4921 src_reg, orig_src));
4922 emit_move_insn (expand_block_move_mem (HImode,
4923 dest_reg, orig_dest),
4926 else if (bytes == 1) /* move 1 byte */
4929 tmp_reg = gen_reg_rtx (QImode);
4930 emit_move_insn (tmp_reg,
4931 expand_block_move_mem (QImode,
4932 src_reg, orig_src));
4933 emit_move_insn (expand_block_move_mem (QImode,
4934 dest_reg, orig_dest),
4938 { /* move up to 4 bytes at a time */
4939 move_bytes = (bytes > 4) ? 4 : bytes;
4940 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
4943 expand_block_move_mem (BLKmode,
4946 GEN_INT (move_bytes),
4950 if (bytes > move_bytes)
4952 if (! TARGET_POWERPC64)
4954 emit_insn (gen_addsi3 (src_reg, src_reg,
4955 GEN_INT (move_bytes)));
4956 emit_insn (gen_addsi3 (dest_reg, dest_reg,
4957 GEN_INT (move_bytes)));
4961 emit_insn (gen_adddi3 (src_reg, src_reg,
4962 GEN_INT (move_bytes)));
4963 emit_insn (gen_adddi3 (dest_reg, dest_reg,
4964 GEN_INT (move_bytes)));
4970 else /* string instructions not available */
4972 num_reg = offset = 0;
4973 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
4975 /* Calculate the correct offset for src/dest */
4979 dest_addr = dest_reg;
4983 src_addr = plus_constant (src_reg, offset);
4984 dest_addr = plus_constant (dest_reg, offset);
4987 /* Generate the appropriate load and store, saving the stores
4989 if (bytes >= 8 && TARGET_POWERPC64
4990 /* 64-bit loads and stores require word-aligned
4992 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4995 tmp_reg = gen_reg_rtx (DImode);
4996 emit_insn (gen_movdi (tmp_reg,
4997 expand_block_move_mem (DImode,
5000 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5005 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5008 tmp_reg = gen_reg_rtx (SImode);
5009 emit_insn (gen_movsi (tmp_reg,
5010 expand_block_move_mem (SImode,
5013 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5018 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5021 tmp_reg = gen_reg_rtx (HImode);
5022 emit_insn (gen_movhi (tmp_reg,
5023 expand_block_move_mem (HImode,
5026 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5034 tmp_reg = gen_reg_rtx (QImode);
5035 emit_insn (gen_movqi (tmp_reg,
5036 expand_block_move_mem (QImode,
5039 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5045 if (num_reg >= MAX_MOVE_REG)
5047 for (i = 0; i < num_reg; i++)
5048 emit_insn (stores[i]);
5053 for (i = 0; i < num_reg; i++)
5054 emit_insn (stores[i]);
5061 /* Return 1 if OP is a load multiple operation. It is known to be a
5062 PARALLEL and the first section will be tested. */
5065 load_multiple_operation (op, mode)
5067 enum machine_mode mode ATTRIBUTE_UNUSED;
5069 int count = XVECLEN (op, 0);
5070 unsigned int dest_regno;
5074 /* Perform a quick check so we don't blow up below. */
5076 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5077 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5078 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5081 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5082 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5084 for (i = 1; i < count; i++)
5086 rtx elt = XVECEXP (op, 0, i);
5088 if (GET_CODE (elt) != SET
5089 || GET_CODE (SET_DEST (elt)) != REG
5090 || GET_MODE (SET_DEST (elt)) != SImode
5091 || REGNO (SET_DEST (elt)) != dest_regno + i
5092 || GET_CODE (SET_SRC (elt)) != MEM
5093 || GET_MODE (SET_SRC (elt)) != SImode
5094 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5095 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5096 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5097 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5104 /* Similar, but tests for store multiple. Here, the second vector element
5105 is a CLOBBER. It will be tested later. */
5108 store_multiple_operation (op, mode)
5110 enum machine_mode mode ATTRIBUTE_UNUSED;
5112 int count = XVECLEN (op, 0) - 1;
5113 unsigned int src_regno;
5117 /* Perform a quick check so we don't blow up below. */
5119 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5120 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5121 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5124 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5125 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5127 for (i = 1; i < count; i++)
5129 rtx elt = XVECEXP (op, 0, i + 1);
5131 if (GET_CODE (elt) != SET
5132 || GET_CODE (SET_SRC (elt)) != REG
5133 || GET_MODE (SET_SRC (elt)) != SImode
5134 || REGNO (SET_SRC (elt)) != src_regno + i
5135 || GET_CODE (SET_DEST (elt)) != MEM
5136 || GET_MODE (SET_DEST (elt)) != SImode
5137 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5138 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5139 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5140 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5147 /* Return 1 for a parallel vrsave operation. */
5150 vrsave_operation (op, mode)
5152 enum machine_mode mode ATTRIBUTE_UNUSED;
5154 int count = XVECLEN (op, 0);
5155 unsigned int dest_regno, src_regno;
5159 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5160 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5161 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5164 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5165 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5167 if (dest_regno != VRSAVE_REGNO
5168 && src_regno != VRSAVE_REGNO)
5171 for (i = 1; i < count; i++)
5173 rtx elt = XVECEXP (op, 0, i);
5175 if (GET_CODE (elt) != CLOBBER
5176 && GET_CODE (elt) != SET)
5183 /* Return 1 for an PARALLEL suitable for mtcrf. */
5186 mtcrf_operation (op, mode)
5188 enum machine_mode mode ATTRIBUTE_UNUSED;
5190 int count = XVECLEN (op, 0);
5194 /* Perform a quick check so we don't blow up below. */
5196 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5197 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5198 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5200 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5202 if (GET_CODE (src_reg) != REG
5203 || GET_MODE (src_reg) != SImode
5204 || ! INT_REGNO_P (REGNO (src_reg)))
5207 for (i = 0; i < count; i++)
5209 rtx exp = XVECEXP (op, 0, i);
5213 if (GET_CODE (exp) != SET
5214 || GET_CODE (SET_DEST (exp)) != REG
5215 || GET_MODE (SET_DEST (exp)) != CCmode
5216 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5218 unspec = SET_SRC (exp);
5219 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5221 if (GET_CODE (unspec) != UNSPEC
5222 || XINT (unspec, 1) != 20
5223 || XVECLEN (unspec, 0) != 2
5224 || XVECEXP (unspec, 0, 0) != src_reg
5225 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5226 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5232 /* Return 1 for an PARALLEL suitable for lmw. */
5235 lmw_operation (op, mode)
5237 enum machine_mode mode ATTRIBUTE_UNUSED;
5239 int count = XVECLEN (op, 0);
5240 unsigned int dest_regno;
5242 unsigned int base_regno;
5243 HOST_WIDE_INT offset;
5246 /* Perform a quick check so we don't blow up below. */
5248 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5249 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5250 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5253 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5254 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5257 || count != 32 - (int) dest_regno)
5260 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5263 base_regno = REGNO (src_addr);
5264 if (base_regno == 0)
5267 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5269 offset = INTVAL (XEXP (src_addr, 1));
5270 base_regno = REGNO (XEXP (src_addr, 0));
5275 for (i = 0; i < count; i++)
5277 rtx elt = XVECEXP (op, 0, i);
5280 HOST_WIDE_INT newoffset;
5282 if (GET_CODE (elt) != SET
5283 || GET_CODE (SET_DEST (elt)) != REG
5284 || GET_MODE (SET_DEST (elt)) != SImode
5285 || REGNO (SET_DEST (elt)) != dest_regno + i
5286 || GET_CODE (SET_SRC (elt)) != MEM
5287 || GET_MODE (SET_SRC (elt)) != SImode)
5289 newaddr = XEXP (SET_SRC (elt), 0);
5290 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5295 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5297 addr_reg = XEXP (newaddr, 0);
5298 newoffset = INTVAL (XEXP (newaddr, 1));
5302 if (REGNO (addr_reg) != base_regno
5303 || newoffset != offset + 4 * i)
5310 /* Return 1 for an PARALLEL suitable for stmw. */
5313 stmw_operation (op, mode)
5315 enum machine_mode mode ATTRIBUTE_UNUSED;
5317 int count = XVECLEN (op, 0);
5318 unsigned int src_regno;
5320 unsigned int base_regno;
5321 HOST_WIDE_INT offset;
5324 /* Perform a quick check so we don't blow up below. */
5326 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5327 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5328 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5331 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5332 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5335 || count != 32 - (int) src_regno)
5338 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5341 base_regno = REGNO (dest_addr);
5342 if (base_regno == 0)
5345 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5347 offset = INTVAL (XEXP (dest_addr, 1));
5348 base_regno = REGNO (XEXP (dest_addr, 0));
5353 for (i = 0; i < count; i++)
5355 rtx elt = XVECEXP (op, 0, i);
5358 HOST_WIDE_INT newoffset;
5360 if (GET_CODE (elt) != SET
5361 || GET_CODE (SET_SRC (elt)) != REG
5362 || GET_MODE (SET_SRC (elt)) != SImode
5363 || REGNO (SET_SRC (elt)) != src_regno + i
5364 || GET_CODE (SET_DEST (elt)) != MEM
5365 || GET_MODE (SET_DEST (elt)) != SImode)
5367 newaddr = XEXP (SET_DEST (elt), 0);
5368 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5373 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5375 addr_reg = XEXP (newaddr, 0);
5376 newoffset = INTVAL (XEXP (newaddr, 1));
5380 if (REGNO (addr_reg) != base_regno
5381 || newoffset != offset + 4 * i)
5388 /* A validation routine: say whether CODE, a condition code, and MODE
5389 match. The other alternatives either don't make sense or should
5390 never be generated. */
5393 validate_condition_mode (code, mode)
5395 enum machine_mode mode;
5397 if (GET_RTX_CLASS (code) != '<'
5398 || GET_MODE_CLASS (mode) != MODE_CC)
5401 /* These don't make sense. */
5402 if ((code == GT || code == LT || code == GE || code == LE)
5403 && mode == CCUNSmode)
5406 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5407 && mode != CCUNSmode)
5410 if (mode != CCFPmode
5411 && (code == ORDERED || code == UNORDERED
5412 || code == UNEQ || code == LTGT
5413 || code == UNGT || code == UNLT
5414 || code == UNGE || code == UNLE))
5417 /* These should never be generated except for
5418 flag_unsafe_math_optimizations. */
5419 if (mode == CCFPmode
5420 && ! flag_unsafe_math_optimizations
5421 && (code == LE || code == GE
5422 || code == UNEQ || code == LTGT
5423 || code == UNGT || code == UNLT))
5426 /* These are invalid; the information is not there. */
5427 if (mode == CCEQmode
5428 && code != EQ && code != NE)
5432 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5433 We only check the opcode against the mode of the CC value here. */
5436 branch_comparison_operator (op, mode)
5438 enum machine_mode mode ATTRIBUTE_UNUSED;
5440 enum rtx_code code = GET_CODE (op);
5441 enum machine_mode cc_mode;
5443 if (GET_RTX_CLASS (code) != '<')
5446 cc_mode = GET_MODE (XEXP (op, 0));
5447 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5450 validate_condition_mode (code, cc_mode);
5455 /* Return 1 if OP is a comparison operation that is valid for a branch
5456 insn and which is true if the corresponding bit in the CC register
5460 branch_positive_comparison_operator (op, mode)
5462 enum machine_mode mode;
5466 if (! branch_comparison_operator (op, mode))
5469 code = GET_CODE (op);
5470 return (code == EQ || code == LT || code == GT
5471 || code == LTU || code == GTU
5472 || code == UNORDERED);
5475 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5476 We check the opcode against the mode of the CC value and disallow EQ or
5477 NE comparisons for integers. */
5480 scc_comparison_operator (op, mode)
5482 enum machine_mode mode;
5484 enum rtx_code code = GET_CODE (op);
5485 enum machine_mode cc_mode;
5487 if (GET_MODE (op) != mode && mode != VOIDmode)
5490 if (GET_RTX_CLASS (code) != '<')
5493 cc_mode = GET_MODE (XEXP (op, 0));
5494 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5497 validate_condition_mode (code, cc_mode);
5499 if (code == NE && cc_mode != CCFPmode)
5506 trap_comparison_operator (op, mode)
5508 enum machine_mode mode;
5510 if (mode != VOIDmode && mode != GET_MODE (op))
5512 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5516 boolean_operator (op, mode)
5518 enum machine_mode mode ATTRIBUTE_UNUSED;
5520 enum rtx_code code = GET_CODE (op);
5521 return (code == AND || code == IOR || code == XOR);
5525 boolean_or_operator (op, mode)
5527 enum machine_mode mode ATTRIBUTE_UNUSED;
5529 enum rtx_code code = GET_CODE (op);
5530 return (code == IOR || code == XOR);
5534 min_max_operator (op, mode)
5536 enum machine_mode mode ATTRIBUTE_UNUSED;
5538 enum rtx_code code = GET_CODE (op);
5539 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5542 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5543 mask required to convert the result of a rotate insn into a shift
5544 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
5547 includes_lshift_p (shiftop, andop)
5551 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5553 shift_mask <<= INTVAL (shiftop);
5555 return (INTVAL (andop) & ~shift_mask) == 0;
5558 /* Similar, but for right shift. */
5561 includes_rshift_p (shiftop, andop)
5565 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5567 shift_mask >>= INTVAL (shiftop);
5569 return (INTVAL (andop) & ~shift_mask) == 0;
5572 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5573 to perform a left shift. It must have exactly SHIFTOP least
5574 signifigant 0's, then one or more 1's, then zero or more 0's. */
5577 includes_rldic_lshift_p (shiftop, andop)
5581 if (GET_CODE (andop) == CONST_INT)
5583 HOST_WIDE_INT c, lsb, shift_mask;
5586 if (c == 0 || c == ~0)
5590 shift_mask <<= INTVAL (shiftop);
5592 /* Find the least signifigant one bit. */
5595 /* It must coincide with the LSB of the shift mask. */
5596 if (-lsb != shift_mask)
5599 /* Invert to look for the next transition (if any). */
5602 /* Remove the low group of ones (originally low group of zeros). */
5605 /* Again find the lsb, and check we have all 1's above. */
5609 else if (GET_CODE (andop) == CONST_DOUBLE
5610 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5612 HOST_WIDE_INT low, high, lsb;
5613 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5615 low = CONST_DOUBLE_LOW (andop);
5616 if (HOST_BITS_PER_WIDE_INT < 64)
5617 high = CONST_DOUBLE_HIGH (andop);
5619 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5620 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5623 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5625 shift_mask_high = ~0;
5626 if (INTVAL (shiftop) > 32)
5627 shift_mask_high <<= INTVAL (shiftop) - 32;
5631 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5638 return high == -lsb;
5641 shift_mask_low = ~0;
5642 shift_mask_low <<= INTVAL (shiftop);
5646 if (-lsb != shift_mask_low)
5649 if (HOST_BITS_PER_WIDE_INT < 64)
5654 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5657 return high == -lsb;
5661 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5667 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5668 to perform a left shift. It must have SHIFTOP or more least
5669 signifigant 0's, with the remainder of the word 1's. */
5672 includes_rldicr_lshift_p (shiftop, andop)
5676 if (GET_CODE (andop) == CONST_INT)
5678 HOST_WIDE_INT c, lsb, shift_mask;
5681 shift_mask <<= INTVAL (shiftop);
5684 /* Find the least signifigant one bit. */
5687 /* It must be covered by the shift mask.
5688 This test also rejects c == 0. */
5689 if ((lsb & shift_mask) == 0)
5692 /* Check we have all 1's above the transition, and reject all 1's. */
5693 return c == -lsb && lsb != 1;
5695 else if (GET_CODE (andop) == CONST_DOUBLE
5696 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5698 HOST_WIDE_INT low, lsb, shift_mask_low;
5700 low = CONST_DOUBLE_LOW (andop);
5702 if (HOST_BITS_PER_WIDE_INT < 64)
5704 HOST_WIDE_INT high, shift_mask_high;
5706 high = CONST_DOUBLE_HIGH (andop);
5710 shift_mask_high = ~0;
5711 if (INTVAL (shiftop) > 32)
5712 shift_mask_high <<= INTVAL (shiftop) - 32;
5716 if ((lsb & shift_mask_high) == 0)
5719 return high == -lsb;
5725 shift_mask_low = ~0;
5726 shift_mask_low <<= INTVAL (shiftop);
5730 if ((lsb & shift_mask_low) == 0)
5733 return low == -lsb && lsb != 1;
5739 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5740 for lfq and stfq insns.
5742 Note reg1 and reg2 *must* be hard registers. To be sure we will
5743 abort if we are passed pseudo registers. */
5746 registers_ok_for_quad_peep (reg1, reg2)
5749 /* We might have been passed a SUBREG. */
5750 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5753 return (REGNO (reg1) == REGNO (reg2) - 1);
5756 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5757 addr1 and addr2 must be in consecutive memory locations
5758 (addr2 == addr1 + 8). */
5761 addrs_ok_for_quad_peep (addr1, addr2)
5768 /* Extract an offset (if used) from the first addr. */
5769 if (GET_CODE (addr1) == PLUS)
5771 /* If not a REG, return zero. */
5772 if (GET_CODE (XEXP (addr1, 0)) != REG)
5776 reg1 = REGNO (XEXP (addr1, 0));
5777 /* The offset must be constant! */
5778 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5780 offset1 = INTVAL (XEXP (addr1, 1));
5783 else if (GET_CODE (addr1) != REG)
5787 reg1 = REGNO (addr1);
5788 /* This was a simple (mem (reg)) expression. Offset is 0. */
5792 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5793 if (GET_CODE (addr2) != PLUS)
5796 if (GET_CODE (XEXP (addr2, 0)) != REG
5797 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5800 if (reg1 != REGNO (XEXP (addr2, 0)))
5803 /* The offset for the second addr must be 8 more than the first addr. */
5804 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5807 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5812 /* Return the register class of a scratch register needed to copy IN into
5813 or out of a register in CLASS in MODE. If it can be done directly,
5814 NO_REGS is returned. */
5817 secondary_reload_class (class, mode, in)
5818 enum reg_class class;
5819 enum machine_mode mode ATTRIBUTE_UNUSED;
5824 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5826 /* We cannot copy a symbolic operand directly into anything
5827 other than BASE_REGS for TARGET_ELF. So indicate that a
5828 register from BASE_REGS is needed as an intermediate
5831 On Darwin, pic addresses require a load from memory, which
5832 needs a base register. */
5833 if (class != BASE_REGS
5834 && (GET_CODE (in) == SYMBOL_REF
5835 || GET_CODE (in) == HIGH
5836 || GET_CODE (in) == LABEL_REF
5837 || GET_CODE (in) == CONST))
5841 if (GET_CODE (in) == REG)
5844 if (regno >= FIRST_PSEUDO_REGISTER)
5846 regno = true_regnum (in);
5847 if (regno >= FIRST_PSEUDO_REGISTER)
5851 else if (GET_CODE (in) == SUBREG)
5853 regno = true_regnum (in);
5854 if (regno >= FIRST_PSEUDO_REGISTER)
5860 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5862 if (class == GENERAL_REGS || class == BASE_REGS
5863 || (regno >= 0 && INT_REGNO_P (regno)))
5866 /* Constants, memory, and FP registers can go into FP registers. */
5867 if ((regno == -1 || FP_REGNO_P (regno))
5868 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5871 /* Memory, and AltiVec registers can go into AltiVec registers. */
5872 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5873 && class == ALTIVEC_REGS)
5876 /* We can copy among the CR registers. */
5877 if ((class == CR_REGS || class == CR0_REGS)
5878 && regno >= 0 && CR_REGNO_P (regno))
5881 /* Otherwise, we need GENERAL_REGS. */
5882 return GENERAL_REGS;
5885 /* Given a comparison operation, return the bit number in CCR to test. We
5886 know this is a valid comparison.
5888 SCC_P is 1 if this is for an scc. That means that %D will have been
5889 used instead of %C, so the bits will be in different places.
5891 Return -1 if OP isn't a valid comparison for some reason. */
5898 enum rtx_code code = GET_CODE (op);
5899 enum machine_mode cc_mode;
5904 if (GET_RTX_CLASS (code) != '<')
5909 if (GET_CODE (reg) != REG
5910 || ! CR_REGNO_P (REGNO (reg)))
5913 cc_mode = GET_MODE (reg);
5914 cc_regnum = REGNO (reg);
5915 base_bit = 4 * (cc_regnum - CR0_REGNO);
5917 validate_condition_mode (code, cc_mode);
5922 return scc_p ? base_bit + 3 : base_bit + 2;
5924 return base_bit + 2;
5925 case GT: case GTU: case UNLE:
5926 return base_bit + 1;
5927 case LT: case LTU: case UNGE:
5929 case ORDERED: case UNORDERED:
5930 return base_bit + 3;
5933 /* If scc, we will have done a cror to put the bit in the
5934 unordered position. So test that bit. For integer, this is ! LT
5935 unless this is an scc insn. */
5936 return scc_p ? base_bit + 3 : base_bit;
5939 return scc_p ? base_bit + 3 : base_bit + 1;
5946 /* Return the GOT register. */
5949 rs6000_got_register (value)
5950 rtx value ATTRIBUTE_UNUSED;
5952 /* The second flow pass currently (June 1999) can't update
5953 regs_ever_live without disturbing other parts of the compiler, so
5954 update it here to make the prolog/epilogue code happy. */
5955 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5956 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5958 current_function_uses_pic_offset_table = 1;
5960 return pic_offset_table_rtx;
5963 /* Functions to init, mark and free struct machine_function.
5964 These will be called, via pointer variables,
5965 from push_function_context and pop_function_context. */
5968 rs6000_init_machine_status (p)
5971 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5975 rs6000_free_machine_status (p)
5978 if (p->machine == NULL)
5986 /* Print an operand. Recognize special options, documented below. */
5989 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5990 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5992 #define SMALL_DATA_RELOC "sda21"
5993 #define SMALL_DATA_REG 0
5997 print_operand (file, x, code)
6005 /* These macros test for integers and extract the low-order bits. */
6007 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6008 && GET_MODE (X) == VOIDmode)
6010 #define INT_LOWPART(X) \
6011 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6016 /* Write out an instruction after the call which may be replaced
6017 with glue code by the loader. This depends on the AIX version. */
6018 asm_fprintf (file, RS6000_CALL_GLUE);
6021 /* %a is output_address. */
6024 /* If X is a constant integer whose low-order 5 bits are zero,
6025 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6026 in the AIX assembler where "sri" with a zero shift count
6027 writes a trash instruction. */
6028 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6035 /* If constant, low-order 16 bits of constant, unsigned.
6036 Otherwise, write normally. */
6038 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6040 print_operand (file, x, 0);
6044 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6045 for 64-bit mask direction. */
6046 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6049 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6053 /* There used to be a comment for 'C' reading "This is an
6054 optional cror needed for certain floating-point
6055 comparisons. Otherwise write nothing." */
6057 /* Similar, except that this is for an scc, so we must be able to
6058 encode the test in a single bit that is one. We do the above
6059 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6060 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6061 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6063 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6065 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6067 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6070 else if (GET_CODE (x) == NE)
6072 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6074 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6075 base_bit + 2, base_bit + 2);
6080 /* X is a CR register. Print the number of the EQ bit of the CR */
6081 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6082 output_operand_lossage ("invalid %%E value");
6084 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6088 /* X is a CR register. Print the shift count needed to move it
6089 to the high-order four bits. */
6090 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6091 output_operand_lossage ("invalid %%f value");
6093 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6097 /* Similar, but print the count for the rotate in the opposite
6099 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6100 output_operand_lossage ("invalid %%F value");
6102 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6106 /* X is a constant integer. If it is negative, print "m",
6107 otherwise print "z". This is to make a aze or ame insn. */
6108 if (GET_CODE (x) != CONST_INT)
6109 output_operand_lossage ("invalid %%G value");
6110 else if (INTVAL (x) >= 0)
6117 /* If constant, output low-order five bits. Otherwise, write
6120 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6122 print_operand (file, x, 0);
6126 /* If constant, output low-order six bits. Otherwise, write
6129 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6131 print_operand (file, x, 0);
6135 /* Print `i' if this is a constant, else nothing. */
6141 /* Write the bit number in CCR for jump. */
6144 output_operand_lossage ("invalid %%j code");
6146 fprintf (file, "%d", i);
6150 /* Similar, but add one for shift count in rlinm for scc and pass
6151 scc flag to `ccr_bit'. */
6154 output_operand_lossage ("invalid %%J code");
6156 /* If we want bit 31, write a shift count of zero, not 32. */
6157 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6161 /* X must be a constant. Write the 1's complement of the
6164 output_operand_lossage ("invalid %%k value");
6166 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6170 /* X must be a symbolic constant on ELF. Write an
6171 expression suitable for an 'addi' that adds in the low 16
6173 if (GET_CODE (x) != CONST)
6175 print_operand_address (file, x);
6180 if (GET_CODE (XEXP (x, 0)) != PLUS
6181 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6182 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6183 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6184 output_operand_lossage ("invalid %%K value");
6185 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6187 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6191 /* %l is output_asm_label. */
6194 /* Write second word of DImode or DFmode reference. Works on register
6195 or non-indexed memory only. */
6196 if (GET_CODE (x) == REG)
6197 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6198 else if (GET_CODE (x) == MEM)
6200 /* Handle possible auto-increment. Since it is pre-increment and
6201 we have already done it, we can just use an offset of word. */
6202 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6203 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6204 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6207 output_address (XEXP (adjust_address_nv (x, SImode,
6211 if (small_data_operand (x, GET_MODE (x)))
6212 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6213 reg_names[SMALL_DATA_REG]);
6218 /* MB value for a mask operand. */
6219 if (! mask_operand (x, VOIDmode))
6220 output_operand_lossage ("invalid %%m value");
6222 val = INT_LOWPART (x);
6224 /* If the high bit is set and the low bit is not, the value is zero.
6225 If the high bit is zero, the value is the first 1 bit we find from
6227 if ((val & 0x80000000) && ((val & 1) == 0))
6232 else if ((val & 0x80000000) == 0)
6234 for (i = 1; i < 32; i++)
6235 if ((val <<= 1) & 0x80000000)
6237 fprintf (file, "%d", i);
6241 /* Otherwise, look for the first 0 bit from the right. The result is its
6242 number plus 1. We know the low-order bit is one. */
6243 for (i = 0; i < 32; i++)
6244 if (((val >>= 1) & 1) == 0)
6247 /* If we ended in ...01, i would be 0. The correct value is 31, so
6249 fprintf (file, "%d", 31 - i);
6253 /* ME value for a mask operand. */
6254 if (! mask_operand (x, VOIDmode))
6255 output_operand_lossage ("invalid %%M value");
6257 val = INT_LOWPART (x);
6259 /* If the low bit is set and the high bit is not, the value is 31.
6260 If the low bit is zero, the value is the first 1 bit we find from
6262 if ((val & 1) && ((val & 0x80000000) == 0))
6267 else if ((val & 1) == 0)
6269 for (i = 0; i < 32; i++)
6270 if ((val >>= 1) & 1)
6273 /* If we had ....10, i would be 0. The result should be
6274 30, so we need 30 - i. */
6275 fprintf (file, "%d", 30 - i);
6279 /* Otherwise, look for the first 0 bit from the left. The result is its
6280 number minus 1. We know the high-order bit is one. */
6281 for (i = 0; i < 32; i++)
6282 if (((val <<= 1) & 0x80000000) == 0)
6285 fprintf (file, "%d", i);
6288 /* %n outputs the negative of its operand. */
6291 /* Write the number of elements in the vector times 4. */
6292 if (GET_CODE (x) != PARALLEL)
6293 output_operand_lossage ("invalid %%N value");
6295 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6299 /* Similar, but subtract 1 first. */
6300 if (GET_CODE (x) != PARALLEL)
6301 output_operand_lossage ("invalid %%O value");
6303 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6307 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6309 || INT_LOWPART (x) < 0
6310 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6311 output_operand_lossage ("invalid %%p value");
6313 fprintf (file, "%d", i);
6317 /* The operand must be an indirect memory reference. The result
6318 is the register number. */
6319 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6320 || REGNO (XEXP (x, 0)) >= 32)
6321 output_operand_lossage ("invalid %%P value");
6323 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6327 /* This outputs the logical code corresponding to a boolean
6328 expression. The expression may have one or both operands
6329 negated (if one, only the first one). For condition register
6330 logical operations, it will also treat the negated
6331 CR codes as NOTs, but not handle NOTs of them. */
6333 const char *const *t = 0;
6335 enum rtx_code code = GET_CODE (x);
6336 static const char * const tbl[3][3] = {
6337 { "and", "andc", "nor" },
6338 { "or", "orc", "nand" },
6339 { "xor", "eqv", "xor" } };
6343 else if (code == IOR)
6345 else if (code == XOR)
6348 output_operand_lossage ("invalid %%q value");
6350 if (GET_CODE (XEXP (x, 0)) != NOT)
6354 if (GET_CODE (XEXP (x, 1)) == NOT)
6365 /* X is a CR register. Print the mask for `mtcrf'. */
6366 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6367 output_operand_lossage ("invalid %%R value");
6369 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6373 /* Low 5 bits of 32 - value */
6375 output_operand_lossage ("invalid %%s value");
6377 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6381 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6382 CONST_INT 32-bit mask is considered sign-extended so any
6383 transition must occur within the CONST_INT, not on the boundary. */
6384 if (! mask64_operand (x, VOIDmode))
6385 output_operand_lossage ("invalid %%S value");
6387 val = INT_LOWPART (x);
6389 if (val & 1) /* Clear Left */
6391 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6392 if (!((val >>= 1) & 1))
6395 #if HOST_BITS_PER_WIDE_INT == 32
6396 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6398 val = CONST_DOUBLE_HIGH (x);
6403 for (i = 32; i < 64; i++)
6404 if (!((val >>= 1) & 1))
6408 /* i = index of last set bit from right
6409 mask begins at 63 - i from left */
6411 output_operand_lossage ("%%S computed all 1's mask");
6413 fprintf (file, "%d", 63 - i);
6416 else /* Clear Right */
6418 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6419 if ((val >>= 1) & 1)
6422 #if HOST_BITS_PER_WIDE_INT == 32
6423 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6425 val = CONST_DOUBLE_HIGH (x);
6427 if (val == (HOST_WIDE_INT) -1)
6430 for (i = 32; i < 64; i++)
6431 if ((val >>= 1) & 1)
6435 /* i = index of last clear bit from right
6436 mask ends at 62 - i from left */
6438 output_operand_lossage ("%%S computed all 0's mask");
6440 fprintf (file, "%d", 62 - i);
6445 /* Print the symbolic name of a branch target register. */
6446 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6447 && REGNO (x) != COUNT_REGISTER_REGNUM))
6448 output_operand_lossage ("invalid %%T value");
6449 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6450 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6452 fputs ("ctr", file);
6456 /* High-order 16 bits of constant for use in unsigned operand. */
6458 output_operand_lossage ("invalid %%u value");
6460 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6461 (INT_LOWPART (x) >> 16) & 0xffff);
6465 /* High-order 16 bits of constant for use in signed operand. */
6467 output_operand_lossage ("invalid %%v value");
6469 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6470 (INT_LOWPART (x) >> 16) & 0xffff);
6474 /* Print `u' if this has an auto-increment or auto-decrement. */
6475 if (GET_CODE (x) == MEM
6476 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6477 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6482 /* Print the trap code for this operand. */
6483 switch (GET_CODE (x))
6486 fputs ("eq", file); /* 4 */
6489 fputs ("ne", file); /* 24 */
6492 fputs ("lt", file); /* 16 */
6495 fputs ("le", file); /* 20 */
6498 fputs ("gt", file); /* 8 */
6501 fputs ("ge", file); /* 12 */
6504 fputs ("llt", file); /* 2 */
6507 fputs ("lle", file); /* 6 */
6510 fputs ("lgt", file); /* 1 */
6513 fputs ("lge", file); /* 5 */
6521 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6524 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6525 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6527 print_operand (file, x, 0);
6531 /* MB value for a PowerPC64 rldic operand. */
6532 val = (GET_CODE (x) == CONST_INT
6533 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6538 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6539 if ((val <<= 1) < 0)
6542 #if HOST_BITS_PER_WIDE_INT == 32
6543 if (GET_CODE (x) == CONST_INT && i >= 0)
6544 i += 32; /* zero-extend high-part was all 0's */
6545 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6547 val = CONST_DOUBLE_LOW (x);
6554 for ( ; i < 64; i++)
6555 if ((val <<= 1) < 0)
6560 fprintf (file, "%d", i + 1);
6564 if (GET_CODE (x) == MEM
6565 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6570 /* Like 'L', for third word of TImode */
6571 if (GET_CODE (x) == REG)
6572 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6573 else if (GET_CODE (x) == MEM)
6575 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6576 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6577 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6579 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6580 if (small_data_operand (x, GET_MODE (x)))
6581 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6582 reg_names[SMALL_DATA_REG]);
6587 /* X is a SYMBOL_REF. Write out the name preceded by a
6588 period and without any trailing data in brackets. Used for function
6589 names. If we are configured for System V (or the embedded ABI) on
6590 the PowerPC, do not emit the period, since those systems do not use
6591 TOCs and the like. */
6592 if (GET_CODE (x) != SYMBOL_REF)
6595 if (XSTR (x, 0)[0] != '.')
6597 switch (DEFAULT_ABI)
6607 case ABI_AIX_NODESC:
6613 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6615 assemble_name (file, XSTR (x, 0));
6620 /* Like 'L', for last word of TImode. */
6621 if (GET_CODE (x) == REG)
6622 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6623 else if (GET_CODE (x) == MEM)
6625 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6626 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6627 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6629 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6630 if (small_data_operand (x, GET_MODE (x)))
6631 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6632 reg_names[SMALL_DATA_REG]);
6636 /* Print AltiVec memory operand. */
6641 if (GET_CODE (x) != MEM)
6646 if (GET_CODE (tmp) == REG)
6647 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6648 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6650 if (REGNO (XEXP (tmp, 0)) == 0)
6651 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6652 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6654 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6655 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6663 if (GET_CODE (x) == REG)
6664 fprintf (file, "%s", reg_names[REGNO (x)]);
6665 else if (GET_CODE (x) == MEM)
6667 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6668 know the width from the mode. */
6669 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6670 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6671 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6672 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6673 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6674 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6676 output_address (XEXP (x, 0));
6679 output_addr_const (file, x);
6683 output_operand_lossage ("invalid %%xn code");
6687 /* Print the address of an operand. */
6690 print_operand_address (file, x)
6694 if (GET_CODE (x) == REG)
6695 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6696 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6697 || GET_CODE (x) == LABEL_REF)
6699 output_addr_const (file, x);
6700 if (small_data_operand (x, GET_MODE (x)))
6701 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6702 reg_names[SMALL_DATA_REG]);
6703 else if (TARGET_TOC)
6706 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6708 if (REGNO (XEXP (x, 0)) == 0)
6709 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6710 reg_names[ REGNO (XEXP (x, 0)) ]);
6712 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6713 reg_names[ REGNO (XEXP (x, 1)) ]);
6715 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6717 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6718 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6721 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6722 && CONSTANT_P (XEXP (x, 1)))
6724 output_addr_const (file, XEXP (x, 1));
6725 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6729 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6730 && CONSTANT_P (XEXP (x, 1)))
6732 fprintf (file, "lo16(");
6733 output_addr_const (file, XEXP (x, 1));
6734 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6737 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6739 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6741 rtx contains_minus = XEXP (x, 1);
6745 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6746 turn it into (sym) for output_addr_const. */
6747 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6748 contains_minus = XEXP (contains_minus, 0);
6750 minus = XEXP (contains_minus, 0);
6751 symref = XEXP (minus, 0);
6752 XEXP (contains_minus, 0) = symref;
6757 name = XSTR (symref, 0);
6758 newname = alloca (strlen (name) + sizeof ("@toc"));
6759 strcpy (newname, name);
6760 strcat (newname, "@toc");
6761 XSTR (symref, 0) = newname;
6763 output_addr_const (file, XEXP (x, 1));
6765 XSTR (symref, 0) = name;
6766 XEXP (contains_minus, 0) = minus;
6769 output_addr_const (file, XEXP (x, 1));
6771 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6777 /* Target hook for assembling integer objects. The powerpc version has
6778 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6779 is defined. It also needs to handle DI-mode objects on 64-bit
6783 rs6000_assemble_integer (x, size, aligned_p)
6788 #ifdef RELOCATABLE_NEEDS_FIXUP
6789 /* Special handling for SI values. */
6790 if (size == 4 && aligned_p)
6792 extern int in_toc_section PARAMS ((void));
6793 static int recurse = 0;
6795 /* For -mrelocatable, we mark all addresses that need to be fixed up
6796 in the .fixup section. */
6797 if (TARGET_RELOCATABLE
6798 && !in_toc_section ()
6799 && !in_text_section ()
6801 && GET_CODE (x) != CONST_INT
6802 && GET_CODE (x) != CONST_DOUBLE
6808 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6810 ASM_OUTPUT_LABEL (asm_out_file, buf);
6811 fprintf (asm_out_file, "\t.long\t(");
6812 output_addr_const (asm_out_file, x);
6813 fprintf (asm_out_file, ")@fixup\n");
6814 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6815 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6816 fprintf (asm_out_file, "\t.long\t");
6817 assemble_name (asm_out_file, buf);
6818 fprintf (asm_out_file, "\n\t.previous\n");
6822 /* Remove initial .'s to turn a -mcall-aixdesc function
6823 address into the address of the descriptor, not the function
6825 else if (GET_CODE (x) == SYMBOL_REF
6826 && XSTR (x, 0)[0] == '.'
6827 && DEFAULT_ABI == ABI_AIX)
6829 const char *name = XSTR (x, 0);
6830 while (*name == '.')
6833 fprintf (asm_out_file, "\t.long\t%s\n", name);
6837 #endif /* RELOCATABLE_NEEDS_FIXUP */
6838 return default_assemble_integer (x, size, aligned_p);
6842 rs6000_reverse_condition (mode, code)
6843 enum machine_mode mode;
6846 /* Reversal of FP compares takes care -- an ordered compare
6847 becomes an unordered compare and vice versa. */
6848 if (mode == CCFPmode)
6849 return reverse_condition_maybe_unordered (code);
6851 return reverse_condition (code);
6854 /* Generate a compare for CODE. Return a brand-new rtx that
6855 represents the result of the compare. */
6858 rs6000_generate_compare (code)
6861 enum machine_mode comp_mode;
6864 if (rs6000_compare_fp_p)
6865 comp_mode = CCFPmode;
6866 else if (code == GTU || code == LTU
6867 || code == GEU || code == LEU)
6868 comp_mode = CCUNSmode;
6872 /* First, the compare. */
6873 compare_result = gen_reg_rtx (comp_mode);
6874 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6875 gen_rtx_COMPARE (comp_mode,
6877 rs6000_compare_op1)));
6879 /* Some kinds of FP comparisons need an OR operation;
6880 except for flag_unsafe_math_optimizations we don't bother. */
6881 if (rs6000_compare_fp_p
6882 && ! flag_unsafe_math_optimizations
6883 && (code == LE || code == GE
6884 || code == UNEQ || code == LTGT
6885 || code == UNGT || code == UNLT))
6887 enum rtx_code or1, or2;
6888 rtx or1_rtx, or2_rtx, compare2_rtx;
6889 rtx or_result = gen_reg_rtx (CCEQmode);
6893 case LE: or1 = LT; or2 = EQ; break;
6894 case GE: or1 = GT; or2 = EQ; break;
6895 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6896 case LTGT: or1 = LT; or2 = GT; break;
6897 case UNGT: or1 = UNORDERED; or2 = GT; break;
6898 case UNLT: or1 = UNORDERED; or2 = LT; break;
6901 validate_condition_mode (or1, comp_mode);
6902 validate_condition_mode (or2, comp_mode);
6903 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6904 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6905 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6906 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6908 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6910 compare_result = or_result;
6914 validate_condition_mode (code, GET_MODE (compare_result));
6916 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6920 /* Emit the RTL for an sCOND pattern. */
6923 rs6000_emit_sCOND (code, result)
6928 enum machine_mode op_mode;
6930 condition_rtx = rs6000_generate_compare (code);
6932 op_mode = GET_MODE (rs6000_compare_op0);
6933 if (op_mode == VOIDmode)
6934 op_mode = GET_MODE (rs6000_compare_op1);
6936 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6938 PUT_MODE (condition_rtx, DImode);
6939 convert_move (result, condition_rtx, 0);
6943 PUT_MODE (condition_rtx, SImode);
6944 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6948 /* Emit a branch of kind CODE to location LOC. */
6951 rs6000_emit_cbranch (code, loc)
6955 rtx condition_rtx, loc_ref;
6957 condition_rtx = rs6000_generate_compare (code);
6958 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6959 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6960 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6964 /* Return the string to output a conditional branch to LABEL, which is
6965 the operand number of the label, or -1 if the branch is really a
6968 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
6969 condition code register and its mode specifies what kind of
6972 REVERSED is non-zero if we should reverse the sense of the comparison.
6974 INSN is the insn. */
6977 output_cbranch (op, label, reversed, insn)
6983 static char string[64];
6984 enum rtx_code code = GET_CODE (op);
6985 rtx cc_reg = XEXP (op, 0);
6986 enum machine_mode mode = GET_MODE (cc_reg);
6987 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
6988 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
6989 int really_reversed = reversed ^ need_longbranch;
6995 validate_condition_mode (code, mode);
6997 /* Work out which way this really branches. We could use
6998 reverse_condition_maybe_unordered here always but this
6999 makes the resulting assembler clearer. */
7000 if (really_reversed)
7001 code = rs6000_reverse_condition (mode, code);
7005 /* Not all of these are actually distinct opcodes, but
7006 we distinguish them for clarity of the resulting assembler. */
7008 ccode = "ne"; break;
7010 ccode = "eq"; break;
7012 ccode = "ge"; break;
7013 case GT: case GTU: case UNGT:
7014 ccode = "gt"; break;
7016 ccode = "le"; break;
7017 case LT: case LTU: case UNLT:
7018 ccode = "lt"; break;
7019 case UNORDERED: ccode = "un"; break;
7020 case ORDERED: ccode = "nu"; break;
7021 case UNGE: ccode = "nl"; break;
7022 case UNLE: ccode = "ng"; break;
7027 /* Maybe we have a guess as to how likely the branch is.
7028 The old mnemonics don't have a way to specify this information. */
7029 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7030 if (note != NULL_RTX)
7032 /* PROB is the difference from 50%. */
7033 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7035 /* For branches that are very close to 50%, assume not-taken. */
7036 if (abs (prob) > REG_BR_PROB_BASE / 20
7037 && ((prob > 0) ^ need_longbranch))
7046 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7048 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7050 /* We need to escape any '%' characters in the reg_names string.
7051 Assume they'd only be the first character... */
7052 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7054 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7058 /* If the branch distance was too far, we may have to use an
7059 unconditional branch to go the distance. */
7060 if (need_longbranch)
7061 s += sprintf (s, ",$+8\n\tb %s", label);
7063 s += sprintf (s, ",%s", label);
7069 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7070 operands of the last comparison is nonzero/true, FALSE_COND if it
7071 is zero/false. Return 0 if the hardware has no such operation. */
7074 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7080 enum rtx_code code = GET_CODE (op);
7081 rtx op0 = rs6000_compare_op0;
7082 rtx op1 = rs6000_compare_op1;
7084 enum machine_mode mode = GET_MODE (op0);
7087 /* First, work out if the hardware can do this at all, or
7088 if it's too slow... */
7089 /* If the comparison is an integer one, since we only have fsel
7090 it'll be cheaper to use a branch. */
7091 if (! rs6000_compare_fp_p)
7094 /* Eliminate half of the comparisons by switching operands, this
7095 makes the remaining code simpler. */
7096 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7097 || code == LTGT || code == LT)
7099 code = reverse_condition_maybe_unordered (code);
7101 true_cond = false_cond;
7105 /* UNEQ and LTGT take four instructions for a comparison with zero,
7106 it'll probably be faster to use a branch here too. */
7110 if (GET_CODE (op1) == CONST_DOUBLE)
7111 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7113 /* We're going to try to implement comparions by performing
7114 a subtract, then comparing against zero. Unfortunately,
7115 Inf - Inf is NaN which is not zero, and so if we don't
7116 know that the the operand is finite and the comparison
7117 would treat EQ different to UNORDERED, we can't do it. */
7118 if (! flag_unsafe_math_optimizations
7119 && code != GT && code != UNGE
7120 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7121 /* Constructs of the form (a OP b ? a : b) are safe. */
7122 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7123 || (! rtx_equal_p (op0, true_cond)
7124 && ! rtx_equal_p (op1, true_cond))))
7126 /* At this point we know we can use fsel. */
7128 /* Reduce the comparison to a comparison against zero. */
7129 temp = gen_reg_rtx (mode);
7130 emit_insn (gen_rtx_SET (VOIDmode, temp,
7131 gen_rtx_MINUS (mode, op0, op1)));
7133 op1 = CONST0_RTX (mode);
7135 /* If we don't care about NaNs we can reduce some of the comparisons
7136 down to faster ones. */
7137 if (flag_unsafe_math_optimizations)
7143 true_cond = false_cond;
7156 /* Now, reduce everything down to a GE. */
7163 temp = gen_reg_rtx (mode);
7164 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7169 temp = gen_reg_rtx (mode);
7170 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7175 temp = gen_reg_rtx (mode);
7176 emit_insn (gen_rtx_SET (VOIDmode, temp,
7178 gen_rtx_ABS (mode, op0))));
7183 temp = gen_reg_rtx (mode);
7184 emit_insn (gen_rtx_SET (VOIDmode, temp,
7185 gen_rtx_IF_THEN_ELSE (mode,
7186 gen_rtx_GE (VOIDmode,
7188 true_cond, false_cond)));
7190 true_cond = false_cond;
7192 temp = gen_reg_rtx (mode);
7193 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7198 temp = gen_reg_rtx (mode);
7199 emit_insn (gen_rtx_SET (VOIDmode, temp,
7200 gen_rtx_IF_THEN_ELSE (mode,
7201 gen_rtx_GE (VOIDmode,
7203 true_cond, false_cond)));
7205 false_cond = true_cond;
7207 temp = gen_reg_rtx (mode);
7208 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7216 emit_insn (gen_rtx_SET (VOIDmode, dest,
7217 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7218 gen_rtx_GE (VOIDmode,
7220 true_cond, false_cond)));
7225 rs6000_emit_minmax (dest, code, op0, op1)
7231 enum machine_mode mode = GET_MODE (op0);
7233 if (code == SMAX || code == UMAX)
7234 target = emit_conditional_move (dest, GE, op0, op1, mode,
7237 target = emit_conditional_move (dest, GE, op0, op1, mode,
7239 if (target == NULL_RTX)
7242 emit_move_insn (dest, target);
7245 /* This page contains routines that are used to determine what the
7246 function prologue and epilogue code will do and write them out. */
7248 /* Return the first fixed-point register that is required to be
7249 saved. 32 if none. */
7252 first_reg_to_save ()
7256 /* Find lowest numbered live register. */
7257 for (first_reg = 13; first_reg <= 31; first_reg++)
7258 if (regs_ever_live[first_reg]
7259 && (! call_used_regs[first_reg]
7260 || (first_reg == PIC_OFFSET_TABLE_REGNUM
7261 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7262 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7265 if (current_function_profile)
7267 /* AIX must save/restore every register that contains a parameter
7268 before/after the .__mcount call plus an additional register
7269 for the static chain, if needed; use registers from 30 down to 22
7271 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7273 int last_parm_reg, profile_first_reg;
7275 /* Figure out last used parameter register. The proper thing
7276 to do is to walk incoming args of the function. A function
7277 might have live parameter registers even if it has no
7279 for (last_parm_reg = 10;
7280 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7284 /* Calculate first reg for saving parameter registers
7286 Skip reg 31 which may contain the frame pointer. */
7287 profile_first_reg = (33 - last_parm_reg
7288 - (current_function_needs_context ? 1 : 0));
7290 /* Need to skip another reg to account for R31 being PICBASE
7291 (when flag_pic is set) or R30 being used as the frame
7292 pointer (when flag_pic is not set). */
7293 --profile_first_reg;
7295 /* Do not save frame pointer if no parameters needs to be saved. */
7296 if (profile_first_reg == 31)
7297 profile_first_reg = 32;
7299 if (first_reg > profile_first_reg)
7300 first_reg = profile_first_reg;
7303 /* SVR4 may need one register to preserve the static chain. */
7304 else if (current_function_needs_context)
7306 /* Skip reg 31 which may contain the frame pointer. */
7313 if (flag_pic && current_function_uses_pic_offset_table &&
7314 (first_reg > PIC_OFFSET_TABLE_REGNUM))
7315 return PIC_OFFSET_TABLE_REGNUM;
7321 /* Similar, for FP regs. */
7324 first_fp_reg_to_save ()
7328 /* Find lowest numbered live register. */
7329 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7330 if (regs_ever_live[first_reg])
7336 /* Similar, for AltiVec regs. */
7339 first_altivec_reg_to_save ()
7343 /* Stack frame remains as is unless we are in AltiVec ABI. */
7344 if (! TARGET_ALTIVEC_ABI)
7345 return LAST_ALTIVEC_REGNO + 1;
7347 /* Find lowest numbered live register. */
7348 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7349 if (regs_ever_live[i])
7355 /* Return a 32-bit mask of the AltiVec registers we need to set in
7356 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7357 the 32-bit word is 0. */
7360 compute_vrsave_mask ()
7362 unsigned int i, mask = 0;
7364 /* First, find out if we use _any_ altivec registers. */
7365 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7366 if (regs_ever_live[i])
7367 mask |= ALTIVEC_REG_BIT (i);
7372 /* Next, add all registers that are call-clobbered. We do this
7373 because post-reload register optimizers such as regrename_optimize
7374 may choose to use them. They never change the register class
7375 chosen by reload, so cannot create new uses of altivec registers
7376 if there were none before, so the early exit above is safe. */
7377 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7378 altivec registers not saved in the mask, which might well make the
7379 adjustments below more effective in eliding the save/restore of
7380 VRSAVE in small functions. */
7381 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7382 if (call_used_regs[i])
7383 mask |= ALTIVEC_REG_BIT (i);
7385 /* Next, remove the argument registers from the set. These must
7386 be in the VRSAVE mask set by the caller, so we don't need to add
7387 them in again. More importantly, the mask we compute here is
7388 used to generate CLOBBERs in the set_vrsave insn, and we do not
7389 wish the argument registers to die. */
7390 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7391 mask &= ~ALTIVEC_REG_BIT (i);
7393 /* Similarly, remove the return value from the set. */
7396 diddle_return_value (is_altivec_return_reg, &yes);
7398 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7405 is_altivec_return_reg (reg, xyes)
7409 bool *yes = (bool *) xyes;
7410 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7415 /* Calculate the stack information for the current function. This is
7416 complicated by having two separate calling sequences, the AIX calling
7417 sequence and the V.4 calling sequence.
7419 AIX (and Darwin/Mac OS X) stack frames look like:
7421 SP----> +---------------------------------------+
7422 | back chain to caller | 0 0
7423 +---------------------------------------+
7424 | saved CR | 4 8 (8-11)
7425 +---------------------------------------+
7427 +---------------------------------------+
7428 | reserved for compilers | 12 24
7429 +---------------------------------------+
7430 | reserved for binders | 16 32
7431 +---------------------------------------+
7432 | saved TOC pointer | 20 40
7433 +---------------------------------------+
7434 | Parameter save area (P) | 24 48
7435 +---------------------------------------+
7436 | Alloca space (A) | 24+P etc.
7437 +---------------------------------------+
7438 | Local variable space (L) | 24+P+A
7439 +---------------------------------------+
7440 | Float/int conversion temporary (X) | 24+P+A+L
7441 +---------------------------------------+
7442 | Save area for AltiVec registers (W) | 24+P+A+L+X
7443 +---------------------------------------+
7444 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7445 +---------------------------------------+
7446 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7447 +---------------------------------------+
7448 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7449 +---------------------------------------+
7450 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7451 +---------------------------------------+
7452 old SP->| back chain to caller's caller |
7453 +---------------------------------------+
7455 The required alignment for AIX configurations is two words (i.e., 8
7459 V.4 stack frames look like:
7461 SP----> +---------------------------------------+
7462 | back chain to caller | 0
7463 +---------------------------------------+
7464 | caller's saved LR | 4
7465 +---------------------------------------+
7466 | Parameter save area (P) | 8
7467 +---------------------------------------+
7468 | Alloca space (A) | 8+P
7469 +---------------------------------------+
7470 | Varargs save area (V) | 8+P+A
7471 +---------------------------------------+
7472 | Local variable space (L) | 8+P+A+V
7473 +---------------------------------------+
7474 | Float/int conversion temporary (X) | 8+P+A+V+L
7475 +---------------------------------------+
7476 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7477 +---------------------------------------+
7478 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7479 +---------------------------------------+
7480 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7481 +---------------------------------------+
7482 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7483 +---------------------------------------+
7484 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7485 +---------------------------------------+
7486 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7487 +---------------------------------------+
7488 old SP->| back chain to caller's caller |
7489 +---------------------------------------+
7491 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7492 given. (But note below and in sysv4.h that we require only 8 and
7493 may round up the size of our stack frame anyways. The historical
7494 reason is early versions of powerpc-linux which didn't properly
7495 align the stack at program startup. A happy side-effect is that
7496 -mno-eabi libraries can be used with -meabi programs.)
7498 The EABI configuration defaults to the V.4 layout, unless
7499 -mcall-aix is used, in which case the AIX layout is used. However,
7500 the stack alignment requirements may differ. If -mno-eabi is not
7501 given, the required stack alignment is 8 bytes; if -mno-eabi is
7502 given, the required alignment is 16 bytes. (But see V.4 comment
7505 #ifndef ABI_STACK_BOUNDARY
7506 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7510 rs6000_stack_info ()
7512 static rs6000_stack_t info, zero_info;
7513 rs6000_stack_t *info_ptr = &info;
7514 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7515 enum rs6000_abi abi;
7519 /* Zero all fields portably. */
7522 /* Select which calling sequence. */
7523 info_ptr->abi = abi = DEFAULT_ABI;
7525 /* Calculate which registers need to be saved & save area size. */
7526 info_ptr->first_gp_reg_save = first_reg_to_save ();
7527 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7528 even if it currently looks like we won't. */
7529 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7530 || (flag_pic == 1 && abi == ABI_V4)
7531 || (flag_pic && abi == ABI_DARWIN))
7532 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7533 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7535 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7537 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7538 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7540 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7541 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7542 - info_ptr->first_altivec_reg_save);
7544 /* Does this function call anything? */
7545 info_ptr->calls_p = (! current_function_is_leaf
7546 || cfun->machine->ra_needs_full_frame);
7548 /* Determine if we need to save the link register. */
7549 if (rs6000_ra_ever_killed ()
7550 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7551 #ifdef TARGET_RELOCATABLE
7552 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7554 || (info_ptr->first_fp_reg_save != 64
7555 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7556 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7557 || (abi == ABI_V4 && current_function_calls_alloca)
7558 || (DEFAULT_ABI == ABI_DARWIN
7560 && current_function_uses_pic_offset_table)
7561 || info_ptr->calls_p)
7563 info_ptr->lr_save_p = 1;
7564 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7567 /* Determine if we need to save the condition code registers. */
7568 if (regs_ever_live[CR2_REGNO]
7569 || regs_ever_live[CR3_REGNO]
7570 || regs_ever_live[CR4_REGNO])
7572 info_ptr->cr_save_p = 1;
7574 info_ptr->cr_size = reg_size;
7577 /* If the current function calls __builtin_eh_return, then we need
7578 to allocate stack space for registers that will hold data for
7579 the exception handler. */
7580 if (current_function_calls_eh_return)
7583 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7585 ehrd_size = i * UNITS_PER_WORD;
7590 /* Determine various sizes. */
7591 info_ptr->reg_size = reg_size;
7592 info_ptr->fixed_size = RS6000_SAVE_AREA;
7593 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7594 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7595 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7598 if (TARGET_ALTIVEC_ABI)
7600 info_ptr->vrsave_mask = compute_vrsave_mask ();
7601 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7605 info_ptr->vrsave_mask = 0;
7606 info_ptr->vrsave_size = 0;
7609 /* Calculate the offsets. */
7617 case ABI_AIX_NODESC:
7619 info_ptr->fp_save_offset = - info_ptr->fp_size;
7620 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7622 if (TARGET_ALTIVEC_ABI)
7624 info_ptr->vrsave_save_offset
7625 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7627 /* Align stack so vector save area is on a quadword boundary. */
7628 if (info_ptr->altivec_size != 0)
7629 info_ptr->altivec_padding_size
7630 = 16 - (-info_ptr->vrsave_save_offset % 16);
7632 info_ptr->altivec_padding_size = 0;
7634 info_ptr->altivec_save_offset
7635 = info_ptr->vrsave_save_offset
7636 - info_ptr->altivec_padding_size
7637 - info_ptr->altivec_size;
7639 /* Adjust for AltiVec case. */
7640 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7643 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7644 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7645 info_ptr->lr_save_offset = 2*reg_size;
7649 info_ptr->fp_save_offset = - info_ptr->fp_size;
7650 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7651 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7653 if (TARGET_ALTIVEC_ABI)
7655 info_ptr->vrsave_save_offset
7656 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7658 /* Align stack so vector save area is on a quadword boundary. */
7659 if (info_ptr->altivec_size != 0)
7660 info_ptr->altivec_padding_size
7661 = 16 - (-info_ptr->vrsave_save_offset % 16);
7663 info_ptr->altivec_padding_size = 0;
7665 info_ptr->altivec_save_offset
7666 = info_ptr->vrsave_save_offset
7667 - info_ptr->altivec_padding_size
7668 - info_ptr->altivec_size;
7670 /* Adjust for AltiVec case. */
7671 info_ptr->toc_save_offset
7672 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7675 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7676 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7677 info_ptr->lr_save_offset = reg_size;
7681 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7683 + info_ptr->altivec_size
7684 + info_ptr->altivec_padding_size
7685 + info_ptr->vrsave_size
7689 + info_ptr->vrsave_size
7690 + info_ptr->toc_size,
7691 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7694 total_raw_size = (info_ptr->vars_size
7695 + info_ptr->parm_size
7696 + info_ptr->save_size
7697 + info_ptr->varargs_size
7698 + info_ptr->fixed_size);
7700 info_ptr->total_size =
7701 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7703 /* Determine if we need to allocate any stack frame:
7705 For AIX we need to push the stack if a frame pointer is needed
7706 (because the stack might be dynamically adjusted), if we are
7707 debugging, if we make calls, or if the sum of fp_save, gp_save,
7708 and local variables are more than the space needed to save all
7709 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7710 + 18*8 = 288 (GPR13 reserved).
7712 For V.4 we don't have the stack cushion that AIX uses, but assume
7713 that the debugger can handle stackless frames. */
7715 if (info_ptr->calls_p)
7716 info_ptr->push_p = 1;
7718 else if (abi == ABI_V4)
7719 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
7720 || info_ptr->calls_p);
7723 info_ptr->push_p = (frame_pointer_needed
7724 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7725 || ((total_raw_size - info_ptr->fixed_size)
7726 > (TARGET_32BIT ? 220 : 288)));
7728 /* Zero offsets if we're not saving those registers. */
7729 if (info_ptr->fp_size == 0)
7730 info_ptr->fp_save_offset = 0;
7732 if (info_ptr->gp_size == 0)
7733 info_ptr->gp_save_offset = 0;
7735 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7736 info_ptr->altivec_save_offset = 0;
7738 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7739 info_ptr->vrsave_save_offset = 0;
7741 if (! info_ptr->lr_save_p)
7742 info_ptr->lr_save_offset = 0;
7744 if (! info_ptr->cr_save_p)
7745 info_ptr->cr_save_offset = 0;
7747 if (! info_ptr->toc_save_p)
7748 info_ptr->toc_save_offset = 0;
7754 debug_stack_info (info)
7755 rs6000_stack_t *info;
7757 const char *abi_string;
7760 info = rs6000_stack_info ();
7762 fprintf (stderr, "\nStack information for function %s:\n",
7763 ((current_function_decl && DECL_NAME (current_function_decl))
7764 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7769 default: abi_string = "Unknown"; break;
7770 case ABI_NONE: abi_string = "NONE"; break;
7772 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7773 case ABI_DARWIN: abi_string = "Darwin"; break;
7774 case ABI_V4: abi_string = "V.4"; break;
7777 fprintf (stderr, "\tABI = %5s\n", abi_string);
7779 if (TARGET_ALTIVEC_ABI)
7780 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7782 if (info->first_gp_reg_save != 32)
7783 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7785 if (info->first_fp_reg_save != 64)
7786 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7788 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7789 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7790 info->first_altivec_reg_save);
7792 if (info->lr_save_p)
7793 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7795 if (info->cr_save_p)
7796 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7798 if (info->toc_save_p)
7799 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7801 if (info->vrsave_mask)
7802 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7805 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7808 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7810 if (info->gp_save_offset)
7811 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7813 if (info->fp_save_offset)
7814 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7816 if (info->altivec_save_offset)
7817 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7818 info->altivec_save_offset);
7820 if (info->vrsave_save_offset)
7821 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7822 info->vrsave_save_offset);
7824 if (info->lr_save_offset)
7825 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7827 if (info->cr_save_offset)
7828 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7830 if (info->toc_save_offset)
7831 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7833 if (info->varargs_save_offset)
7834 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7836 if (info->total_size)
7837 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7839 if (info->varargs_size)
7840 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7842 if (info->vars_size)
7843 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7845 if (info->parm_size)
7846 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7848 if (info->fixed_size)
7849 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7852 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7855 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7857 if (info->altivec_size)
7858 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7860 if (info->vrsave_size)
7861 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7863 if (info->altivec_padding_size)
7864 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7865 info->altivec_padding_size);
7868 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7871 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7874 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7876 if (info->save_size)
7877 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7879 if (info->reg_size != 4)
7880 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7882 fprintf (stderr, "\n");
7886 rs6000_return_addr (count, frame)
7890 /* Currently we don't optimize very well between prolog and body
7891 code and for PIC code the code can be actually quite bad, so
7892 don't try to be too clever here. */
7895 || DEFAULT_ABI == ABI_AIX
7896 || DEFAULT_ABI == ABI_AIX_NODESC)
7898 cfun->machine->ra_needs_full_frame = 1;
7905 plus_constant (copy_to_reg
7906 (gen_rtx_MEM (Pmode,
7907 memory_address (Pmode, frame))),
7908 RETURN_ADDRESS_OFFSET)));
7911 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7915 rs6000_ra_ever_killed ()
7919 #ifdef ASM_OUTPUT_MI_THUNK
7920 if (current_function_is_thunk)
7923 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7924 || cfun->machine->ra_needs_full_frame)
7925 return regs_ever_live[LINK_REGISTER_REGNUM];
7927 push_topmost_sequence ();
7929 pop_topmost_sequence ();
7931 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7935 /* Add a REG_MAYBE_DEAD note to the insn. */
7937 rs6000_maybe_dead (insn)
7940 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7945 /* Emit instructions needed to load the TOC register.
7946 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7947 a constant pool; or for SVR4 -fpic. */
7950 rs6000_emit_load_toc_table (fromprolog)
7954 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
7956 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
7958 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7960 rtx temp = (fromprolog
7961 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7962 : gen_reg_rtx (Pmode));
7963 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
7964 rs6000_maybe_dead (emit_move_insn (dest, temp));
7966 else if (flag_pic == 2)
7969 rtx tempLR = (fromprolog
7970 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7971 : gen_reg_rtx (Pmode));
7972 rtx temp0 = (fromprolog
7973 ? gen_rtx_REG (Pmode, 0)
7974 : gen_reg_rtx (Pmode));
7977 /* possibly create the toc section */
7978 if (! toc_initialized)
7981 function_section (current_function_decl);
7988 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
7989 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7991 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
7992 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7994 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
7996 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7997 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8004 static int reload_toc_labelno = 0;
8006 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8008 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8009 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8011 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8014 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8015 rs6000_maybe_dead (emit_move_insn (temp0,
8016 gen_rtx_MEM (Pmode, dest)));
8018 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8020 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8022 /* This is for AIX code running in non-PIC ELF. */
8025 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8026 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8028 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8029 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8037 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8039 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8044 get_TOC_alias_set ()
8046 static int set = -1;
8048 set = new_alias_set ();
8052 /* This retuns nonzero if the current function uses the TOC. This is
8053 determined by the presence of (unspec ... 7), which is generated by
8054 the various load_toc_* patterns. */
8061 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8064 rtx pat = PATTERN (insn);
8067 if (GET_CODE (pat) == PARALLEL)
8068 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8069 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8070 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8077 create_TOC_reference (symbol)
8080 return gen_rtx_PLUS (Pmode,
8081 gen_rtx_REG (Pmode, TOC_REGISTER),
8082 gen_rtx_CONST (Pmode,
8083 gen_rtx_MINUS (Pmode, symbol,
8084 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8088 /* __throw will restore its own return address to be the same as the
8089 return address of the function that the throw is being made to.
8090 This is unfortunate, because we want to check the original
8091 return address to see if we need to restore the TOC.
8092 So we have to squirrel it away here.
8093 This is used only in compiling __throw and __rethrow.
8095 Most of this code should be removed by CSE. */
8096 static rtx insn_after_throw;
8098 /* This does the saving... */
8100 rs6000_aix_emit_builtin_unwind_init ()
8103 rtx stack_top = gen_reg_rtx (Pmode);
8104 rtx opcode_addr = gen_reg_rtx (Pmode);
8106 insn_after_throw = gen_reg_rtx (SImode);
8108 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8109 emit_move_insn (stack_top, mem);
8111 mem = gen_rtx_MEM (Pmode,
8112 gen_rtx_PLUS (Pmode, stack_top,
8113 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8114 emit_move_insn (opcode_addr, mem);
8115 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8118 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8119 in _eh.o). Only used on AIX.
8121 The idea is that on AIX, function calls look like this:
8122 bl somefunction-trampoline
8126 somefunction-trampoline:
8128 ... load function address in the count register ...
8130 or like this, if the linker determines that this is not a cross-module call
8131 and so the TOC need not be restored:
8134 or like this, if the compiler could determine that this is not a
8137 now, the tricky bit here is that register 2 is saved and restored
8138 by the _linker_, so we can't readily generate debugging information
8139 for it. So we need to go back up the call chain looking at the
8140 insns at return addresses to see which calls saved the TOC register
8141 and so see where it gets restored from.
8143 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8144 just before the actual epilogue.
8146 On the bright side, this incurs no space or time overhead unless an
8147 exception is thrown, except for the extra code in libgcc.a.
8149 The parameter STACKSIZE is a register containing (at runtime)
8150 the amount to be popped off the stack in addition to the stack frame
8151 of this routine (which will be __throw or __rethrow, and so is
8152 guaranteed to have a stack frame). */
8155 rs6000_emit_eh_toc_restore (stacksize)
8159 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8160 rtx tocompare = gen_reg_rtx (SImode);
8161 rtx opcode = gen_reg_rtx (SImode);
8162 rtx opcode_addr = gen_reg_rtx (Pmode);
8164 rtx loop_start = gen_label_rtx ();
8165 rtx no_toc_restore_needed = gen_label_rtx ();
8166 rtx loop_exit = gen_label_rtx ();
8168 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8169 set_mem_alias_set (mem, rs6000_sr_alias_set);
8170 emit_move_insn (bottom_of_stack, mem);
8172 top_of_stack = expand_binop (Pmode, add_optab,
8173 bottom_of_stack, stacksize,
8174 NULL_RTX, 1, OPTAB_WIDEN);
8176 emit_move_insn (tocompare,
8177 GEN_INT (trunc_int_for_mode (TARGET_32BIT
8179 : 0xE8410028, SImode)));
8181 if (insn_after_throw == NULL_RTX)
8183 emit_move_insn (opcode, insn_after_throw);
8185 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8186 emit_label (loop_start);
8188 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8189 SImode, NULL_RTX, NULL_RTX,
8190 no_toc_restore_needed);
8192 mem = gen_rtx_MEM (Pmode,
8193 gen_rtx_PLUS (Pmode, bottom_of_stack,
8194 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8195 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8197 emit_label (no_toc_restore_needed);
8198 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8199 Pmode, NULL_RTX, NULL_RTX,
8202 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8203 set_mem_alias_set (mem, rs6000_sr_alias_set);
8204 emit_move_insn (bottom_of_stack, mem);
8206 mem = gen_rtx_MEM (Pmode,
8207 gen_rtx_PLUS (Pmode, bottom_of_stack,
8208 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8209 emit_move_insn (opcode_addr, mem);
8210 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8212 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8213 emit_jump (loop_start);
8214 emit_note (NULL, NOTE_INSN_LOOP_END);
8215 emit_label (loop_exit);
8217 #endif /* TARGET_AIX */
8219 /* This ties together stack memory (MEM with an alias set of
8220 rs6000_sr_alias_set) and the change to the stack pointer. */
8223 rs6000_emit_stack_tie ()
8225 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8227 set_mem_alias_set (mem, rs6000_sr_alias_set);
8228 emit_insn (gen_stack_tie (mem));
8231 /* Emit the correct code for allocating stack space, as insns.
8232 If COPY_R12, make sure a copy of the old frame is left in r12.
8233 The generated code may use hard register 0 as a temporary. */
8236 rs6000_emit_allocate_stack (size, copy_r12)
8241 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8242 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8243 rtx todec = GEN_INT (-size);
8245 if (current_function_limit_stack)
8247 if (REG_P (stack_limit_rtx)
8248 && REGNO (stack_limit_rtx) > 1
8249 && REGNO (stack_limit_rtx) <= 31)
8251 emit_insn (Pmode == SImode
8252 ? gen_addsi3 (tmp_reg,
8255 : gen_adddi3 (tmp_reg,
8259 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8262 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8264 && DEFAULT_ABI == ABI_V4)
8266 rtx toload = gen_rtx_CONST (VOIDmode,
8267 gen_rtx_PLUS (Pmode,
8271 emit_insn (gen_elf_high (tmp_reg, toload));
8272 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8273 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8277 warning ("stack limit expression is not supported");
8280 if (copy_r12 || ! TARGET_UPDATE)
8281 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8287 /* Need a note here so that try_split doesn't get confused. */
8288 if (get_last_insn() == NULL_RTX)
8289 emit_note (0, NOTE_INSN_DELETED);
8290 insn = emit_move_insn (tmp_reg, todec);
8291 try_split (PATTERN (insn), insn, 0);
8295 if (Pmode == SImode)
8296 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8299 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8304 if (Pmode == SImode)
8305 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8307 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8308 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8309 gen_rtx_REG (Pmode, 12));
8312 RTX_FRAME_RELATED_P (insn) = 1;
8314 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8315 gen_rtx_SET (VOIDmode, stack_reg,
8316 gen_rtx_PLUS (Pmode, stack_reg,
8321 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8324 (mem (plus (blah) (regXX)))
8328 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8331 altivec_frame_fixup (insn, reg, val)
8337 real = copy_rtx (PATTERN (insn));
8339 real = replace_rtx (real, reg, GEN_INT (val));
8341 RTX_FRAME_RELATED_P (insn) = 1;
8342 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8347 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8348 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8349 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8350 deduce these equivalences by itself so it wasn't necessary to hold
8351 its hand so much. */
8354 rs6000_frame_related (insn, reg, val, reg2, rreg)
8363 real = copy_rtx (PATTERN (insn));
8365 real = replace_rtx (real, reg,
8366 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8367 STACK_POINTER_REGNUM),
8370 /* We expect that 'real' is either a SET or a PARALLEL containing
8371 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8372 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8374 if (GET_CODE (real) == SET)
8378 temp = simplify_rtx (SET_SRC (set));
8380 SET_SRC (set) = temp;
8381 temp = simplify_rtx (SET_DEST (set));
8383 SET_DEST (set) = temp;
8384 if (GET_CODE (SET_DEST (set)) == MEM)
8386 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8388 XEXP (SET_DEST (set), 0) = temp;
8391 else if (GET_CODE (real) == PARALLEL)
8394 for (i = 0; i < XVECLEN (real, 0); i++)
8395 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8397 rtx set = XVECEXP (real, 0, i);
8399 temp = simplify_rtx (SET_SRC (set));
8401 SET_SRC (set) = temp;
8402 temp = simplify_rtx (SET_DEST (set));
8404 SET_DEST (set) = temp;
8405 if (GET_CODE (SET_DEST (set)) == MEM)
8407 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8409 XEXP (SET_DEST (set), 0) = temp;
8411 RTX_FRAME_RELATED_P (set) = 1;
8417 if (reg2 != NULL_RTX)
8418 real = replace_rtx (real, reg2, rreg);
8420 RTX_FRAME_RELATED_P (insn) = 1;
8421 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8426 /* Returns an insn that has a vrsave set operation with the
8427 appropriate CLOBBERs. */
8430 generate_set_vrsave (reg, info, epiloguep)
8432 rs6000_stack_t *info;
8436 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8437 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8440 = gen_rtx_SET (VOIDmode,
8442 gen_rtx_UNSPEC_VOLATILE (SImode,
8443 gen_rtvec (2, reg, vrsave),
8448 /* We need to clobber the registers in the mask so the scheduler
8449 does not move sets to VRSAVE before sets of AltiVec registers.
8451 However, if the function receives nonlocal gotos, reload will set
8452 all call saved registers live. We will end up with:
8454 (set (reg 999) (mem))
8455 (parallel [ (set (reg vrsave) (unspec blah))
8456 (clobber (reg 999))])
8458 The clobber will cause the store into reg 999 to be dead, and
8459 flow will attempt to delete an epilogue insn. In this case, we
8460 need an unspec use/set of the register. */
8462 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8463 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8465 if (!epiloguep || call_used_regs [i])
8466 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8467 gen_rtx_REG (V4SImode, i));
8470 rtx reg = gen_rtx_REG (V4SImode, i);
8473 = gen_rtx_SET (VOIDmode,
8475 gen_rtx_UNSPEC (V4SImode,
8476 gen_rtvec (1, reg), 27));
8480 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8482 for (i = 0; i < nclobs; ++i)
8483 XVECEXP (insn, 0, i) = clobs[i];
8488 /* Emit function prologue as insns. */
8491 rs6000_emit_prologue ()
8493 rs6000_stack_t *info = rs6000_stack_info ();
8494 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8495 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8496 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8497 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8498 rtx frame_reg_rtx = sp_reg_rtx;
8499 rtx cr_save_rtx = NULL;
8501 int saving_FPRs_inline;
8502 int using_store_multiple;
8503 HOST_WIDE_INT sp_offset = 0;
8505 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8506 && info->first_gp_reg_save < 31);
8507 saving_FPRs_inline = (info->first_fp_reg_save == 64
8508 || FP_SAVE_INLINE (info->first_fp_reg_save));
8510 /* For V.4, update stack before we do any saving and set back pointer. */
8511 if (info->push_p && DEFAULT_ABI == ABI_V4)
8513 if (info->total_size < 32767)
8514 sp_offset = info->total_size;
8516 frame_reg_rtx = frame_ptr_rtx;
8517 rs6000_emit_allocate_stack (info->total_size,
8518 (frame_reg_rtx != sp_reg_rtx
8521 || info->first_fp_reg_save < 64
8522 || info->first_gp_reg_save < 32
8524 if (frame_reg_rtx != sp_reg_rtx)
8525 rs6000_emit_stack_tie ();
8528 /* Save AltiVec registers if needed. */
8529 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8533 /* There should be a non inline version of this, for when we
8534 are saving lots of vector registers. */
8535 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8536 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8538 rtx areg, savereg, mem;
8541 offset = info->altivec_save_offset + sp_offset
8542 + 16 * (i - info->first_altivec_reg_save);
8544 savereg = gen_rtx_REG (V4SImode, i);
8546 areg = gen_rtx_REG (Pmode, 0);
8547 emit_move_insn (areg, GEN_INT (offset));
8549 /* AltiVec addressing mode is [reg+reg]. */
8550 mem = gen_rtx_MEM (V4SImode,
8551 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8553 set_mem_alias_set (mem, rs6000_sr_alias_set);
8555 insn = emit_move_insn (mem, savereg);
8557 altivec_frame_fixup (insn, areg, offset);
8561 /* VRSAVE is a bit vector representing which AltiVec registers
8562 are used. The OS uses this to determine which vector
8563 registers to save on a context switch. We need to save
8564 VRSAVE on the stack frame, add whatever AltiVec registers we
8565 used in this function, and do the corresponding magic in the
8568 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8570 rtx reg, mem, vrsave;
8573 /* Get VRSAVE onto a GPR. */
8574 reg = gen_rtx_REG (SImode, 12);
8575 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8576 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8579 offset = info->vrsave_save_offset + sp_offset;
8581 = gen_rtx_MEM (SImode,
8582 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8583 set_mem_alias_set (mem, rs6000_sr_alias_set);
8584 insn = emit_move_insn (mem, reg);
8586 /* Include the registers in the mask. */
8587 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8589 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8592 /* If we use the link register, get it into r0. */
8593 if (info->lr_save_p)
8594 emit_move_insn (gen_rtx_REG (Pmode, 0),
8595 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8597 /* If we need to save CR, put it into r12. */
8598 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8600 cr_save_rtx = gen_rtx_REG (SImode, 12);
8601 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8604 /* Do any required saving of fpr's. If only one or two to save, do
8605 it ourselves. Otherwise, call function. */
8606 if (saving_FPRs_inline)
8609 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8610 if ((regs_ever_live[info->first_fp_reg_save+i]
8611 && ! call_used_regs[info->first_fp_reg_save+i]))
8614 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8615 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8616 GEN_INT (info->fp_save_offset
8619 mem = gen_rtx_MEM (DFmode, addr);
8620 set_mem_alias_set (mem, rs6000_sr_alias_set);
8622 insn = emit_move_insn (mem, reg);
8623 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8624 NULL_RTX, NULL_RTX);
8627 else if (info->first_fp_reg_save != 64)
8631 const char *alloc_rname;
8633 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8635 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8637 LINK_REGISTER_REGNUM));
8638 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8639 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8640 alloc_rname = ggc_strdup (rname);
8641 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8642 gen_rtx_SYMBOL_REF (Pmode,
8644 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8647 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8648 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8649 GEN_INT (info->fp_save_offset
8650 + sp_offset + 8*i));
8651 mem = gen_rtx_MEM (DFmode, addr);
8652 set_mem_alias_set (mem, rs6000_sr_alias_set);
8654 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8656 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8657 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8658 NULL_RTX, NULL_RTX);
8661 /* Save GPRs. This is done as a PARALLEL if we are using
8662 the store-multiple instructions. */
8663 if (using_store_multiple)
8667 p = rtvec_alloc (32 - info->first_gp_reg_save);
8668 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8669 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8672 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8673 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8674 GEN_INT (info->gp_save_offset
8677 mem = gen_rtx_MEM (reg_mode, addr);
8678 set_mem_alias_set (mem, rs6000_sr_alias_set);
8680 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8682 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8683 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8684 NULL_RTX, NULL_RTX);
8689 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8690 if ((regs_ever_live[info->first_gp_reg_save+i]
8691 && ! call_used_regs[info->first_gp_reg_save+i])
8692 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8693 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8694 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8697 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8698 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8699 GEN_INT (info->gp_save_offset
8702 mem = gen_rtx_MEM (reg_mode, addr);
8703 set_mem_alias_set (mem, rs6000_sr_alias_set);
8705 insn = emit_move_insn (mem, reg);
8706 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8707 NULL_RTX, NULL_RTX);
8711 /* ??? There's no need to emit actual instructions here, but it's the
8712 easiest way to get the frame unwind information emitted. */
8713 if (current_function_calls_eh_return)
8715 unsigned int i, regno;
8721 regno = EH_RETURN_DATA_REGNO (i);
8722 if (regno == INVALID_REGNUM)
8725 reg = gen_rtx_REG (reg_mode, regno);
8726 addr = plus_constant (frame_reg_rtx,
8727 info->ehrd_offset + sp_offset
8728 + reg_size * (int) i);
8729 mem = gen_rtx_MEM (reg_mode, addr);
8730 set_mem_alias_set (mem, rs6000_sr_alias_set);
8732 insn = emit_move_insn (mem, reg);
8733 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8734 NULL_RTX, NULL_RTX);
8738 /* Save lr if we used it. */
8739 if (info->lr_save_p)
8741 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8742 GEN_INT (info->lr_save_offset + sp_offset));
8743 rtx reg = gen_rtx_REG (Pmode, 0);
8744 rtx mem = gen_rtx_MEM (Pmode, addr);
8745 /* This should not be of rs6000_sr_alias_set, because of
8746 __builtin_return_address. */
8748 insn = emit_move_insn (mem, reg);
8749 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8750 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8753 /* Save CR if we use any that must be preserved. */
8754 if (info->cr_save_p)
8756 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8757 GEN_INT (info->cr_save_offset + sp_offset));
8758 rtx mem = gen_rtx_MEM (SImode, addr);
8760 set_mem_alias_set (mem, rs6000_sr_alias_set);
8762 /* If r12 was used to hold the original sp, copy cr into r0 now
8764 if (REGNO (frame_reg_rtx) == 12)
8766 cr_save_rtx = gen_rtx_REG (SImode, 0);
8767 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8769 insn = emit_move_insn (mem, cr_save_rtx);
8771 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8772 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8773 OK. All we have to do is specify that _one_ condition code
8774 register is saved in this stack slot. The thrower's epilogue
8775 will then restore all the call-saved registers.
8776 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8777 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8778 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8781 /* Update stack and set back pointer unless this is V.4,
8782 for which it was done previously. */
8783 if (info->push_p && DEFAULT_ABI != ABI_V4)
8784 rs6000_emit_allocate_stack (info->total_size, FALSE);
8786 /* Set frame pointer, if needed. */
8787 if (frame_pointer_needed)
8789 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8791 RTX_FRAME_RELATED_P (insn) = 1;
8794 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8795 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8796 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8797 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8799 /* If emit_load_toc_table will use the link register, we need to save
8800 it. We use R11 for this purpose because emit_load_toc_table
8801 can use register 0. This allows us to use a plain 'blr' to return
8802 from the procedure more often. */
8803 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8804 && ! info->lr_save_p
8805 && EXIT_BLOCK_PTR->pred != NULL);
8806 if (save_LR_around_toc_setup)
8807 emit_move_insn (gen_rtx_REG (Pmode, 11),
8808 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8810 rs6000_emit_load_toc_table (TRUE);
8812 if (save_LR_around_toc_setup)
8813 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8814 gen_rtx_REG (Pmode, 11));
8817 if (DEFAULT_ABI == ABI_DARWIN
8818 && flag_pic && current_function_uses_pic_offset_table)
8820 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8822 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8825 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8826 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8830 /* Write function prologue. */
8833 rs6000_output_function_prologue (file, size)
8835 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8837 rs6000_stack_t *info = rs6000_stack_info ();
8839 if (TARGET_DEBUG_STACK)
8840 debug_stack_info (info);
8842 /* Write .extern for any function we will call to save and restore
8844 if (info->first_fp_reg_save < 64
8845 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8846 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8847 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8848 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8851 /* Write .extern for AIX common mode routines, if needed. */
8852 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8854 fputs ("\t.extern __mulh\n", file);
8855 fputs ("\t.extern __mull\n", file);
8856 fputs ("\t.extern __divss\n", file);
8857 fputs ("\t.extern __divus\n", file);
8858 fputs ("\t.extern __quoss\n", file);
8859 fputs ("\t.extern __quous\n", file);
8860 common_mode_defined = 1;
8863 if (! HAVE_prologue)
8867 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8868 the "toplevel" insn chain. */
8869 emit_note (0, NOTE_INSN_DELETED);
8870 rs6000_emit_prologue ();
8871 emit_note (0, NOTE_INSN_DELETED);
8873 if (TARGET_DEBUG_STACK)
8874 debug_rtx_list (get_insns (), 100);
8875 final (get_insns (), file, FALSE, FALSE);
8879 rs6000_pic_labelno++;
8882 /* Emit function epilogue as insns.
8884 At present, dwarf2out_frame_debug_expr doesn't understand
8885 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8886 anywhere in the epilogue. Most of the insns below would in any case
8887 need special notes to explain where r11 is in relation to the stack. */
8890 rs6000_emit_epilogue (sibcall)
8893 rs6000_stack_t *info;
8894 int restoring_FPRs_inline;
8895 int using_load_multiple;
8896 int using_mfcr_multiple;
8897 int use_backchain_to_restore_sp;
8899 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8900 rtx frame_reg_rtx = sp_reg_rtx;
8901 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8902 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8905 info = rs6000_stack_info ();
8906 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8907 && info->first_gp_reg_save < 31);
8908 restoring_FPRs_inline = (sibcall
8909 || current_function_calls_eh_return
8910 || info->first_fp_reg_save == 64
8911 || FP_SAVE_INLINE (info->first_fp_reg_save));
8912 use_backchain_to_restore_sp = (frame_pointer_needed
8913 || current_function_calls_alloca
8914 || info->total_size > 32767);
8915 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
8916 || rs6000_cpu == PROCESSOR_PPC603
8917 || rs6000_cpu == PROCESSOR_PPC750
8920 /* If we have a frame pointer, a call to alloca, or a large stack
8921 frame, restore the old stack pointer using the backchain. Otherwise,
8922 we know what size to update it with. */
8923 if (use_backchain_to_restore_sp)
8925 /* Under V.4, don't reset the stack pointer until after we're done
8926 loading the saved registers. */
8927 if (DEFAULT_ABI == ABI_V4)
8928 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
8930 emit_move_insn (frame_reg_rtx,
8931 gen_rtx_MEM (Pmode, sp_reg_rtx));
8934 else if (info->push_p)
8936 if (DEFAULT_ABI == ABI_V4)
8937 sp_offset = info->total_size;
8940 emit_insn (TARGET_32BIT
8941 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8942 GEN_INT (info->total_size))
8943 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8944 GEN_INT (info->total_size)));
8948 /* Restore AltiVec registers if needed. */
8949 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8953 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8954 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8956 rtx addr, areg, mem;
8958 areg = gen_rtx_REG (Pmode, 0);
8960 (areg, GEN_INT (info->altivec_save_offset
8962 + 16 * (i - info->first_altivec_reg_save)));
8964 /* AltiVec addressing mode is [reg+reg]. */
8965 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
8966 mem = gen_rtx_MEM (V4SImode, addr);
8967 set_mem_alias_set (mem, rs6000_sr_alias_set);
8969 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
8973 /* Restore VRSAVE if needed. */
8974 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
8978 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8979 GEN_INT (info->vrsave_save_offset + sp_offset));
8980 mem = gen_rtx_MEM (SImode, addr);
8981 set_mem_alias_set (mem, rs6000_sr_alias_set);
8982 reg = gen_rtx_REG (SImode, 12);
8983 emit_move_insn (reg, mem);
8985 emit_insn (generate_set_vrsave (reg, info, 1));
8988 /* Get the old lr if we saved it. */
8989 if (info->lr_save_p)
8991 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8992 GEN_INT (info->lr_save_offset + sp_offset));
8993 rtx mem = gen_rtx_MEM (Pmode, addr);
8995 set_mem_alias_set (mem, rs6000_sr_alias_set);
8997 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9000 /* Get the old cr if we saved it. */
9001 if (info->cr_save_p)
9003 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9004 GEN_INT (info->cr_save_offset + sp_offset));
9005 rtx mem = gen_rtx_MEM (SImode, addr);
9007 set_mem_alias_set (mem, rs6000_sr_alias_set);
9009 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9012 /* Set LR here to try to overlap restores below. */
9013 if (info->lr_save_p)
9014 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9015 gen_rtx_REG (Pmode, 0));
9017 /* Load exception handler data registers, if needed. */
9018 if (current_function_calls_eh_return)
9020 unsigned int i, regno;
9026 regno = EH_RETURN_DATA_REGNO (i);
9027 if (regno == INVALID_REGNUM)
9030 addr = plus_constant (frame_reg_rtx,
9031 info->ehrd_offset + sp_offset
9032 + reg_size * (int) i);
9033 mem = gen_rtx_MEM (reg_mode, addr);
9034 set_mem_alias_set (mem, rs6000_sr_alias_set);
9036 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9040 /* Restore GPRs. This is done as a PARALLEL if we are using
9041 the load-multiple instructions. */
9042 if (using_load_multiple)
9045 p = rtvec_alloc (32 - info->first_gp_reg_save);
9046 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9048 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9049 GEN_INT (info->gp_save_offset
9052 rtx mem = gen_rtx_MEM (reg_mode, addr);
9054 set_mem_alias_set (mem, rs6000_sr_alias_set);
9057 gen_rtx_SET (VOIDmode,
9058 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9061 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9064 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9065 if ((regs_ever_live[info->first_gp_reg_save+i]
9066 && ! call_used_regs[info->first_gp_reg_save+i])
9067 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
9068 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9069 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9071 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9072 GEN_INT (info->gp_save_offset
9075 rtx mem = gen_rtx_MEM (reg_mode, addr);
9077 set_mem_alias_set (mem, rs6000_sr_alias_set);
9079 emit_move_insn (gen_rtx_REG (reg_mode,
9080 info->first_gp_reg_save + i),
9084 /* Restore fpr's if we need to do it without calling a function. */
9085 if (restoring_FPRs_inline)
9086 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9087 if ((regs_ever_live[info->first_fp_reg_save+i]
9088 && ! call_used_regs[info->first_fp_reg_save+i]))
9091 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9092 GEN_INT (info->fp_save_offset
9095 mem = gen_rtx_MEM (DFmode, addr);
9096 set_mem_alias_set (mem, rs6000_sr_alias_set);
9098 emit_move_insn (gen_rtx_REG (DFmode,
9099 info->first_fp_reg_save + i),
9103 /* If we saved cr, restore it here. Just those that were used. */
9104 if (info->cr_save_p)
9106 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9109 if (using_mfcr_multiple)
9111 for (i = 0; i < 8; i++)
9112 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9118 if (using_mfcr_multiple && count > 1)
9123 p = rtvec_alloc (count);
9126 for (i = 0; i < 8; i++)
9127 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9129 rtvec r = rtvec_alloc (2);
9130 RTVEC_ELT (r, 0) = r12_rtx;
9131 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9132 RTVEC_ELT (p, ndx) =
9133 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9134 gen_rtx_UNSPEC (CCmode, r, 20));
9137 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9142 for (i = 0; i < 8; i++)
9143 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9145 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9151 /* If this is V.4, unwind the stack pointer after all of the loads
9152 have been done. We need to emit a block here so that sched
9153 doesn't decide to move the sp change before the register restores
9154 (which may not have any obvious dependency on the stack). This
9155 doesn't hurt performance, because there is no scheduling that can
9156 be done after this point. */
9157 if (DEFAULT_ABI == ABI_V4)
9159 if (frame_reg_rtx != sp_reg_rtx)
9160 rs6000_emit_stack_tie ();
9162 if (use_backchain_to_restore_sp)
9164 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9166 else if (sp_offset != 0)
9168 emit_insn (Pmode == SImode
9169 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9170 GEN_INT (sp_offset))
9171 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9172 GEN_INT (sp_offset)));
9176 if (current_function_calls_eh_return)
9178 rtx sa = EH_RETURN_STACKADJ_RTX;
9179 emit_insn (Pmode == SImode
9180 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9181 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9187 if (! restoring_FPRs_inline)
9188 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9190 p = rtvec_alloc (2);
9192 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9193 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9195 LINK_REGISTER_REGNUM));
9197 /* If we have to restore more than two FP registers, branch to the
9198 restore function. It will return to our caller. */
9199 if (! restoring_FPRs_inline)
9203 const char *alloc_rname;
9205 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9206 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9207 alloc_rname = ggc_strdup (rname);
9208 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9209 gen_rtx_SYMBOL_REF (Pmode,
9212 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9215 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9216 GEN_INT (info->fp_save_offset + 8*i));
9217 mem = gen_rtx_MEM (DFmode, addr);
9218 set_mem_alias_set (mem, rs6000_sr_alias_set);
9220 RTVEC_ELT (p, i+3) =
9221 gen_rtx_SET (VOIDmode,
9222 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9227 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9231 /* Write function epilogue. */
9234 rs6000_output_function_epilogue (file, size)
9236 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9238 rs6000_stack_t *info = rs6000_stack_info ();
9239 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9241 if (! HAVE_epilogue)
9243 rtx insn = get_last_insn ();
9244 /* If the last insn was a BARRIER, we don't have to write anything except
9246 if (GET_CODE (insn) == NOTE)
9247 insn = prev_nonnote_insn (insn);
9248 if (insn == 0 || GET_CODE (insn) != BARRIER)
9250 /* This is slightly ugly, but at least we don't have two
9251 copies of the epilogue-emitting code. */
9254 /* A NOTE_INSN_DELETED is supposed to be at the start
9255 and end of the "toplevel" insn chain. */
9256 emit_note (0, NOTE_INSN_DELETED);
9257 rs6000_emit_epilogue (FALSE);
9258 emit_note (0, NOTE_INSN_DELETED);
9260 if (TARGET_DEBUG_STACK)
9261 debug_rtx_list (get_insns (), 100);
9262 final (get_insns (), file, FALSE, FALSE);
9267 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9270 We don't output a traceback table if -finhibit-size-directive was
9271 used. The documentation for -finhibit-size-directive reads
9272 ``don't output a @code{.size} assembler directive, or anything
9273 else that would cause trouble if the function is split in the
9274 middle, and the two halves are placed at locations far apart in
9275 memory.'' The traceback table has this property, since it
9276 includes the offset from the start of the function to the
9277 traceback table itself.
9279 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9280 different traceback table. */
9281 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9283 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9284 const char *language_string = lang_hooks.name;
9285 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9288 while (*fname == '.') /* V.4 encodes . in the name */
9291 /* Need label immediately before tbtab, so we can compute its offset
9292 from the function start. */
9295 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9296 ASM_OUTPUT_LABEL (file, fname);
9298 /* The .tbtab pseudo-op can only be used for the first eight
9299 expressions, since it can't handle the possibly variable
9300 length fields that follow. However, if you omit the optional
9301 fields, the assembler outputs zeros for all optional fields
9302 anyways, giving each variable length field is minimum length
9303 (as defined in sys/debug.h). Thus we can not use the .tbtab
9304 pseudo-op at all. */
9306 /* An all-zero word flags the start of the tbtab, for debuggers
9307 that have to find it by searching forward from the entry
9308 point or from the current pc. */
9309 fputs ("\t.long 0\n", file);
9311 /* Tbtab format type. Use format type 0. */
9312 fputs ("\t.byte 0,", file);
9314 /* Language type. Unfortunately, there doesn't seem to be any
9315 official way to get this info, so we use language_string. C
9316 is 0. C++ is 9. No number defined for Obj-C, so use the
9317 value for C for now. There is no official value for Java,
9318 although IBM appears to be using 13. There is no official value
9319 for Chill, so we've chosen 44 pseudo-randomly. */
9320 if (! strcmp (language_string, "GNU C")
9321 || ! strcmp (language_string, "GNU Objective-C"))
9323 else if (! strcmp (language_string, "GNU F77"))
9325 else if (! strcmp (language_string, "GNU Ada"))
9327 else if (! strcmp (language_string, "GNU Pascal"))
9329 else if (! strcmp (language_string, "GNU C++"))
9331 else if (! strcmp (language_string, "GNU Java"))
9333 else if (! strcmp (language_string, "GNU CHILL"))
9337 fprintf (file, "%d,", i);
9339 /* 8 single bit fields: global linkage (not set for C extern linkage,
9340 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9341 from start of procedure stored in tbtab, internal function, function
9342 has controlled storage, function has no toc, function uses fp,
9343 function logs/aborts fp operations. */
9344 /* Assume that fp operations are used if any fp reg must be saved. */
9345 fprintf (file, "%d,",
9346 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9348 /* 6 bitfields: function is interrupt handler, name present in
9349 proc table, function calls alloca, on condition directives
9350 (controls stack walks, 3 bits), saves condition reg, saves
9352 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9353 set up as a frame pointer, even when there is no alloca call. */
9354 fprintf (file, "%d,",
9355 ((optional_tbtab << 6)
9356 | ((optional_tbtab & frame_pointer_needed) << 5)
9357 | (info->cr_save_p << 1)
9358 | (info->lr_save_p)));
9360 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9362 fprintf (file, "%d,",
9363 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9365 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9366 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9370 /* Compute the parameter info from the function decl argument
9373 int next_parm_info_bit = 31;
9375 for (decl = DECL_ARGUMENTS (current_function_decl);
9376 decl; decl = TREE_CHAIN (decl))
9378 rtx parameter = DECL_INCOMING_RTL (decl);
9379 enum machine_mode mode = GET_MODE (parameter);
9381 if (GET_CODE (parameter) == REG)
9383 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9391 else if (mode == DFmode)
9396 /* If only one bit will fit, don't or in this entry. */
9397 if (next_parm_info_bit > 0)
9398 parm_info |= (bits << (next_parm_info_bit - 1));
9399 next_parm_info_bit -= 2;
9403 fixed_parms += ((GET_MODE_SIZE (mode)
9404 + (UNITS_PER_WORD - 1))
9406 next_parm_info_bit -= 1;
9412 /* Number of fixed point parameters. */
9413 /* This is actually the number of words of fixed point parameters; thus
9414 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9415 fprintf (file, "%d,", fixed_parms);
9417 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9419 /* This is actually the number of fp registers that hold parameters;
9420 and thus the maximum value is 13. */
9421 /* Set parameters on stack bit if parameters are not in their original
9422 registers, regardless of whether they are on the stack? Xlc
9423 seems to set the bit when not optimizing. */
9424 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9426 if (! optional_tbtab)
9429 /* Optional fields follow. Some are variable length. */
9431 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9433 /* There is an entry for each parameter in a register, in the order that
9434 they occur in the parameter list. Any intervening arguments on the
9435 stack are ignored. If the list overflows a long (max possible length
9436 34 bits) then completely leave off all elements that don't fit. */
9437 /* Only emit this long if there was at least one parameter. */
9438 if (fixed_parms || float_parms)
9439 fprintf (file, "\t.long %d\n", parm_info);
9441 /* Offset from start of code to tb table. */
9442 fputs ("\t.long ", file);
9443 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9445 RS6000_OUTPUT_BASENAME (file, fname);
9447 assemble_name (file, fname);
9451 RS6000_OUTPUT_BASENAME (file, fname);
9453 assemble_name (file, fname);
9457 /* Interrupt handler mask. */
9458 /* Omit this long, since we never set the interrupt handler bit
9461 /* Number of CTL (controlled storage) anchors. */
9462 /* Omit this long, since the has_ctl bit is never set above. */
9464 /* Displacement into stack of each CTL anchor. */
9465 /* Omit this list of longs, because there are no CTL anchors. */
9467 /* Length of function name. */
9468 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9470 /* Function name. */
9471 assemble_string (fname, strlen (fname));
9473 /* Register for alloca automatic storage; this is always reg 31.
9474 Only emit this if the alloca bit was set above. */
9475 if (frame_pointer_needed)
9476 fputs ("\t.byte 31\n", file);
9481 /* A C compound statement that outputs the assembler code for a thunk
9482 function, used to implement C++ virtual function calls with
9483 multiple inheritance. The thunk acts as a wrapper around a virtual
9484 function, adjusting the implicit object parameter before handing
9485 control off to the real function.
9487 First, emit code to add the integer DELTA to the location that
9488 contains the incoming first argument. Assume that this argument
9489 contains a pointer, and is the one used to pass the `this' pointer
9490 in C++. This is the incoming argument *before* the function
9491 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9492 values of all other incoming arguments.
9494 After the addition, emit code to jump to FUNCTION, which is a
9495 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9496 not touch the return address. Hence returning from FUNCTION will
9497 return to whoever called the current `thunk'.
9499 The effect must be as if FUNCTION had been called directly with the
9500 adjusted first argument. This macro is responsible for emitting
9501 all of the code for a thunk function; output_function_prologue()
9502 and output_function_epilogue() are not invoked.
9504 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9505 been extracted from it.) It might possibly be useful on some
9506 targets, but probably not.
9508 If you do not define this macro, the target-independent code in the
9509 C++ frontend will generate a less efficient heavyweight thunk that
9510 calls FUNCTION instead of jumping to it. The generic approach does
9511 not support varargs. */
9514 output_mi_thunk (file, thunk_fndecl, delta, function)
9516 tree thunk_fndecl ATTRIBUTE_UNUSED;
9520 const char *this_reg =
9521 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9524 const char *r0 = reg_names[0];
9525 const char *toc = reg_names[2];
9526 const char *schain = reg_names[11];
9527 const char *r12 = reg_names[12];
9529 static int labelno = 0;
9531 /* Small constants that can be done by one add instruction. */
9532 if (delta >= -32768 && delta <= 32767)
9534 if (! TARGET_NEW_MNEMONICS)
9535 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9537 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9540 /* Large constants that can be done by one addis instruction. */
9541 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9542 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9545 /* 32-bit constants that can be done by an add and addis instruction. */
9546 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9548 /* Break into two pieces, propagating the sign bit from the low
9549 word to the upper word. */
9550 int delta_high = delta >> 16;
9551 int delta_low = delta & 0xffff;
9552 if ((delta_low & 0x8000) != 0)
9555 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9558 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9561 if (! TARGET_NEW_MNEMONICS)
9562 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9564 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9567 /* 64-bit constants, fixme */
9571 /* Get the prefix in front of the names. */
9572 switch (DEFAULT_ABI)
9582 case ABI_AIX_NODESC:
9587 /* If the function is compiled in this module, jump to it directly.
9588 Otherwise, load up its address and jump to it. */
9590 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9592 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9593 && ! lookup_attribute ("longcall",
9594 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9596 fprintf (file, "\tb %s", prefix);
9597 assemble_name (file, fname);
9598 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9604 switch (DEFAULT_ABI)
9610 /* Set up a TOC entry for the function. */
9611 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9613 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9616 if (TARGET_MINIMAL_TOC)
9617 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9620 fputs ("\t.tc ", file);
9621 assemble_name (file, fname);
9622 fputs ("[TC],", file);
9624 assemble_name (file, fname);
9627 if (TARGET_MINIMAL_TOC)
9628 asm_fprintf (file, (TARGET_32BIT)
9629 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9630 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9631 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9632 assemble_name (file, buf);
9633 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9634 fputs ("-(.LCTOC1)", file);
9635 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9637 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9641 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9644 asm_fprintf (file, "\tmtctr %s\n", r0);
9646 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9649 asm_fprintf (file, "\tbctr\n");
9652 case ABI_AIX_NODESC:
9654 fprintf (file, "\tb %s", prefix);
9655 assemble_name (file, fname);
9656 if (flag_pic) fputs ("@plt", file);
9662 fprintf (file, "\tb %s", prefix);
9663 if (flag_pic && !machopic_name_defined_p (fname))
9664 assemble_name (file, machopic_stub_name (fname));
9666 assemble_name (file, fname);
9675 /* A quick summary of the various types of 'constant-pool tables'
9678 Target Flags Name One table per
9679 AIX (none) AIX TOC object file
9680 AIX -mfull-toc AIX TOC object file
9681 AIX -mminimal-toc AIX minimal TOC translation unit
9682 SVR4/EABI (none) SVR4 SDATA object file
9683 SVR4/EABI -fpic SVR4 pic object file
9684 SVR4/EABI -fPIC SVR4 PIC translation unit
9685 SVR4/EABI -mrelocatable EABI TOC function
9686 SVR4/EABI -maix AIX TOC object file
9687 SVR4/EABI -maix -mminimal-toc
9688 AIX minimal TOC translation unit
9690 Name Reg. Set by entries contains:
9691 made by addrs? fp? sum?
9693 AIX TOC 2 crt0 as Y option option
9694 AIX minimal TOC 30 prolog gcc Y Y option
9695 SVR4 SDATA 13 crt0 gcc N Y N
9696 SVR4 pic 30 prolog ld Y not yet N
9697 SVR4 PIC 30 prolog gcc Y option option
9698 EABI TOC 30 prolog gcc Y option option
9702 /* Hash table stuff for keeping track of TOC entries. */
9704 struct toc_hash_struct
9706 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9707 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9709 enum machine_mode key_mode;
9713 static htab_t toc_hash_table;
9715 /* Hash functions for the hash table. */
9718 rs6000_hash_constant (k)
9721 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9722 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9723 int flen = strlen (format);
9726 if (GET_CODE (k) == LABEL_REF)
9727 return result * 1231 + X0INT (XEXP (k, 0), 3);
9729 if (GET_CODE (k) == CONST_DOUBLE)
9731 else if (GET_CODE (k) == CODE_LABEL)
9736 for (; fidx < flen; fidx++)
9737 switch (format[fidx])
9742 const char *str = XSTR (k, fidx);
9744 result = result * 613 + len;
9745 for (i = 0; i < len; i++)
9746 result = result * 613 + (unsigned) str[i];
9751 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9755 result = result * 613 + (unsigned) XINT (k, fidx);
9758 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9759 result = result * 613 + (unsigned) XWINT (k, fidx);
9763 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9764 result = result * 613 + (unsigned) (XWINT (k, fidx)
9775 toc_hash_function (hash_entry)
9776 const void * hash_entry;
9778 const struct toc_hash_struct *thc =
9779 (const struct toc_hash_struct *) hash_entry;
9780 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9783 /* Compare H1 and H2 for equivalence. */
9786 toc_hash_eq (h1, h2)
9790 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9791 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9793 if (((const struct toc_hash_struct *) h1)->key_mode
9794 != ((const struct toc_hash_struct *) h2)->key_mode)
9797 /* Gotcha: One of these const_doubles will be in memory.
9798 The other may be on the constant-pool chain.
9799 So rtx_equal_p will think they are different... */
9802 if (GET_CODE (r1) != GET_CODE (r2)
9803 || GET_MODE (r1) != GET_MODE (r2))
9805 if (GET_CODE (r1) == CONST_DOUBLE)
9807 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9809 for (i = 1; i < format_len; i++)
9810 if (XWINT (r1, i) != XWINT (r2, i))
9815 else if (GET_CODE (r1) == LABEL_REF)
9816 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9817 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9819 return rtx_equal_p (r1, r2);
9822 /* Mark the hash table-entry HASH_ENTRY. */
9825 toc_hash_mark_entry (hash_slot, unused)
9827 void * unused ATTRIBUTE_UNUSED;
9829 const struct toc_hash_struct * hash_entry =
9830 *(const struct toc_hash_struct **) hash_slot;
9831 rtx r = hash_entry->key;
9832 ggc_set_mark (hash_entry);
9833 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9834 if (GET_CODE (r) == LABEL_REF)
9837 ggc_set_mark (XEXP (r, 0));
9844 /* Mark all the elements of the TOC hash-table *HT. */
9847 toc_hash_mark_table (vht)
9852 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9855 /* These are the names given by the C++ front-end to vtables, and
9856 vtable-like objects. Ideally, this logic should not be here;
9857 instead, there should be some programmatic way of inquiring as
9858 to whether or not an object is a vtable. */
9860 #define VTABLE_NAME_P(NAME) \
9861 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
9862 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
9863 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
9864 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
9867 rs6000_output_symbol_ref (file, x)
9871 /* Currently C++ toc references to vtables can be emitted before it
9872 is decided whether the vtable is public or private. If this is
9873 the case, then the linker will eventually complain that there is
9874 a reference to an unknown section. Thus, for vtables only,
9875 we emit the TOC reference to reference the symbol and not the
9877 const char *name = XSTR (x, 0);
9879 if (VTABLE_NAME_P (name))
9881 RS6000_OUTPUT_BASENAME (file, name);
9884 assemble_name (file, name);
9887 /* Output a TOC entry. We derive the entry name from what is being
9891 output_toc (file, x, labelno, mode)
9895 enum machine_mode mode;
9898 const char *name = buf;
9899 const char *real_name;
9906 /* When the linker won't eliminate them, don't output duplicate
9907 TOC entries (this happens on AIX if there is any kind of TOC,
9908 and on SVR4 under -fPIC or -mrelocatable). */
9911 struct toc_hash_struct *h;
9914 h = ggc_alloc (sizeof (*h));
9917 h->labelno = labelno;
9919 found = htab_find_slot (toc_hash_table, h, 1);
9922 else /* This is indeed a duplicate.
9923 Set this label equal to that label. */
9925 fputs ("\t.set ", file);
9926 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9927 fprintf (file, "%d,", labelno);
9928 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9929 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9935 /* If we're going to put a double constant in the TOC, make sure it's
9936 aligned properly when strict alignment is on. */
9937 if (GET_CODE (x) == CONST_DOUBLE
9939 && GET_MODE_BITSIZE (mode) >= 64
9940 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
9941 ASM_OUTPUT_ALIGN (file, 3);
9944 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
9946 /* Handle FP constants specially. Note that if we have a minimal
9947 TOC, things we put here aren't actually in the TOC, so we can allow
9949 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9954 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9955 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
9959 if (TARGET_MINIMAL_TOC)
9960 fputs (DOUBLE_INT_ASM_OP, file);
9962 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9963 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
9968 if (TARGET_MINIMAL_TOC)
9969 fputs ("\t.long ", file);
9971 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9972 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
9976 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9981 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9982 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
9986 if (TARGET_MINIMAL_TOC)
9987 fputs (DOUBLE_INT_ASM_OP, file);
9989 fprintf (file, "\t.tc FS_%lx[TC],", l);
9990 fprintf (file, "0x%lx00000000\n", l);
9995 if (TARGET_MINIMAL_TOC)
9996 fputs ("\t.long ", file);
9998 fprintf (file, "\t.tc FS_%lx[TC],", l);
9999 fprintf (file, "0x%lx\n", l);
10003 else if (GET_MODE (x) == VOIDmode
10004 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10006 unsigned HOST_WIDE_INT low;
10007 HOST_WIDE_INT high;
10009 if (GET_CODE (x) == CONST_DOUBLE)
10011 low = CONST_DOUBLE_LOW (x);
10012 high = CONST_DOUBLE_HIGH (x);
10015 #if HOST_BITS_PER_WIDE_INT == 32
10018 high = (low & 0x80000000) ? ~0 : 0;
10022 low = INTVAL (x) & 0xffffffff;
10023 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10027 /* TOC entries are always Pmode-sized, but since this
10028 is a bigendian machine then if we're putting smaller
10029 integer constants in the TOC we have to pad them.
10030 (This is still a win over putting the constants in
10031 a separate constant pool, because then we'd have
10032 to have both a TOC entry _and_ the actual constant.)
10034 For a 32-bit target, CONST_INT values are loaded and shifted
10035 entirely within `low' and can be stored in one TOC entry. */
10037 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10038 abort ();/* It would be easy to make this work, but it doesn't now. */
10040 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10041 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10042 POINTER_SIZE, &low, &high, 0);
10046 if (TARGET_MINIMAL_TOC)
10047 fputs (DOUBLE_INT_ASM_OP, file);
10049 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
10050 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10055 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10057 if (TARGET_MINIMAL_TOC)
10058 fputs ("\t.long ", file);
10060 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10061 (long)high, (long)low);
10062 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10066 if (TARGET_MINIMAL_TOC)
10067 fputs ("\t.long ", file);
10069 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10070 fprintf (file, "0x%lx\n", (long) low);
10076 if (GET_CODE (x) == CONST)
10078 if (GET_CODE (XEXP (x, 0)) != PLUS)
10081 base = XEXP (XEXP (x, 0), 0);
10082 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10085 if (GET_CODE (base) == SYMBOL_REF)
10086 name = XSTR (base, 0);
10087 else if (GET_CODE (base) == LABEL_REF)
10088 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10089 else if (GET_CODE (base) == CODE_LABEL)
10090 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10094 STRIP_NAME_ENCODING (real_name, name);
10095 if (TARGET_MINIMAL_TOC)
10096 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10099 fprintf (file, "\t.tc %s", real_name);
10102 fprintf (file, ".N%d", - offset);
10104 fprintf (file, ".P%d", offset);
10106 fputs ("[TC],", file);
10109 /* Currently C++ toc references to vtables can be emitted before it
10110 is decided whether the vtable is public or private. If this is
10111 the case, then the linker will eventually complain that there is
10112 a TOC reference to an unknown section. Thus, for vtables only,
10113 we emit the TOC reference to reference the symbol and not the
10115 if (VTABLE_NAME_P (name))
10117 RS6000_OUTPUT_BASENAME (file, name);
10119 fprintf (file, "%d", offset);
10120 else if (offset > 0)
10121 fprintf (file, "+%d", offset);
10124 output_addr_const (file, x);
10128 /* Output an assembler pseudo-op to write an ASCII string of N characters
10129 starting at P to FILE.
10131 On the RS/6000, we have to do this using the .byte operation and
10132 write out special characters outside the quoted string.
10133 Also, the assembler is broken; very long strings are truncated,
10134 so we must artificially break them up early. */
10137 output_ascii (file, p, n)
10143 int i, count_string;
10144 const char *for_string = "\t.byte \"";
10145 const char *for_decimal = "\t.byte ";
10146 const char *to_close = NULL;
10149 for (i = 0; i < n; i++)
10152 if (c >= ' ' && c < 0177)
10155 fputs (for_string, file);
10158 /* Write two quotes to get one. */
10166 for_decimal = "\"\n\t.byte ";
10170 if (count_string >= 512)
10172 fputs (to_close, file);
10174 for_string = "\t.byte \"";
10175 for_decimal = "\t.byte ";
10183 fputs (for_decimal, file);
10184 fprintf (file, "%d", c);
10186 for_string = "\n\t.byte \"";
10187 for_decimal = ", ";
10193 /* Now close the string if we have written one. Then end the line. */
10195 fputs (to_close, file);
10198 /* Generate a unique section name for FILENAME for a section type
10199 represented by SECTION_DESC. Output goes into BUF.
10201 SECTION_DESC can be any string, as long as it is different for each
10202 possible section type.
10204 We name the section in the same manner as xlc. The name begins with an
10205 underscore followed by the filename (after stripping any leading directory
10206 names) with the last period replaced by the string SECTION_DESC. If
10207 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10211 rs6000_gen_section_name (buf, filename, section_desc)
10213 const char *filename;
10214 const char *section_desc;
10216 const char *q, *after_last_slash, *last_period = 0;
10220 after_last_slash = filename;
10221 for (q = filename; *q; q++)
10224 after_last_slash = q + 1;
10225 else if (*q == '.')
10229 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10230 *buf = (char *) permalloc (len);
10235 for (q = after_last_slash; *q; q++)
10237 if (q == last_period)
10239 strcpy (p, section_desc);
10240 p += strlen (section_desc);
10243 else if (ISALNUM (*q))
10247 if (last_period == 0)
10248 strcpy (p, section_desc);
10253 /* Emit profile function. */
10256 output_profile_hook (labelno)
10259 if (DEFAULT_ABI == ABI_AIX)
10262 const char *label_name;
10267 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10268 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10269 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10271 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10274 else if (DEFAULT_ABI == ABI_DARWIN)
10276 const char *mcount_name = RS6000_MCOUNT;
10277 int caller_addr_regno = LINK_REGISTER_REGNUM;
10279 /* Be conservative and always set this, at least for now. */
10280 current_function_uses_pic_offset_table = 1;
10283 /* For PIC code, set up a stub and collect the caller's address
10284 from r0, which is where the prologue puts it. */
10287 mcount_name = machopic_stub_name (mcount_name);
10288 if (current_function_uses_pic_offset_table)
10289 caller_addr_regno = 0;
10292 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10294 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10298 /* Write function profiler code. */
10301 output_function_profiler (file, labelno)
10307 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10308 switch (DEFAULT_ABI)
10314 case ABI_AIX_NODESC:
10315 fprintf (file, "\tmflr %s\n", reg_names[0]);
10318 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10319 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10320 reg_names[0], reg_names[1]);
10321 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10322 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10323 assemble_name (file, buf);
10324 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10326 else if (flag_pic > 1)
10328 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10329 reg_names[0], reg_names[1]);
10330 /* Now, we need to get the address of the label. */
10331 fputs ("\tbl 1f\n\t.long ", file);
10332 assemble_name (file, buf);
10333 fputs ("-.\n1:", file);
10334 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10335 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10336 reg_names[0], reg_names[11]);
10337 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10338 reg_names[0], reg_names[0], reg_names[11]);
10342 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10343 assemble_name (file, buf);
10344 fputs ("@ha\n", file);
10345 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10346 reg_names[0], reg_names[1]);
10347 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10348 assemble_name (file, buf);
10349 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10352 if (current_function_needs_context)
10353 asm_fprintf (file, "\tmr %s,%s\n",
10354 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10355 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10356 if (current_function_needs_context)
10357 asm_fprintf (file, "\tmr %s,%s\n",
10358 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10363 /* Don't do anything, done in output_profile_hook (). */
10369 /* Adjust the cost of a scheduling dependency. Return the new cost of
10370 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10373 rs6000_adjust_cost (insn, link, dep_insn, cost)
10376 rtx dep_insn ATTRIBUTE_UNUSED;
10379 if (! recog_memoized (insn))
10382 if (REG_NOTE_KIND (link) != 0)
10385 if (REG_NOTE_KIND (link) == 0)
10387 /* Data dependency; DEP_INSN writes a register that INSN reads
10388 some cycles later. */
10389 switch (get_attr_type (insn))
10392 /* Tell the first scheduling pass about the latency between
10393 a mtctr and bctr (and mtlr and br/blr). The first
10394 scheduling pass will not know about this latency since
10395 the mtctr instruction, which has the latency associated
10396 to it, will be generated by reload. */
10397 return TARGET_POWER ? 5 : 4;
10399 /* Leave some extra cycles between a compare and its
10400 dependent branch, to inhibit expensive mispredicts. */
10401 if ((rs6000_cpu_attr == CPU_PPC750
10402 || rs6000_cpu_attr == CPU_PPC7400
10403 || rs6000_cpu_attr == CPU_PPC7450)
10404 && recog_memoized (dep_insn)
10405 && (INSN_CODE (dep_insn) >= 0)
10406 && (get_attr_type (dep_insn) == TYPE_COMPARE
10407 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10408 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10409 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10414 /* Fall out to return default cost. */
10420 /* A C statement (sans semicolon) to update the integer scheduling
10421 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10422 INSN earlier, increase the priority to execute INSN later. Do not
10423 define this macro if you do not need to adjust the scheduling
10424 priorities of insns. */
10427 rs6000_adjust_priority (insn, priority)
10428 rtx insn ATTRIBUTE_UNUSED;
10431 /* On machines (like the 750) which have asymmetric integer units,
10432 where one integer unit can do multiply and divides and the other
10433 can't, reduce the priority of multiply/divide so it is scheduled
10434 before other integer operations. */
10437 if (! INSN_P (insn))
10440 if (GET_CODE (PATTERN (insn)) == USE)
10443 switch (rs6000_cpu_attr) {
10445 switch (get_attr_type (insn))
10452 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10453 priority, priority);
10454 if (priority >= 0 && priority < 0x01000000)
10464 /* Return how many instructions the machine can issue per cycle. */
10467 rs6000_issue_rate ()
10469 switch (rs6000_cpu_attr) {
10470 case CPU_RIOS1: /* ? */
10472 case CPU_PPC601: /* ? */
10491 /* Length in units of the trampoline for entering a nested function. */
10494 rs6000_trampoline_size ()
10498 switch (DEFAULT_ABI)
10504 ret = (TARGET_32BIT) ? 12 : 24;
10509 case ABI_AIX_NODESC:
10510 ret = (TARGET_32BIT) ? 40 : 48;
10517 /* Emit RTL insns to initialize the variable parts of a trampoline.
10518 FNADDR is an RTX for the address of the function's pure code.
10519 CXT is an RTX for the static chain value for the function. */
10522 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10527 enum machine_mode pmode = Pmode;
10528 int regsize = (TARGET_32BIT) ? 4 : 8;
10529 rtx ctx_reg = force_reg (pmode, cxt);
10531 switch (DEFAULT_ABI)
10536 /* Macros to shorten the code expansions below. */
10537 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10538 #define MEM_PLUS(addr,offset) \
10539 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10541 /* Under AIX, just build the 3 word function descriptor */
10544 rtx fn_reg = gen_reg_rtx (pmode);
10545 rtx toc_reg = gen_reg_rtx (pmode);
10546 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10547 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10548 emit_move_insn (MEM_DEREF (addr), fn_reg);
10549 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10550 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10554 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10557 case ABI_AIX_NODESC:
10558 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10559 FALSE, VOIDmode, 4,
10561 GEN_INT (rs6000_trampoline_size ()), SImode,
10571 /* Table of valid machine attributes. */
10573 const struct attribute_spec rs6000_attribute_table[] =
10575 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10576 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10577 { NULL, 0, 0, false, false, false, NULL }
10580 /* Handle a "longcall" attribute; arguments as in struct
10581 attribute_spec.handler. */
10584 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10587 tree args ATTRIBUTE_UNUSED;
10588 int flags ATTRIBUTE_UNUSED;
10589 bool *no_add_attrs;
10591 if (TREE_CODE (*node) != FUNCTION_TYPE
10592 && TREE_CODE (*node) != FIELD_DECL
10593 && TREE_CODE (*node) != TYPE_DECL)
10595 warning ("`%s' attribute only applies to functions",
10596 IDENTIFIER_POINTER (name));
10597 *no_add_attrs = true;
10603 /* Return a reference suitable for calling a function with the
10604 longcall attribute. */
10607 rs6000_longcall_ref (call_ref)
10610 const char *call_name;
10613 if (GET_CODE (call_ref) != SYMBOL_REF)
10616 /* System V adds '.' to the internal name, so skip them. */
10617 call_name = XSTR (call_ref, 0);
10618 if (*call_name == '.')
10620 while (*call_name == '.')
10623 node = get_identifier (call_name);
10624 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10627 return force_reg (Pmode, call_ref);
10631 /* A C statement or statements to switch to the appropriate section
10632 for output of RTX in mode MODE. You can assume that RTX is some
10633 kind of constant in RTL. The argument MODE is redundant except in
10634 the case of a `const_int' rtx. Select the section by calling
10635 `text_section' or one of the alternatives for other sections.
10637 Do not define this macro if you put all constants in the read-only
10640 #ifdef USING_ELFOS_H
10643 rs6000_select_rtx_section (mode, x)
10644 enum machine_mode mode;
10647 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10653 /* A C statement or statements to switch to the appropriate
10654 section for output of DECL. DECL is either a `VAR_DECL' node
10655 or a constant of some sort. RELOC indicates whether forming
10656 the initial value of DECL requires link-time relocations. */
10659 rs6000_select_section (decl, reloc)
10663 int size = int_size_in_bytes (TREE_TYPE (decl));
10666 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10673 needs_sdata = (size > 0
10674 && size <= g_switch_value
10675 && rs6000_sdata != SDATA_NONE
10676 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10678 if (TREE_CODE (decl) == STRING_CST)
10679 readonly = ! flag_writable_strings;
10680 else if (TREE_CODE (decl) == VAR_DECL)
10681 readonly = (! (flag_pic && reloc)
10682 && TREE_READONLY (decl)
10683 && ! TREE_SIDE_EFFECTS (decl)
10684 && DECL_INITIAL (decl)
10685 && DECL_INITIAL (decl) != error_mark_node
10686 && TREE_CONSTANT (DECL_INITIAL (decl)));
10687 else if (TREE_CODE (decl) == CONSTRUCTOR)
10688 readonly = (! (flag_pic && reloc)
10689 && ! TREE_SIDE_EFFECTS (decl)
10690 && TREE_CONSTANT (decl));
10693 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10696 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10699 /* A C statement to build up a unique section name, expressed as a
10700 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10701 RELOC indicates whether the initial value of EXP requires
10702 link-time relocations. If you do not define this macro, GCC will use
10703 the symbol name prefixed by `.' as the section name. Note - this
10704 macro can now be called for uninitialized data items as well as
10705 initialised data and functions. */
10708 rs6000_unique_section (decl, reloc)
10716 const char *prefix;
10718 static const char *const prefixes[7][2] =
10720 { ".rodata.", ".gnu.linkonce.r." },
10721 { ".sdata2.", ".gnu.linkonce.s2." },
10722 { ".data.", ".gnu.linkonce.d." },
10723 { ".sdata.", ".gnu.linkonce.s." },
10724 { ".bss.", ".gnu.linkonce.b." },
10725 { ".sbss.", ".gnu.linkonce.sb." },
10726 { ".text.", ".gnu.linkonce.t." }
10729 if (TREE_CODE (decl) == FUNCTION_DECL)
10738 if (TREE_CODE (decl) == STRING_CST)
10739 readonly = ! flag_writable_strings;
10740 else if (TREE_CODE (decl) == VAR_DECL)
10741 readonly = (! (flag_pic && reloc)
10742 && TREE_READONLY (decl)
10743 && ! TREE_SIDE_EFFECTS (decl)
10744 && TREE_CONSTANT (DECL_INITIAL (decl)));
10746 size = int_size_in_bytes (TREE_TYPE (decl));
10747 needs_sdata = (size > 0
10748 && size <= g_switch_value
10749 && rs6000_sdata != SDATA_NONE
10750 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10752 if (DECL_INITIAL (decl) == 0
10753 || DECL_INITIAL (decl) == error_mark_node)
10755 else if (! readonly)
10762 /* .sdata2 is only for EABI. */
10763 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10769 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10770 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10771 len = strlen (name) + strlen (prefix);
10772 string = alloca (len + 1);
10774 sprintf (string, "%s%s", prefix, name);
10776 DECL_SECTION_NAME (decl) = build_string (len, string);
10780 /* If we are referencing a function that is static or is known to be
10781 in this file, make the SYMBOL_REF special. We can use this to indicate
10782 that we can branch to this function without emitting a no-op after the
10783 call. For real AIX calling sequences, we also replace the
10784 function name with the real name (1 or 2 leading .'s), rather than
10785 the function descriptor name. This saves a lot of overriding code
10786 to read the prefixes. */
10789 rs6000_encode_section_info (decl)
10792 if (TREE_CODE (decl) == FUNCTION_DECL)
10794 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10795 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10796 && ! DECL_WEAK (decl))
10797 SYMBOL_REF_FLAG (sym_ref) = 1;
10799 if (DEFAULT_ABI == ABI_AIX)
10801 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10802 size_t len2 = strlen (XSTR (sym_ref, 0));
10803 char *str = alloca (len1 + len2 + 1);
10806 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10808 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10811 else if (rs6000_sdata != SDATA_NONE
10812 && DEFAULT_ABI == ABI_V4
10813 && TREE_CODE (decl) == VAR_DECL)
10815 int size = int_size_in_bytes (TREE_TYPE (decl));
10816 tree section_name = DECL_SECTION_NAME (decl);
10817 const char *name = (char *)0;
10822 if (TREE_CODE (section_name) == STRING_CST)
10824 name = TREE_STRING_POINTER (section_name);
10825 len = TREE_STRING_LENGTH (section_name);
10831 if ((size > 0 && size <= g_switch_value)
10833 && ((len == sizeof (".sdata") - 1
10834 && strcmp (name, ".sdata") == 0)
10835 || (len == sizeof (".sdata2") - 1
10836 && strcmp (name, ".sdata2") == 0)
10837 || (len == sizeof (".sbss") - 1
10838 && strcmp (name, ".sbss") == 0)
10839 || (len == sizeof (".sbss2") - 1
10840 && strcmp (name, ".sbss2") == 0)
10841 || (len == sizeof (".PPC.EMB.sdata0") - 1
10842 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10843 || (len == sizeof (".PPC.EMB.sbss0") - 1
10844 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10846 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10847 size_t len = strlen (XSTR (sym_ref, 0));
10848 char *str = alloca (len + 2);
10851 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10852 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10857 #endif /* USING_ELFOS_H */
10860 /* Return a REG that occurs in ADDR with coefficient 1.
10861 ADDR can be effectively incremented by incrementing REG.
10863 r0 is special and we must not select it as an address
10864 register by this routine since our caller will try to
10865 increment the returned register via an "la" instruction. */
10868 find_addr_reg (addr)
10871 while (GET_CODE (addr) == PLUS)
10873 if (GET_CODE (XEXP (addr, 0)) == REG
10874 && REGNO (XEXP (addr, 0)) != 0)
10875 addr = XEXP (addr, 0);
10876 else if (GET_CODE (XEXP (addr, 1)) == REG
10877 && REGNO (XEXP (addr, 1)) != 0)
10878 addr = XEXP (addr, 1);
10879 else if (CONSTANT_P (XEXP (addr, 0)))
10880 addr = XEXP (addr, 1);
10881 else if (CONSTANT_P (XEXP (addr, 1)))
10882 addr = XEXP (addr, 0);
10886 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10892 rs6000_fatal_bad_address (op)
10895 fatal_insn ("bad address", op);
10898 /* Called to register all of our global variables with the garbage
10902 rs6000_add_gc_roots ()
10904 ggc_add_rtx_root (&rs6000_compare_op0, 1);
10905 ggc_add_rtx_root (&rs6000_compare_op1, 1);
10907 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
10908 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
10909 toc_hash_mark_table);
10912 machopic_add_gc_roots ();
10919 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
10920 reference and a constant. */
10923 symbolic_operand (op)
10926 switch (GET_CODE (op))
10933 return (GET_CODE (op) == SYMBOL_REF ||
10934 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
10935 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
10936 && GET_CODE (XEXP (op, 1)) == CONST_INT);
10943 #ifdef RS6000_LONG_BRANCH
10945 static tree stub_list = 0;
10947 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
10948 procedure calls to the linked list. */
10951 add_compiler_stub (label_name, function_name, line_number)
10953 tree function_name;
10956 tree stub = build_tree_list (function_name, label_name);
10957 TREE_TYPE (stub) = build_int_2 (line_number, 0);
10958 TREE_CHAIN (stub) = stub_list;
10962 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
10963 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
10964 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
10966 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10967 handling procedure calls from the linked list and initializes the
10971 output_compiler_stub ()
10974 char label_buf[256];
10976 tree tmp_stub, stub;
10979 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10981 fprintf (asm_out_file,
10982 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10984 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10985 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10986 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10987 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10989 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10991 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10994 label_buf[0] = '_';
10995 strcpy (label_buf+1,
10996 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10999 strcpy (tmp_buf, "lis r12,hi16(");
11000 strcat (tmp_buf, label_buf);
11001 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11002 strcat (tmp_buf, label_buf);
11003 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11004 output_asm_insn (tmp_buf, 0);
11006 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11007 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11008 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11009 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11015 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11016 already there or not. */
11019 no_previous_def (function_name)
11020 tree function_name;
11023 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11024 if (function_name == STUB_FUNCTION_NAME (stub))
11029 /* GET_PREV_LABEL gets the label name from the previous definition of
11033 get_prev_label (function_name)
11034 tree function_name;
11037 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11038 if (function_name == STUB_FUNCTION_NAME (stub))
11039 return STUB_LABEL_NAME (stub);
11043 /* INSN is either a function call or a millicode call. It may have an
11044 unconditional jump in its delay slot.
11046 CALL_DEST is the routine we are calling. */
11049 output_call (insn, call_dest, operand_number)
11052 int operand_number;
11054 static char buf[256];
11055 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11058 tree funname = get_identifier (XSTR (call_dest, 0));
11060 if (no_previous_def (funname))
11063 rtx label_rtx = gen_label_rtx ();
11064 char *label_buf, temp_buf[256];
11065 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11066 CODE_LABEL_NUMBER (label_rtx));
11067 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11068 labelname = get_identifier (label_buf);
11069 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11071 line_number = NOTE_LINE_NUMBER (insn);
11072 add_compiler_stub (labelname, funname, line_number);
11075 labelname = get_prev_label (funname);
11077 sprintf (buf, "jbsr %%z%d,%.246s",
11078 operand_number, IDENTIFIER_POINTER (labelname));
11083 sprintf (buf, "bl %%z%d", operand_number);
11088 #endif /* RS6000_LONG_BRANCH */
11090 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11092 const char *const symbol_ = (SYMBOL); \
11093 char *buffer_ = (BUF); \
11094 if (symbol_[0] == '"') \
11096 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11098 else if (name_needs_quotes(symbol_)) \
11100 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11104 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11109 /* Generate PIC and indirect symbol stubs. */
11112 machopic_output_stub (file, symb, stub)
11114 const char *symb, *stub;
11116 unsigned int length;
11117 char *symbol_name, *lazy_ptr_name;
11118 char *local_label_0;
11119 static int label = 0;
11121 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11122 STRIP_NAME_ENCODING (symb, symb);
11126 length = strlen (symb);
11127 symbol_name = alloca (length + 32);
11128 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11130 lazy_ptr_name = alloca (length + 32);
11131 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11133 local_label_0 = alloca (length + 32);
11134 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11137 machopic_picsymbol_stub_section ();
11139 machopic_symbol_stub_section ();
11141 fprintf (file, "%s:\n", stub);
11142 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11146 fprintf (file, "\tmflr r0\n");
11147 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11148 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11149 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11150 lazy_ptr_name, local_label_0);
11151 fprintf (file, "\tmtlr r0\n");
11152 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11153 lazy_ptr_name, local_label_0);
11154 fprintf (file, "\tmtctr r12\n");
11155 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11156 lazy_ptr_name, local_label_0);
11157 fprintf (file, "\tbctr\n");
11160 fprintf (file, "non-pure not supported\n");
11162 machopic_lazy_symbol_ptr_section ();
11163 fprintf (file, "%s:\n", lazy_ptr_name);
11164 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11165 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11168 /* Legitimize PIC addresses. If the address is already
11169 position-independent, we return ORIG. Newly generated
11170 position-independent addresses go into a reg. This is REG if non
11171 zero, otherwise we allocate register(s) as necessary. */
11173 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11176 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11178 enum machine_mode mode;
11183 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11184 reg = gen_reg_rtx (Pmode);
11186 if (GET_CODE (orig) == CONST)
11188 if (GET_CODE (XEXP (orig, 0)) == PLUS
11189 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11192 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11195 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11198 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11204 if (GET_CODE (offset) == CONST_INT)
11206 if (SMALL_INT (offset))
11207 return plus_constant (base, INTVAL (offset));
11208 else if (! reload_in_progress && ! reload_completed)
11209 offset = force_reg (Pmode, offset);
11212 rtx mem = force_const_mem (Pmode, orig);
11213 return machopic_legitimize_pic_address (mem, Pmode, reg);
11216 return gen_rtx (PLUS, Pmode, base, offset);
11219 /* Fall back on generic machopic code. */
11220 return machopic_legitimize_pic_address (orig, mode, reg);
11223 /* This is just a placeholder to make linking work without having to
11224 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11225 ever needed for Darwin (not too likely!) this would have to get a
11226 real definition. */
11233 #endif /* TARGET_MACHO */
11236 static unsigned int
11237 rs6000_elf_section_type_flags (decl, name, reloc)
11242 unsigned int flags = default_section_type_flags (decl, name, reloc);
11244 if (TARGET_RELOCATABLE)
11245 flags |= SECTION_WRITE;
11250 /* Record an element in the table of global constructors. SYMBOL is
11251 a SYMBOL_REF of the function to be called; PRIORITY is a number
11252 between 0 and MAX_INIT_PRIORITY.
11254 This differs from default_named_section_asm_out_constructor in
11255 that we have special handling for -mrelocatable. */
11258 rs6000_elf_asm_out_constructor (symbol, priority)
11262 const char *section = ".ctors";
11265 if (priority != DEFAULT_INIT_PRIORITY)
11267 sprintf (buf, ".ctors.%.5u",
11268 /* Invert the numbering so the linker puts us in the proper
11269 order; constructors are run from right to left, and the
11270 linker sorts in increasing order. */
11271 MAX_INIT_PRIORITY - priority);
11275 named_section_flags (section, SECTION_WRITE);
11276 assemble_align (POINTER_SIZE);
11278 if (TARGET_RELOCATABLE)
11280 fputs ("\t.long (", asm_out_file);
11281 output_addr_const (asm_out_file, symbol);
11282 fputs (")@fixup\n", asm_out_file);
11285 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11289 rs6000_elf_asm_out_destructor (symbol, priority)
11293 const char *section = ".dtors";
11296 if (priority != DEFAULT_INIT_PRIORITY)
11298 sprintf (buf, ".dtors.%.5u",
11299 /* Invert the numbering so the linker puts us in the proper
11300 order; constructors are run from right to left, and the
11301 linker sorts in increasing order. */
11302 MAX_INIT_PRIORITY - priority);
11306 named_section_flags (section, SECTION_WRITE);
11307 assemble_align (POINTER_SIZE);
11309 if (TARGET_RELOCATABLE)
11311 fputs ("\t.long (", asm_out_file);
11312 output_addr_const (asm_out_file, symbol);
11313 fputs (")@fixup\n", asm_out_file);
11316 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11320 #ifdef OBJECT_FORMAT_COFF
11322 xcoff_asm_named_section (name, flags)
11324 unsigned int flags ATTRIBUTE_UNUSED;
11326 fprintf (asm_out_file, "\t.csect %s\n", name);