1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
51 #ifndef TARGET_NO_PROTOTYPE
52 #define TARGET_NO_PROTOTYPE 0
55 #define min(A,B) ((A) < (B) ? (A) : (B))
56 #define max(A,B) ((A) > (B) ? (A) : (B))
60 enum processor_type rs6000_cpu;
61 struct rs6000_cpu_select rs6000_select[3] =
63 /* switch name, tune arch */
64 { (const char *)0, "--with-cpu=", 1, 1 },
65 { (const char *)0, "-mcpu=", 1, 1 },
66 { (const char *)0, "-mtune=", 1, 0 },
69 /* Size of long double */
70 const char *rs6000_long_double_size_string;
71 int rs6000_long_double_type_size;
73 /* Whether -mabi=altivec has appeared */
74 int rs6000_altivec_abi;
76 /* Set to non-zero once AIX common-mode calls have been defined. */
77 static int common_mode_defined;
79 /* Save information from a "cmpxx" operation until the branch or scc is
81 rtx rs6000_compare_op0, rs6000_compare_op1;
82 int rs6000_compare_fp_p;
84 /* Label number of label created for -mrelocatable, to call to so we can
85 get the address of the GOT section */
86 int rs6000_pic_labelno;
89 /* Which abi to adhere to */
90 const char *rs6000_abi_name = RS6000_ABI_NAME;
92 /* Semantics of the small data area */
93 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
95 /* Which small data model to use */
96 const char *rs6000_sdata_name = (char *)0;
98 /* Counter for labels which are to be placed in .fixup. */
102 /* ABI enumeration available for subtarget to use. */
103 enum rs6000_abi rs6000_current_abi;
105 /* ABI string from -mabi= option. */
106 const char *rs6000_abi_string;
109 const char *rs6000_debug_name;
110 int rs6000_debug_stack; /* debug stack applications */
111 int rs6000_debug_arg; /* debug argument handling */
113 /* Flag to say the TOC is initialized */
115 char toc_label_name[10];
117 /* Alias set for saves and restores from the rs6000 stack. */
118 static int rs6000_sr_alias_set;
120 static void rs6000_add_gc_roots PARAMS ((void));
121 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
122 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
123 static void validate_condition_mode
124 PARAMS ((enum rtx_code, enum machine_mode));
125 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
126 static void rs6000_maybe_dead PARAMS ((rtx));
127 static void rs6000_emit_stack_tie PARAMS ((void));
128 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
129 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
130 static unsigned rs6000_hash_constant PARAMS ((rtx));
131 static unsigned toc_hash_function PARAMS ((const void *));
132 static int toc_hash_eq PARAMS ((const void *, const void *));
133 static int toc_hash_mark_entry PARAMS ((void **, void *));
134 static void toc_hash_mark_table PARAMS ((void *));
135 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
136 static void rs6000_free_machine_status PARAMS ((struct function *));
137 static void rs6000_init_machine_status PARAMS ((struct function *));
138 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
139 static int rs6000_ra_ever_killed PARAMS ((void));
140 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
141 const struct attribute_spec rs6000_attribute_table[];
142 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
143 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
144 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
145 HOST_WIDE_INT, HOST_WIDE_INT));
147 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
149 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
150 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
152 #ifdef OBJECT_FORMAT_COFF
153 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
155 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
156 static int rs6000_adjust_priority PARAMS ((rtx, int));
157 static int rs6000_issue_rate PARAMS ((void));
159 static void rs6000_init_builtins PARAMS ((void));
160 static void altivec_init_builtins PARAMS ((void));
161 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
162 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
163 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
164 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static void rs6000_parse_abi_options PARAMS ((void));
167 static int first_altivec_reg_to_save PARAMS ((void));
168 static unsigned int compute_vrsave_mask PARAMS ((void));
169 static void is_altivec_return_reg PARAMS ((rtx, void *));
170 int vrsave_operation PARAMS ((rtx, enum machine_mode));
171 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
172 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
174 /* Default register names. */
175 char rs6000_reg_names[][8] =
177 "0", "1", "2", "3", "4", "5", "6", "7",
178 "8", "9", "10", "11", "12", "13", "14", "15",
179 "16", "17", "18", "19", "20", "21", "22", "23",
180 "24", "25", "26", "27", "28", "29", "30", "31",
181 "0", "1", "2", "3", "4", "5", "6", "7",
182 "8", "9", "10", "11", "12", "13", "14", "15",
183 "16", "17", "18", "19", "20", "21", "22", "23",
184 "24", "25", "26", "27", "28", "29", "30", "31",
185 "mq", "lr", "ctr","ap",
186 "0", "1", "2", "3", "4", "5", "6", "7",
188 /* AltiVec registers. */
189 "0", "1", "2", "3", "4", "5", "6", "7",
190 "8", "9", "10", "11", "12", "13", "14", "15",
191 "16", "17", "18", "19", "20", "21", "22", "23",
192 "24", "25", "26", "27", "28", "29", "30", "31",
196 #ifdef TARGET_REGNAMES
197 static const char alt_reg_names[][8] =
199 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
200 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
201 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
202 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
203 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
204 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
205 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
206 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
207 "mq", "lr", "ctr", "ap",
208 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
210 /* AltiVec registers. */
211 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
212 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
213 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
214 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
219 #ifndef MASK_STRICT_ALIGN
220 #define MASK_STRICT_ALIGN 0
223 /* Initialize the GCC target structure. */
224 #undef TARGET_ATTRIBUTE_TABLE
225 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
227 #undef TARGET_ASM_ALIGNED_DI_OP
228 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
230 /* Default unaligned ops are only provided for ELF. Find the ops needed
231 for non-ELF systems. */
232 #ifndef OBJECT_FORMAT_ELF
233 #ifdef OBJECT_FORMAT_COFF
234 /* For ECOFF. rs6000_assemble_integer will handle unaligned DIs on
236 #undef TARGET_ASM_UNALIGNED_HI_OP
237 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
238 #undef TARGET_ASM_UNALIGNED_SI_OP
239 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
240 #undef TARGET_ASM_UNALIGNED_DI_OP
241 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
244 #undef TARGET_ASM_UNALIGNED_HI_OP
245 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
246 #undef TARGET_ASM_UNALIGNED_SI_OP
247 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
251 /* This hook deals with fixups for relocatable code and DI-mode objects
253 #undef TARGET_ASM_INTEGER
254 #define TARGET_ASM_INTEGER rs6000_assemble_integer
256 #undef TARGET_ASM_FUNCTION_PROLOGUE
257 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
258 #undef TARGET_ASM_FUNCTION_EPILOGUE
259 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
262 #undef TARGET_SECTION_TYPE_FLAGS
263 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
266 #undef TARGET_SCHED_ISSUE_RATE
267 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
268 #undef TARGET_SCHED_ADJUST_COST
269 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
270 #undef TARGET_SCHED_ADJUST_PRIORITY
271 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
273 #undef TARGET_INIT_BUILTINS
274 #define TARGET_INIT_BUILTINS rs6000_init_builtins
276 #undef TARGET_EXPAND_BUILTIN
277 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
279 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
280 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
282 struct gcc_target targetm = TARGET_INITIALIZER;
284 /* Override command line options. Mostly we process the processor
285 type and sometimes adjust other TARGET_ options. */
288 rs6000_override_options (default_cpu)
289 const char *default_cpu;
292 struct rs6000_cpu_select *ptr;
294 /* Simplify the entries below by making a mask for any POWER
295 variant and any PowerPC variant. */
297 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
298 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
299 | MASK_PPC_GFXOPT | MASK_POWERPC64)
300 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
304 const char *const name; /* Canonical processor name. */
305 const enum processor_type processor; /* Processor type enum value. */
306 const int target_enable; /* Target flags to enable. */
307 const int target_disable; /* Target flags to disable. */
308 } const processor_target_table[]
309 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
310 POWER_MASKS | POWERPC_MASKS},
311 {"power", PROCESSOR_POWER,
312 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
313 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
314 {"power2", PROCESSOR_POWER,
315 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
316 POWERPC_MASKS | MASK_NEW_MNEMONICS},
317 {"power3", PROCESSOR_PPC630,
318 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
319 POWER_MASKS | MASK_PPC_GPOPT},
320 {"powerpc", PROCESSOR_POWERPC,
321 MASK_POWERPC | MASK_NEW_MNEMONICS,
322 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
323 {"powerpc64", PROCESSOR_POWERPC64,
324 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
325 POWER_MASKS | POWERPC_OPT_MASKS},
326 {"rios", PROCESSOR_RIOS1,
327 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
328 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
329 {"rios1", PROCESSOR_RIOS1,
330 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
331 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
332 {"rsc", PROCESSOR_PPC601,
333 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
334 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
335 {"rsc1", PROCESSOR_PPC601,
336 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
337 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
338 {"rios2", PROCESSOR_RIOS2,
339 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
340 POWERPC_MASKS | MASK_NEW_MNEMONICS},
341 {"rs64a", PROCESSOR_RS64A,
342 MASK_POWERPC | MASK_NEW_MNEMONICS,
343 POWER_MASKS | POWERPC_OPT_MASKS},
344 {"401", PROCESSOR_PPC403,
345 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
346 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
347 {"403", PROCESSOR_PPC403,
348 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
349 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
350 {"405", PROCESSOR_PPC405,
351 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
352 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
353 {"505", PROCESSOR_MPCCORE,
354 MASK_POWERPC | MASK_NEW_MNEMONICS,
355 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
356 {"601", PROCESSOR_PPC601,
357 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
358 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
359 {"602", PROCESSOR_PPC603,
360 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
361 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
362 {"603", PROCESSOR_PPC603,
363 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
364 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
365 {"603e", PROCESSOR_PPC603,
366 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
367 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
368 {"ec603e", PROCESSOR_PPC603,
369 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
370 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
371 {"604", PROCESSOR_PPC604,
372 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
373 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
374 {"604e", PROCESSOR_PPC604e,
375 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
376 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
377 {"620", PROCESSOR_PPC620,
378 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
379 POWER_MASKS | MASK_PPC_GPOPT},
380 {"630", PROCESSOR_PPC630,
381 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
382 POWER_MASKS | MASK_PPC_GPOPT},
383 {"740", PROCESSOR_PPC750,
384 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
385 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
386 {"750", PROCESSOR_PPC750,
387 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
388 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
389 {"7400", PROCESSOR_PPC7400,
390 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
391 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
392 {"7450", PROCESSOR_PPC7450,
393 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
394 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
395 {"801", PROCESSOR_MPCCORE,
396 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
397 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
398 {"821", PROCESSOR_MPCCORE,
399 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
400 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
401 {"823", PROCESSOR_MPCCORE,
402 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
403 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
404 {"860", PROCESSOR_MPCCORE,
405 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
406 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
408 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
410 /* Save current -mmultiple/-mno-multiple status. */
411 int multiple = TARGET_MULTIPLE;
412 /* Save current -mstring/-mno-string status. */
413 int string = TARGET_STRING;
415 /* Identify the processor type. */
416 rs6000_select[0].string = default_cpu;
417 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
419 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
421 ptr = &rs6000_select[i];
422 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
424 for (j = 0; j < ptt_size; j++)
425 if (! strcmp (ptr->string, processor_target_table[j].name))
428 rs6000_cpu = processor_target_table[j].processor;
432 target_flags |= processor_target_table[j].target_enable;
433 target_flags &= ~processor_target_table[j].target_disable;
439 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
443 /* If we are optimizing big endian systems for space, use the store
444 multiple instructions. */
445 if (BYTES_BIG_ENDIAN && optimize_size)
446 target_flags |= MASK_MULTIPLE;
448 /* If -mmultiple or -mno-multiple was explicitly used, don't
449 override with the processor default */
450 if (TARGET_MULTIPLE_SET)
451 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
453 /* If -mstring or -mno-string was explicitly used, don't override
454 with the processor default. */
455 if (TARGET_STRING_SET)
456 target_flags = (target_flags & ~MASK_STRING) | string;
458 /* Don't allow -mmultiple or -mstring on little endian systems
459 unless the cpu is a 750, because the hardware doesn't support the
460 instructions used in little endian mode, and causes an alignment
461 trap. The 750 does not cause an alignment trap (except when the
462 target is unaligned). */
464 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
468 target_flags &= ~MASK_MULTIPLE;
469 if (TARGET_MULTIPLE_SET)
470 warning ("-mmultiple is not supported on little endian systems");
475 target_flags &= ~MASK_STRING;
476 if (TARGET_STRING_SET)
477 warning ("-mstring is not supported on little endian systems");
481 if (flag_pic && DEFAULT_ABI == ABI_AIX)
483 warning ("-f%s ignored (all code is position independent)",
484 (flag_pic > 1) ? "PIC" : "pic");
488 #ifdef XCOFF_DEBUGGING_INFO
489 if (flag_function_sections && (write_symbols != NO_DEBUG)
490 && DEFAULT_ABI == ABI_AIX)
492 warning ("-ffunction-sections disabled on AIX when debugging");
493 flag_function_sections = 0;
496 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
498 warning ("-fdata-sections not supported on AIX");
499 flag_data_sections = 0;
503 /* Set debug flags */
504 if (rs6000_debug_name)
506 if (! strcmp (rs6000_debug_name, "all"))
507 rs6000_debug_stack = rs6000_debug_arg = 1;
508 else if (! strcmp (rs6000_debug_name, "stack"))
509 rs6000_debug_stack = 1;
510 else if (! strcmp (rs6000_debug_name, "arg"))
511 rs6000_debug_arg = 1;
513 error ("unknown -mdebug-%s switch", rs6000_debug_name);
516 /* Set size of long double */
517 rs6000_long_double_type_size = 64;
518 if (rs6000_long_double_size_string)
521 int size = strtol (rs6000_long_double_size_string, &tail, 10);
522 if (*tail != '\0' || (size != 64 && size != 128))
523 error ("Unknown switch -mlong-double-%s",
524 rs6000_long_double_size_string);
526 rs6000_long_double_type_size = size;
529 /* Handle -mabi= options. */
530 rs6000_parse_abi_options ();
532 #ifdef TARGET_REGNAMES
533 /* If the user desires alternate register names, copy in the
534 alternate names now. */
536 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
539 #ifdef SUBTARGET_OVERRIDE_OPTIONS
540 SUBTARGET_OVERRIDE_OPTIONS;
542 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
543 SUBSUBTARGET_OVERRIDE_OPTIONS;
546 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
547 If -maix-struct-return or -msvr4-struct-return was explicitly
548 used, don't override with the ABI default. */
549 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
551 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
552 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
554 target_flags |= MASK_AIX_STRUCT_RET;
557 /* Register global variables with the garbage collector. */
558 rs6000_add_gc_roots ();
560 /* Allocate an alias set for register saves & restores from stack. */
561 rs6000_sr_alias_set = new_alias_set ();
564 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
566 /* We can only guarantee the availability of DI pseudo-ops when
567 assembling for 64-bit targets. */
568 if (!TARGET_POWERPC64)
570 targetm.asm_out.aligned_op.di = NULL;
571 targetm.asm_out.unaligned_op.di = NULL;
574 /* Arrange to save and restore machine status around nested functions. */
575 init_machine_status = rs6000_init_machine_status;
576 free_machine_status = rs6000_free_machine_status;
579 /* Handle -mabi= options. */
581 rs6000_parse_abi_options ()
583 if (rs6000_abi_string == 0)
585 else if (! strcmp (rs6000_abi_string, "altivec"))
586 rs6000_altivec_abi = 1;
588 error ("unknown ABI specified: '%s'", rs6000_abi_string);
592 optimization_options (level, size)
593 int level ATTRIBUTE_UNUSED;
594 int size ATTRIBUTE_UNUSED;
598 /* Do anything needed at the start of the asm file. */
601 rs6000_file_start (file, default_cpu)
603 const char *default_cpu;
607 const char *start = buffer;
608 struct rs6000_cpu_select *ptr;
610 if (flag_verbose_asm)
612 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
613 rs6000_select[0].string = default_cpu;
615 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
617 ptr = &rs6000_select[i];
618 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
620 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
626 switch (rs6000_sdata)
628 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
629 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
630 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
631 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
634 if (rs6000_sdata && g_switch_value)
636 fprintf (file, "%s -G %d", start, g_switch_value);
647 /* Create a CONST_DOUBLE from a string. */
650 rs6000_float_const (string, mode)
652 enum machine_mode mode;
654 REAL_VALUE_TYPE value;
655 value = REAL_VALUE_ATOF (string, mode);
656 return immed_real_const_1 (value, mode);
659 /* Return non-zero if this function is known to have a null epilogue. */
664 if (reload_completed)
666 rs6000_stack_t *info = rs6000_stack_info ();
668 if (info->first_gp_reg_save == 32
669 && info->first_fp_reg_save == 64
670 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
673 && info->vrsave_mask == 0
681 /* Returns 1 always. */
684 any_operand (op, mode)
685 rtx op ATTRIBUTE_UNUSED;
686 enum machine_mode mode ATTRIBUTE_UNUSED;
691 /* Returns 1 if op is the count register. */
693 count_register_operand (op, mode)
695 enum machine_mode mode ATTRIBUTE_UNUSED;
697 if (GET_CODE (op) != REG)
700 if (REGNO (op) == COUNT_REGISTER_REGNUM)
703 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
710 xer_operand (op, mode)
712 enum machine_mode mode ATTRIBUTE_UNUSED;
714 if (GET_CODE (op) != REG)
717 if (XER_REGNO_P (REGNO (op)))
723 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
724 by such constants completes more quickly. */
727 s8bit_cint_operand (op, mode)
729 enum machine_mode mode ATTRIBUTE_UNUSED;
731 return ( GET_CODE (op) == CONST_INT
732 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
735 /* Return 1 if OP is a constant that can fit in a D field. */
738 short_cint_operand (op, mode)
740 enum machine_mode mode ATTRIBUTE_UNUSED;
742 return (GET_CODE (op) == CONST_INT
743 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
746 /* Similar for an unsigned D field. */
749 u_short_cint_operand (op, mode)
751 enum machine_mode mode ATTRIBUTE_UNUSED;
753 return (GET_CODE (op) == CONST_INT
754 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
757 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
760 non_short_cint_operand (op, mode)
762 enum machine_mode mode ATTRIBUTE_UNUSED;
764 return (GET_CODE (op) == CONST_INT
765 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
768 /* Returns 1 if OP is a CONST_INT that is a positive value
769 and an exact power of 2. */
772 exact_log2_cint_operand (op, mode)
774 enum machine_mode mode ATTRIBUTE_UNUSED;
776 return (GET_CODE (op) == CONST_INT
778 && exact_log2 (INTVAL (op)) >= 0);
781 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
785 gpc_reg_operand (op, mode)
787 enum machine_mode mode;
789 return (register_operand (op, mode)
790 && (GET_CODE (op) != REG
791 || (REGNO (op) >= ARG_POINTER_REGNUM
792 && !XER_REGNO_P (REGNO (op)))
793 || REGNO (op) < MQ_REGNO));
796 /* Returns 1 if OP is either a pseudo-register or a register denoting a
800 cc_reg_operand (op, mode)
802 enum machine_mode mode;
804 return (register_operand (op, mode)
805 && (GET_CODE (op) != REG
806 || REGNO (op) >= FIRST_PSEUDO_REGISTER
807 || CR_REGNO_P (REGNO (op))));
810 /* Returns 1 if OP is either a pseudo-register or a register denoting a
811 CR field that isn't CR0. */
814 cc_reg_not_cr0_operand (op, mode)
816 enum machine_mode mode;
818 return (register_operand (op, mode)
819 && (GET_CODE (op) != REG
820 || REGNO (op) >= FIRST_PSEUDO_REGISTER
821 || CR_REGNO_NOT_CR0_P (REGNO (op))));
824 /* Returns 1 if OP is either a constant integer valid for a D-field or
825 a non-special register. If a register, it must be in the proper
826 mode unless MODE is VOIDmode. */
829 reg_or_short_operand (op, mode)
831 enum machine_mode mode;
833 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
836 /* Similar, except check if the negation of the constant would be
837 valid for a D-field. */
840 reg_or_neg_short_operand (op, mode)
842 enum machine_mode mode;
844 if (GET_CODE (op) == CONST_INT)
845 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
847 return gpc_reg_operand (op, mode);
850 /* Return 1 if the operand is either a register or an integer whose
851 high-order 16 bits are zero. */
854 reg_or_u_short_operand (op, mode)
856 enum machine_mode mode;
858 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
861 /* Return 1 is the operand is either a non-special register or ANY
865 reg_or_cint_operand (op, mode)
867 enum machine_mode mode;
869 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
872 /* Return 1 is the operand is either a non-special register or ANY
873 32-bit signed constant integer. */
876 reg_or_arith_cint_operand (op, mode)
878 enum machine_mode mode;
880 return (gpc_reg_operand (op, mode)
881 || (GET_CODE (op) == CONST_INT
882 #if HOST_BITS_PER_WIDE_INT != 32
883 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
884 < (unsigned HOST_WIDE_INT) 0x100000000ll)
889 /* Return 1 is the operand is either a non-special register or a 32-bit
890 signed constant integer valid for 64-bit addition. */
893 reg_or_add_cint64_operand (op, mode)
895 enum machine_mode mode;
897 return (gpc_reg_operand (op, mode)
898 || (GET_CODE (op) == CONST_INT
899 && INTVAL (op) < 0x7fff8000
900 #if HOST_BITS_PER_WIDE_INT != 32
901 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
907 /* Return 1 is the operand is either a non-special register or a 32-bit
908 signed constant integer valid for 64-bit subtraction. */
911 reg_or_sub_cint64_operand (op, mode)
913 enum machine_mode mode;
915 return (gpc_reg_operand (op, mode)
916 || (GET_CODE (op) == CONST_INT
917 && (- INTVAL (op)) < 0x7fff8000
918 #if HOST_BITS_PER_WIDE_INT != 32
919 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
925 /* Return 1 is the operand is either a non-special register or ANY
926 32-bit unsigned constant integer. */
929 reg_or_logical_cint_operand (op, mode)
931 enum machine_mode mode;
933 if (GET_CODE (op) == CONST_INT)
935 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
937 if (GET_MODE_BITSIZE (mode) <= 32)
944 return ((INTVAL (op) & GET_MODE_MASK (mode)
945 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
947 else if (GET_CODE (op) == CONST_DOUBLE)
949 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
953 return CONST_DOUBLE_HIGH (op) == 0;
956 return gpc_reg_operand (op, mode);
959 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
962 got_operand (op, mode)
964 enum machine_mode mode ATTRIBUTE_UNUSED;
966 return (GET_CODE (op) == SYMBOL_REF
967 || GET_CODE (op) == CONST
968 || GET_CODE (op) == LABEL_REF);
971 /* Return 1 if the operand is a simple references that can be loaded via
972 the GOT (labels involving addition aren't allowed). */
975 got_no_const_operand (op, mode)
977 enum machine_mode mode ATTRIBUTE_UNUSED;
979 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
982 /* Return the number of instructions it takes to form a constant in an
986 num_insns_constant_wide (value)
989 /* signed constant loadable with {cal|addi} */
990 if (CONST_OK_FOR_LETTER_P (value, 'I'))
993 /* constant loadable with {cau|addis} */
994 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
997 #if HOST_BITS_PER_WIDE_INT == 64
998 else if (TARGET_POWERPC64)
1000 HOST_WIDE_INT low = value & 0xffffffff;
1001 HOST_WIDE_INT high = value >> 32;
1003 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
1005 if (high == 0 && (low & 0x80000000) == 0)
1008 else if (high == -1 && (low & 0x80000000) != 0)
1012 return num_insns_constant_wide (high) + 1;
1015 return (num_insns_constant_wide (high)
1016 + num_insns_constant_wide (low) + 1);
1025 num_insns_constant (op, mode)
1027 enum machine_mode mode;
1029 if (GET_CODE (op) == CONST_INT)
1031 #if HOST_BITS_PER_WIDE_INT == 64
1032 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1033 && mask64_operand (op, mode))
1037 return num_insns_constant_wide (INTVAL (op));
1040 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1045 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1046 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1047 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1050 else if (GET_CODE (op) == CONST_DOUBLE)
1056 int endian = (WORDS_BIG_ENDIAN == 0);
1058 if (mode == VOIDmode || mode == DImode)
1060 high = CONST_DOUBLE_HIGH (op);
1061 low = CONST_DOUBLE_LOW (op);
1065 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1066 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1068 low = l[1 - endian];
1072 return (num_insns_constant_wide (low)
1073 + num_insns_constant_wide (high));
1077 if (high == 0 && (low & 0x80000000) == 0)
1078 return num_insns_constant_wide (low);
1080 else if (high == -1 && (low & 0x80000000) != 0)
1081 return num_insns_constant_wide (low);
1083 else if (mask64_operand (op, mode))
1087 return num_insns_constant_wide (high) + 1;
1090 return (num_insns_constant_wide (high)
1091 + num_insns_constant_wide (low) + 1);
1099 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1100 register with one instruction per word. We only do this if we can
1101 safely read CONST_DOUBLE_{LOW,HIGH}. */
1104 easy_fp_constant (op, mode)
1106 enum machine_mode mode;
1108 if (GET_CODE (op) != CONST_DOUBLE
1109 || GET_MODE (op) != mode
1110 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1113 /* Consider all constants with -msoft-float to be easy. */
1114 if (TARGET_SOFT_FLOAT && mode != DImode)
1117 /* If we are using V.4 style PIC, consider all constants to be hard. */
1118 if (flag_pic && DEFAULT_ABI == ABI_V4)
1121 #ifdef TARGET_RELOCATABLE
1122 /* Similarly if we are using -mrelocatable, consider all constants
1124 if (TARGET_RELOCATABLE)
1133 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1134 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1136 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1137 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1140 else if (mode == SFmode)
1145 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1146 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1148 return num_insns_constant_wide (l) == 1;
1151 else if (mode == DImode)
1152 return ((TARGET_POWERPC64
1153 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1154 || (num_insns_constant (op, DImode) <= 2));
1156 else if (mode == SImode)
1162 /* Return 1 if the operand is 0.0. */
1164 zero_fp_constant (op, mode)
1166 enum machine_mode mode;
1168 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1171 /* Return 1 if the operand is in volatile memory. Note that during
1172 the RTL generation phase, memory_operand does not return TRUE for
1173 volatile memory references. So this function allows us to
1174 recognize volatile references where its safe. */
1177 volatile_mem_operand (op, mode)
1179 enum machine_mode mode;
1181 if (GET_CODE (op) != MEM)
1184 if (!MEM_VOLATILE_P (op))
1187 if (mode != GET_MODE (op))
1190 if (reload_completed)
1191 return memory_operand (op, mode);
1193 if (reload_in_progress)
1194 return strict_memory_address_p (mode, XEXP (op, 0));
1196 return memory_address_p (mode, XEXP (op, 0));
1199 /* Return 1 if the operand is an offsettable memory operand. */
1202 offsettable_mem_operand (op, mode)
1204 enum machine_mode mode;
1206 return ((GET_CODE (op) == MEM)
1207 && offsettable_address_p (reload_completed || reload_in_progress,
1208 mode, XEXP (op, 0)));
1211 /* Return 1 if the operand is either an easy FP constant (see above) or
1215 mem_or_easy_const_operand (op, mode)
1217 enum machine_mode mode;
1219 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1222 /* Return 1 if the operand is either a non-special register or an item
1223 that can be used as the operand of a `mode' add insn. */
1226 add_operand (op, mode)
1228 enum machine_mode mode;
1230 if (GET_CODE (op) == CONST_INT)
1231 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1232 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1234 return gpc_reg_operand (op, mode);
1237 /* Return 1 if OP is a constant but not a valid add_operand. */
1240 non_add_cint_operand (op, mode)
1242 enum machine_mode mode ATTRIBUTE_UNUSED;
1244 return (GET_CODE (op) == CONST_INT
1245 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1246 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1249 /* Return 1 if the operand is a non-special register or a constant that
1250 can be used as the operand of an OR or XOR insn on the RS/6000. */
1253 logical_operand (op, mode)
1255 enum machine_mode mode;
1257 HOST_WIDE_INT opl, oph;
1259 if (gpc_reg_operand (op, mode))
1262 if (GET_CODE (op) == CONST_INT)
1264 opl = INTVAL (op) & GET_MODE_MASK (mode);
1266 #if HOST_BITS_PER_WIDE_INT <= 32
1267 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1271 else if (GET_CODE (op) == CONST_DOUBLE)
1273 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1276 opl = CONST_DOUBLE_LOW (op);
1277 oph = CONST_DOUBLE_HIGH (op);
1284 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1285 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1288 /* Return 1 if C is a constant that is not a logical operand (as
1289 above), but could be split into one. */
1292 non_logical_cint_operand (op, mode)
1294 enum machine_mode mode;
1296 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1297 && ! logical_operand (op, mode)
1298 && reg_or_logical_cint_operand (op, mode));
1301 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1302 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1303 Reject all ones and all zeros, since these should have been optimized
1304 away and confuse the making of MB and ME. */
1307 mask_operand (op, mode)
1309 enum machine_mode mode ATTRIBUTE_UNUSED;
1311 HOST_WIDE_INT c, lsb;
1313 if (GET_CODE (op) != CONST_INT)
1318 /* We don't change the number of transitions by inverting,
1319 so make sure we start with the LS bit zero. */
1323 /* Reject all zeros or all ones. */
1327 /* Find the first transition. */
1330 /* Invert to look for a second transition. */
1333 /* Erase first transition. */
1336 /* Find the second transition (if any). */
1339 /* Match if all the bits above are 1's (or c is zero). */
1343 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1344 It is if there are no more than one 1->0 or 0->1 transitions.
1345 Reject all ones and all zeros, since these should have been optimized
1346 away and confuse the making of MB and ME. */
1349 mask64_operand (op, mode)
1351 enum machine_mode mode;
1353 if (GET_CODE (op) == CONST_INT)
1355 HOST_WIDE_INT c, lsb;
1357 /* We don't change the number of transitions by inverting,
1358 so make sure we start with the LS bit zero. */
1363 /* Reject all zeros or all ones. */
1367 /* Find the transition, and check that all bits above are 1's. */
1371 else if (GET_CODE (op) == CONST_DOUBLE
1372 && (mode == VOIDmode || mode == DImode))
1374 HOST_WIDE_INT low, high, lsb;
1376 if (HOST_BITS_PER_WIDE_INT < 64)
1377 high = CONST_DOUBLE_HIGH (op);
1379 low = CONST_DOUBLE_LOW (op);
1382 if (HOST_BITS_PER_WIDE_INT < 64)
1389 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1393 return high == -lsb;
1397 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1403 /* Return 1 if the operand is either a non-special register or a constant
1404 that can be used as the operand of a PowerPC64 logical AND insn. */
1407 and64_operand (op, mode)
1409 enum machine_mode mode;
1411 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1412 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1414 return (logical_operand (op, mode) || mask64_operand (op, mode));
1417 /* Return 1 if the operand is either a non-special register or a
1418 constant that can be used as the operand of an RS/6000 logical AND insn. */
1421 and_operand (op, mode)
1423 enum machine_mode mode;
1425 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1426 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1428 return (logical_operand (op, mode) || mask_operand (op, mode));
1431 /* Return 1 if the operand is a general register or memory operand. */
1434 reg_or_mem_operand (op, mode)
1436 enum machine_mode mode;
1438 return (gpc_reg_operand (op, mode)
1439 || memory_operand (op, mode)
1440 || volatile_mem_operand (op, mode));
1443 /* Return 1 if the operand is a general register or memory operand without
1444 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1448 lwa_operand (op, mode)
1450 enum machine_mode mode;
1454 if (reload_completed && GET_CODE (inner) == SUBREG)
1455 inner = SUBREG_REG (inner);
1457 return gpc_reg_operand (inner, mode)
1458 || (memory_operand (inner, mode)
1459 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1460 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1461 && (GET_CODE (XEXP (inner, 0)) != PLUS
1462 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1463 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1466 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1467 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1471 call_operand (op, mode)
1473 enum machine_mode mode;
1475 if (mode != VOIDmode && GET_MODE (op) != mode)
1478 return (GET_CODE (op) == SYMBOL_REF
1479 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1482 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1483 this file and the function is not weakly defined. */
1486 current_file_function_operand (op, mode)
1488 enum machine_mode mode ATTRIBUTE_UNUSED;
1490 return (GET_CODE (op) == SYMBOL_REF
1491 && (SYMBOL_REF_FLAG (op)
1492 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1493 && ! DECL_WEAK (current_function_decl))));
1496 /* Return 1 if this operand is a valid input for a move insn. */
1499 input_operand (op, mode)
1501 enum machine_mode mode;
1503 /* Memory is always valid. */
1504 if (memory_operand (op, mode))
1507 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1508 if (GET_CODE (op) == CONSTANT_P_RTX)
1511 /* For floating-point, easy constants are valid. */
1512 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1514 && easy_fp_constant (op, mode))
1517 /* Allow any integer constant. */
1518 if (GET_MODE_CLASS (mode) == MODE_INT
1519 && (GET_CODE (op) == CONST_INT
1520 || GET_CODE (op) == CONST_DOUBLE))
1523 /* For floating-point or multi-word mode, the only remaining valid type
1525 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1526 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1527 return register_operand (op, mode);
1529 /* The only cases left are integral modes one word or smaller (we
1530 do not get called for MODE_CC values). These can be in any
1532 if (register_operand (op, mode))
1535 /* A SYMBOL_REF referring to the TOC is valid. */
1536 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1539 /* A constant pool expression (relative to the TOC) is valid */
1540 if (TOC_RELATIVE_EXPR_P (op))
1543 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1545 if (DEFAULT_ABI == ABI_V4
1546 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1547 && small_data_operand (op, Pmode))
1553 /* Return 1 for an operand in small memory on V.4/eabi. */
1556 small_data_operand (op, mode)
1557 rtx op ATTRIBUTE_UNUSED;
1558 enum machine_mode mode ATTRIBUTE_UNUSED;
1563 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1566 if (DEFAULT_ABI != ABI_V4)
1569 if (GET_CODE (op) == SYMBOL_REF)
1572 else if (GET_CODE (op) != CONST
1573 || GET_CODE (XEXP (op, 0)) != PLUS
1574 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1575 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1580 rtx sum = XEXP (op, 0);
1581 HOST_WIDE_INT summand;
1583 /* We have to be careful here, because it is the referenced address
1584 that must be 32k from _SDA_BASE_, not just the symbol. */
1585 summand = INTVAL (XEXP (sum, 1));
1586 if (summand < 0 || summand > g_switch_value)
1589 sym_ref = XEXP (sum, 0);
1592 if (*XSTR (sym_ref, 0) != '@')
1603 constant_pool_expr_1 (op, have_sym, have_toc)
1608 switch (GET_CODE(op))
1611 if (CONSTANT_POOL_ADDRESS_P (op))
1613 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1621 else if (! strcmp (XSTR (op, 0), toc_label_name))
1630 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1631 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1633 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1642 constant_pool_expr_p (op)
1647 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1651 toc_relative_expr_p (op)
1656 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1659 /* Try machine-dependent ways of modifying an illegitimate address
1660 to be legitimate. If we find one, return the new, valid address.
1661 This is used from only one place: `memory_address' in explow.c.
1663 OLDX is the address as it was before break_out_memory_refs was
1664 called. In some cases it is useful to look at this to decide what
1667 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1669 It is always safe for this function to do nothing. It exists to
1670 recognize opportunities to optimize the output.
1672 On RS/6000, first check for the sum of a register with a constant
1673 integer that is out of range. If so, generate code to add the
1674 constant with the low-order 16 bits masked to the register and force
1675 this result into another register (this can be done with `cau').
1676 Then generate an address of REG+(CONST&0xffff), allowing for the
1677 possibility of bit 16 being a one.
1679 Then check for the sum of a register and something not constant, try to
1680 load the other things into a register and return the sum. */
1682 rs6000_legitimize_address (x, oldx, mode)
1684 rtx oldx ATTRIBUTE_UNUSED;
1685 enum machine_mode mode;
1687 if (GET_CODE (x) == PLUS
1688 && GET_CODE (XEXP (x, 0)) == REG
1689 && GET_CODE (XEXP (x, 1)) == CONST_INT
1690 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1692 HOST_WIDE_INT high_int, low_int;
1694 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1695 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1696 if (low_int & 0x8000)
1697 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1698 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1699 GEN_INT (high_int)), 0);
1700 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1702 else if (GET_CODE (x) == PLUS
1703 && GET_CODE (XEXP (x, 0)) == REG
1704 && GET_CODE (XEXP (x, 1)) != CONST_INT
1705 && GET_MODE_NUNITS (mode) == 1
1706 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1707 && (TARGET_POWERPC64 || mode != DImode)
1710 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1711 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1713 else if (ALTIVEC_VECTOR_MODE (mode))
1717 /* Make sure both operands are registers. */
1718 if (GET_CODE (x) == PLUS)
1719 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1720 force_reg (Pmode, XEXP (x, 1)));
1722 reg = force_reg (Pmode, x);
1725 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1726 && GET_CODE (x) != CONST_INT
1727 && GET_CODE (x) != CONST_DOUBLE
1729 && GET_MODE_NUNITS (mode) == 1
1730 && (GET_MODE_BITSIZE (mode) <= 32
1731 || (TARGET_HARD_FLOAT && mode == DFmode)))
1733 rtx reg = gen_reg_rtx (Pmode);
1734 emit_insn (gen_elf_high (reg, (x)));
1735 return gen_rtx_LO_SUM (Pmode, reg, (x));
1737 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1739 && GET_CODE (x) != CONST_INT
1740 && GET_CODE (x) != CONST_DOUBLE
1742 && (TARGET_HARD_FLOAT || mode != DFmode)
1746 rtx reg = gen_reg_rtx (Pmode);
1747 emit_insn (gen_macho_high (reg, (x)));
1748 return gen_rtx_LO_SUM (Pmode, reg, (x));
1751 && CONSTANT_POOL_EXPR_P (x)
1752 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1754 return create_TOC_reference (x);
1760 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1761 that is a valid memory address for an instruction.
1762 The MODE argument is the machine mode for the MEM expression
1763 that wants to use this address.
1765 On the RS/6000, there are four valid address: a SYMBOL_REF that
1766 refers to a constant pool entry of an address (or the sum of it
1767 plus a constant), a short (16-bit signed) constant plus a register,
1768 the sum of two registers, or a register indirect, possibly with an
1769 auto-increment. For DFmode and DImode with an constant plus register,
1770 we must ensure that both words are addressable or PowerPC64 with offset
1773 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1774 32-bit DImode, TImode), indexed addressing cannot be used because
1775 adjacent memory cells are accessed by adding word-sized offsets
1776 during assembly output. */
1778 rs6000_legitimate_address (mode, x, reg_ok_strict)
1779 enum machine_mode mode;
1783 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1785 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1787 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1789 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1791 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1793 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1795 && GET_CODE (x) == PLUS
1796 && GET_CODE (XEXP (x, 0)) == REG
1797 && XEXP (x, 0) == virtual_stack_vars_rtx
1798 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1800 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1803 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1804 && (TARGET_POWERPC64 || mode != DImode)
1805 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1807 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
1812 /* Try to output insns to set TARGET equal to the constant C if it can
1813 be done in less than N insns. Do all computations in MODE.
1814 Returns the place where the output has been placed if it can be
1815 done and the insns have been emitted. If it would take more than N
1816 insns, zero is returned and no insns and emitted. */
1819 rs6000_emit_set_const (dest, mode, source, n)
1821 enum machine_mode mode;
1822 int n ATTRIBUTE_UNUSED;
1824 HOST_WIDE_INT c0, c1;
1826 if (mode == QImode || mode == HImode || mode == SImode)
1829 dest = gen_reg_rtx (mode);
1830 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1834 if (GET_CODE (source) == CONST_INT)
1836 c0 = INTVAL (source);
1839 else if (GET_CODE (source) == CONST_DOUBLE)
1841 #if HOST_BITS_PER_WIDE_INT >= 64
1842 c0 = CONST_DOUBLE_LOW (source);
1845 c0 = CONST_DOUBLE_LOW (source);
1846 c1 = CONST_DOUBLE_HIGH (source);
1852 return rs6000_emit_set_long_const (dest, c0, c1);
1855 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
1856 fall back to a straight forward decomposition. We do this to avoid
1857 exponential run times encountered when looking for longer sequences
1858 with rs6000_emit_set_const. */
1860 rs6000_emit_set_long_const (dest, c1, c2)
1862 HOST_WIDE_INT c1, c2;
1864 if (!TARGET_POWERPC64)
1866 rtx operand1, operand2;
1868 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
1870 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
1872 emit_move_insn (operand1, GEN_INT (c1));
1873 emit_move_insn (operand2, GEN_INT (c2));
1877 HOST_WIDE_INT d1, d2, d3, d4;
1879 /* Decompose the entire word */
1880 #if HOST_BITS_PER_WIDE_INT >= 64
1881 if (c2 != -(c1 < 0))
1883 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1885 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1886 c1 = (c1 - d2) >> 32;
1887 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1889 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1893 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
1895 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1899 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
1901 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
1906 /* Construct the high word */
1909 emit_move_insn (dest, GEN_INT (d4));
1911 emit_move_insn (dest,
1912 gen_rtx_PLUS (DImode, dest, GEN_INT (d3)));
1915 emit_move_insn (dest, GEN_INT (d3));
1917 /* Shift it into place */
1918 if (d3 != 0 || d4 != 0)
1919 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
1921 /* Add in the low bits. */
1923 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d2)));
1925 emit_move_insn (dest, gen_rtx_PLUS (DImode, dest, GEN_INT (d1)));
1931 /* Emit a move from SOURCE to DEST in mode MODE. */
1933 rs6000_emit_move (dest, source, mode)
1936 enum machine_mode mode;
1940 operands[1] = source;
1942 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
1943 if (GET_CODE (operands[1]) == CONST_DOUBLE
1944 && ! FLOAT_MODE_P (mode)
1945 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1947 /* FIXME. This should never happen. */
1948 /* Since it seems that it does, do the safe thing and convert
1951 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
1953 if (GET_CODE (operands[1]) == CONST_DOUBLE
1954 && ! FLOAT_MODE_P (mode)
1955 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
1956 && CONST_DOUBLE_LOW (operands[1]) >= 0)
1957 || (CONST_DOUBLE_HIGH (operands[1]) == -1
1958 && CONST_DOUBLE_LOW (operands[1]) < 0)))
1961 /* Check if GCC is setting up a block move that will end up using FP
1962 registers as temporaries. We must make sure this is acceptable. */
1963 if (GET_CODE (operands[0]) == MEM
1964 && GET_CODE (operands[1]) == MEM
1966 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
1967 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
1968 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
1969 ? 32 : MEM_ALIGN (operands[0])))
1970 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
1972 : MEM_ALIGN (operands[1]))))
1973 && ! MEM_VOLATILE_P (operands [0])
1974 && ! MEM_VOLATILE_P (operands [1]))
1976 emit_move_insn (adjust_address (operands[0], SImode, 0),
1977 adjust_address (operands[1], SImode, 0));
1978 emit_move_insn (adjust_address (operands[0], SImode, 4),
1979 adjust_address (operands[1], SImode, 4));
1983 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
1984 operands[1] = force_reg (mode, operands[1]);
1986 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
1987 && GET_CODE (operands[0]) == MEM)
1991 if (reload_in_progress || reload_completed)
1992 regnum = true_regnum (operands[1]);
1993 else if (GET_CODE (operands[1]) == REG)
1994 regnum = REGNO (operands[1]);
1998 /* If operands[1] is a register, on POWER it may have
1999 double-precision data in it, so truncate it to single
2001 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2004 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2005 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2006 operands[1] = newreg;
2010 /* Handle the case where reload calls us with an invalid address;
2011 and the case of CONSTANT_P_RTX. */
2012 if (! general_operand (operands[1], mode)
2013 || ! nonimmediate_operand (operands[0], mode)
2014 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
2016 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2020 /* FIXME: In the long term, this switch statement should go away
2021 and be replaced by a sequence of tests based on things like
2027 if (CONSTANT_P (operands[1])
2028 && GET_CODE (operands[1]) != CONST_INT)
2029 operands[1] = force_const_mem (mode, operands[1]);
2035 if (CONSTANT_P (operands[1])
2036 && ! easy_fp_constant (operands[1], mode))
2037 operands[1] = force_const_mem (mode, operands[1]);
2044 /* fixme: aldyh -- allow vector constants when they are implemented. */
2045 if (CONSTANT_P (operands[1]))
2046 operands[1] = force_const_mem (mode, operands[1]);
2051 /* Use default pattern for address of ELF small data */
2054 && DEFAULT_ABI == ABI_V4
2055 && (GET_CODE (operands[1]) == SYMBOL_REF
2056 || GET_CODE (operands[1]) == CONST)
2057 && small_data_operand (operands[1], mode))
2059 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2063 if (DEFAULT_ABI == ABI_V4
2064 && mode == Pmode && mode == SImode
2065 && flag_pic == 1 && got_operand (operands[1], mode))
2067 emit_insn (gen_movsi_got (operands[0], operands[1]));
2071 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2072 && TARGET_NO_TOC && ! flag_pic
2074 && CONSTANT_P (operands[1])
2075 && GET_CODE (operands[1]) != HIGH
2076 && GET_CODE (operands[1]) != CONST_INT)
2078 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2080 /* If this is a function address on -mcall-aixdesc,
2081 convert it to the address of the descriptor. */
2082 if (DEFAULT_ABI == ABI_AIX
2083 && GET_CODE (operands[1]) == SYMBOL_REF
2084 && XSTR (operands[1], 0)[0] == '.')
2086 const char *name = XSTR (operands[1], 0);
2088 while (*name == '.')
2090 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2091 CONSTANT_POOL_ADDRESS_P (new_ref)
2092 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2093 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2094 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2095 operands[1] = new_ref;
2098 if (DEFAULT_ABI == ABI_DARWIN)
2100 emit_insn (gen_macho_high (target, operands[1]));
2101 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2105 emit_insn (gen_elf_high (target, operands[1]));
2106 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2110 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2111 and we have put it in the TOC, we just need to make a TOC-relative
2114 && GET_CODE (operands[1]) == SYMBOL_REF
2115 && CONSTANT_POOL_EXPR_P (operands[1])
2116 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2117 get_pool_mode (operands[1])))
2119 operands[1] = create_TOC_reference (operands[1]);
2121 else if (mode == Pmode
2122 && CONSTANT_P (operands[1])
2123 && ((GET_CODE (operands[1]) != CONST_INT
2124 && ! easy_fp_constant (operands[1], mode))
2125 || (GET_CODE (operands[1]) == CONST_INT
2126 && num_insns_constant (operands[1], mode) > 2)
2127 || (GET_CODE (operands[0]) == REG
2128 && FP_REGNO_P (REGNO (operands[0]))))
2129 && GET_CODE (operands[1]) != HIGH
2130 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2131 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2133 /* Emit a USE operation so that the constant isn't deleted if
2134 expensive optimizations are turned on because nobody
2135 references it. This should only be done for operands that
2136 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2137 This should not be done for operands that contain LABEL_REFs.
2138 For now, we just handle the obvious case. */
2139 if (GET_CODE (operands[1]) != LABEL_REF)
2140 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2143 /* Darwin uses a special PIC legitimizer. */
2144 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2147 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2149 if (operands[0] != operands[1])
2150 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2155 /* If we are to limit the number of things we put in the TOC and
2156 this is a symbol plus a constant we can add in one insn,
2157 just put the symbol in the TOC and add the constant. Don't do
2158 this if reload is in progress. */
2159 if (GET_CODE (operands[1]) == CONST
2160 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2161 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2162 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2163 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2164 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2165 && ! side_effects_p (operands[0]))
2168 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2169 rtx other = XEXP (XEXP (operands[1], 0), 1);
2171 sym = force_reg (mode, sym);
2173 emit_insn (gen_addsi3 (operands[0], sym, other));
2175 emit_insn (gen_adddi3 (operands[0], sym, other));
2179 operands[1] = force_const_mem (mode, operands[1]);
2182 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2183 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2184 get_pool_constant (XEXP (operands[1], 0)),
2185 get_pool_mode (XEXP (operands[1], 0))))
2188 = gen_rtx_MEM (mode,
2189 create_TOC_reference (XEXP (operands[1], 0)));
2190 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2191 RTX_UNCHANGING_P (operands[1]) = 1;
2197 if (GET_CODE (operands[0]) == MEM
2198 && GET_CODE (XEXP (operands[0], 0)) != REG
2199 && ! reload_in_progress)
2201 = replace_equiv_address (operands[0],
2202 copy_addr_to_reg (XEXP (operands[0], 0)));
2204 if (GET_CODE (operands[1]) == MEM
2205 && GET_CODE (XEXP (operands[1], 0)) != REG
2206 && ! reload_in_progress)
2208 = replace_equiv_address (operands[1],
2209 copy_addr_to_reg (XEXP (operands[1], 0)));
2216 /* Above, we may have called force_const_mem which may have returned
2217 an invalid address. If we can, fix this up; otherwise, reload will
2218 have to deal with it. */
2219 if (GET_CODE (operands[1]) == MEM
2220 && ! memory_address_p (mode, XEXP (operands[1], 0))
2221 && ! reload_in_progress)
2222 operands[1] = adjust_address (operands[1], mode, 0);
2224 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2228 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2229 for a call to a function whose data type is FNTYPE.
2230 For a library call, FNTYPE is 0.
2232 For incoming args we set the number of arguments in the prototype large
2233 so we never return a PARALLEL. */
2236 init_cumulative_args (cum, fntype, libname, incoming)
2237 CUMULATIVE_ARGS *cum;
2239 rtx libname ATTRIBUTE_UNUSED;
2242 static CUMULATIVE_ARGS zero_cumulative;
2244 *cum = zero_cumulative;
2246 cum->fregno = FP_ARG_MIN_REG;
2247 cum->vregno = ALTIVEC_ARG_MIN_REG;
2248 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2249 cum->call_cookie = CALL_NORMAL;
2250 cum->sysv_gregno = GP_ARG_MIN_REG;
2253 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2255 else if (cum->prototype)
2256 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2257 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2258 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2261 cum->nargs_prototype = 0;
2263 cum->orig_nargs = cum->nargs_prototype;
2265 /* Check for longcall's */
2266 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2267 cum->call_cookie = CALL_LONG;
2269 if (TARGET_DEBUG_ARG)
2271 fprintf (stderr, "\ninit_cumulative_args:");
2274 tree ret_type = TREE_TYPE (fntype);
2275 fprintf (stderr, " ret code = %s,",
2276 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2279 if (cum->call_cookie & CALL_LONG)
2280 fprintf (stderr, " longcall,");
2282 fprintf (stderr, " proto = %d, nargs = %d\n",
2283 cum->prototype, cum->nargs_prototype);
2287 /* If defined, a C expression which determines whether, and in which
2288 direction, to pad out an argument with extra space. The value
2289 should be of type `enum direction': either `upward' to pad above
2290 the argument, `downward' to pad below, or `none' to inhibit
2293 For the AIX ABI structs are always stored left shifted in their
2297 function_arg_padding (mode, type)
2298 enum machine_mode mode;
2301 if (type != 0 && AGGREGATE_TYPE_P (type))
2304 /* This is the default definition. */
2305 return (! BYTES_BIG_ENDIAN
2308 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2309 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2310 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2311 ? downward : upward));
2314 /* If defined, a C expression that gives the alignment boundary, in bits,
2315 of an argument with the specified mode and type. If it is not defined,
2316 PARM_BOUNDARY is used for all arguments.
2318 V.4 wants long longs to be double word aligned. */
2321 function_arg_boundary (mode, type)
2322 enum machine_mode mode;
2323 tree type ATTRIBUTE_UNUSED;
2325 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2327 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2330 return PARM_BOUNDARY;
2333 /* Update the data in CUM to advance over an argument
2334 of mode MODE and data type TYPE.
2335 (TYPE is null for libcalls where that information may not be available.) */
2338 function_arg_advance (cum, mode, type, named)
2339 CUMULATIVE_ARGS *cum;
2340 enum machine_mode mode;
2344 cum->nargs_prototype--;
2346 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2348 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2351 cum->words += RS6000_ARG_SIZE (mode, type);
2353 else if (DEFAULT_ABI == ABI_V4)
2355 if (TARGET_HARD_FLOAT
2356 && (mode == SFmode || mode == DFmode))
2358 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2363 cum->words += cum->words & 1;
2364 cum->words += RS6000_ARG_SIZE (mode, type);
2370 int gregno = cum->sysv_gregno;
2372 /* Aggregates and IEEE quad get passed by reference. */
2373 if ((type && AGGREGATE_TYPE_P (type))
2377 n_words = RS6000_ARG_SIZE (mode, type);
2379 /* Long long is put in odd registers. */
2380 if (n_words == 2 && (gregno & 1) == 0)
2383 /* Long long is not split between registers and stack. */
2384 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2386 /* Long long is aligned on the stack. */
2388 cum->words += cum->words & 1;
2389 cum->words += n_words;
2392 /* Note: continuing to accumulate gregno past when we've started
2393 spilling to the stack indicates the fact that we've started
2394 spilling to the stack to expand_builtin_saveregs. */
2395 cum->sysv_gregno = gregno + n_words;
2398 if (TARGET_DEBUG_ARG)
2400 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2401 cum->words, cum->fregno);
2402 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2403 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2404 fprintf (stderr, "mode = %4s, named = %d\n",
2405 GET_MODE_NAME (mode), named);
2410 int align = (TARGET_32BIT && (cum->words & 1) != 0
2411 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2413 cum->words += align + RS6000_ARG_SIZE (mode, type);
2415 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2418 if (TARGET_DEBUG_ARG)
2420 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2421 cum->words, cum->fregno);
2422 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2423 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2424 fprintf (stderr, "named = %d, align = %d\n", named, align);
2429 /* Determine where to put an argument to a function.
2430 Value is zero to push the argument on the stack,
2431 or a hard register in which to store the argument.
2433 MODE is the argument's machine mode.
2434 TYPE is the data type of the argument (as a tree).
2435 This is null for libcalls where that information may
2437 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2438 the preceding args and about the function being called.
2439 NAMED is nonzero if this argument is a named parameter
2440 (otherwise it is an extra parameter matching an ellipsis).
2442 On RS/6000 the first eight words of non-FP are normally in registers
2443 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2444 Under V.4, the first 8 FP args are in registers.
2446 If this is floating-point and no prototype is specified, we use
2447 both an FP and integer register (or possibly FP reg and stack). Library
2448 functions (when TYPE is zero) always have the proper types for args,
2449 so we can pass the FP value just in one register. emit_library_function
2450 doesn't support PARALLEL anyway. */
2453 function_arg (cum, mode, type, named)
2454 CUMULATIVE_ARGS *cum;
2455 enum machine_mode mode;
2459 enum rs6000_abi abi = DEFAULT_ABI;
2461 /* Return a marker to indicate whether CR1 needs to set or clear the
2462 bit that V.4 uses to say fp args were passed in registers.
2463 Assume that we don't need the marker for software floating point,
2464 or compiler generated library calls. */
2465 if (mode == VOIDmode)
2468 && TARGET_HARD_FLOAT
2469 && cum->nargs_prototype < 0
2470 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2472 return GEN_INT (cum->call_cookie
2473 | ((cum->fregno == FP_ARG_MIN_REG)
2474 ? CALL_V4_SET_FP_ARGS
2475 : CALL_V4_CLEAR_FP_ARGS));
2478 return GEN_INT (cum->call_cookie);
2481 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2483 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2484 return gen_rtx_REG (mode, cum->vregno);
2488 else if (abi == ABI_V4)
2490 if (TARGET_HARD_FLOAT
2491 && (mode == SFmode || mode == DFmode))
2493 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2494 return gen_rtx_REG (mode, cum->fregno);
2501 int gregno = cum->sysv_gregno;
2503 /* Aggregates and IEEE quad get passed by reference. */
2504 if ((type && AGGREGATE_TYPE_P (type))
2508 n_words = RS6000_ARG_SIZE (mode, type);
2510 /* Long long is put in odd registers. */
2511 if (n_words == 2 && (gregno & 1) == 0)
2514 /* Long long is not split between registers and stack. */
2515 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2516 return gen_rtx_REG (mode, gregno);
2523 int align = (TARGET_32BIT && (cum->words & 1) != 0
2524 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2525 int align_words = cum->words + align;
2527 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2530 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2533 || ((cum->nargs_prototype > 0)
2534 /* IBM AIX extended its linkage convention definition always
2535 to require FP args after register save area hole on the
2537 && (DEFAULT_ABI != ABI_AIX
2539 || (align_words < GP_ARG_NUM_REG))))
2540 return gen_rtx_REG (mode, cum->fregno);
2542 return gen_rtx_PARALLEL (mode,
2544 gen_rtx_EXPR_LIST (VOIDmode,
2545 ((align_words >= GP_ARG_NUM_REG)
2548 + RS6000_ARG_SIZE (mode, type)
2550 /* If this is partially on the stack, then
2551 we only include the portion actually
2552 in registers here. */
2553 ? gen_rtx_REG (SImode,
2554 GP_ARG_MIN_REG + align_words)
2555 : gen_rtx_REG (mode,
2556 GP_ARG_MIN_REG + align_words))),
2558 gen_rtx_EXPR_LIST (VOIDmode,
2559 gen_rtx_REG (mode, cum->fregno),
2562 else if (align_words < GP_ARG_NUM_REG)
2563 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2569 /* For an arg passed partly in registers and partly in memory,
2570 this is the number of registers used.
2571 For args passed entirely in registers or entirely in memory, zero. */
2574 function_arg_partial_nregs (cum, mode, type, named)
2575 CUMULATIVE_ARGS *cum;
2576 enum machine_mode mode;
2578 int named ATTRIBUTE_UNUSED;
2580 if (DEFAULT_ABI == ABI_V4)
2583 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2584 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2586 if (cum->nargs_prototype >= 0)
2590 if (cum->words < GP_ARG_NUM_REG
2591 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2593 int ret = GP_ARG_NUM_REG - cum->words;
2594 if (ret && TARGET_DEBUG_ARG)
2595 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2603 /* A C expression that indicates when an argument must be passed by
2604 reference. If nonzero for an argument, a copy of that argument is
2605 made in memory and a pointer to the argument is passed instead of
2606 the argument itself. The pointer is passed in whatever way is
2607 appropriate for passing a pointer to that type.
2609 Under V.4, structures and unions are passed by reference. */
2612 function_arg_pass_by_reference (cum, mode, type, named)
2613 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2614 enum machine_mode mode ATTRIBUTE_UNUSED;
2616 int named ATTRIBUTE_UNUSED;
2618 if (DEFAULT_ABI == ABI_V4
2619 && ((type && AGGREGATE_TYPE_P (type))
2622 if (TARGET_DEBUG_ARG)
2623 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2631 /* Perform any needed actions needed for a function that is receiving a
2632 variable number of arguments.
2636 MODE and TYPE are the mode and type of the current parameter.
2638 PRETEND_SIZE is a variable that should be set to the amount of stack
2639 that must be pushed by the prolog to pretend that our caller pushed
2642 Normally, this macro will push all remaining incoming registers on the
2643 stack and set PRETEND_SIZE to the length of the registers pushed. */
2646 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2647 CUMULATIVE_ARGS *cum;
2648 enum machine_mode mode;
2654 CUMULATIVE_ARGS next_cum;
2655 int reg_size = TARGET_32BIT ? 4 : 8;
2656 rtx save_area = NULL_RTX, mem;
2657 int first_reg_offset, set;
2661 fntype = TREE_TYPE (current_function_decl);
2662 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2663 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2664 != void_type_node));
2666 /* For varargs, we do not want to skip the dummy va_dcl argument.
2667 For stdargs, we do want to skip the last named argument. */
2670 function_arg_advance (&next_cum, mode, type, 1);
2672 if (DEFAULT_ABI == ABI_V4)
2674 /* Indicate to allocate space on the stack for varargs save area. */
2675 /* ??? Does this really have to be located at a magic spot on the
2676 stack, or can we allocate this with assign_stack_local instead. */
2677 cfun->machine->sysv_varargs_p = 1;
2679 save_area = plus_constant (virtual_stack_vars_rtx,
2680 - RS6000_VARARGS_SIZE);
2682 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2686 first_reg_offset = next_cum.words;
2687 save_area = virtual_incoming_args_rtx;
2688 cfun->machine->sysv_varargs_p = 0;
2690 if (MUST_PASS_IN_STACK (mode, type))
2691 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2694 set = get_varargs_alias_set ();
2695 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2697 mem = gen_rtx_MEM (BLKmode,
2698 plus_constant (save_area,
2699 first_reg_offset * reg_size)),
2700 set_mem_alias_set (mem, set);
2701 set_mem_align (mem, BITS_PER_WORD);
2704 (GP_ARG_MIN_REG + first_reg_offset, mem,
2705 GP_ARG_NUM_REG - first_reg_offset,
2706 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2708 /* ??? Does ABI_V4 need this at all? */
2709 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2712 /* Save FP registers if needed. */
2713 if (DEFAULT_ABI == ABI_V4
2714 && TARGET_HARD_FLOAT && ! no_rtl
2715 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2717 int fregno = next_cum.fregno;
2718 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2719 rtx lab = gen_label_rtx ();
2720 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2722 emit_jump_insn (gen_rtx_SET (VOIDmode,
2724 gen_rtx_IF_THEN_ELSE (VOIDmode,
2725 gen_rtx_NE (VOIDmode, cr1,
2727 gen_rtx_LABEL_REF (VOIDmode, lab),
2730 while (fregno <= FP_ARG_V4_MAX_REG)
2732 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2733 set_mem_alias_set (mem, set);
2734 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2743 /* Create the va_list data type. */
2746 rs6000_build_va_list ()
2748 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2750 /* For AIX, prefer 'char *' because that's what the system
2751 header files like. */
2752 if (DEFAULT_ABI != ABI_V4)
2753 return build_pointer_type (char_type_node);
2755 record = make_lang_type (RECORD_TYPE);
2756 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2758 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2759 unsigned_char_type_node);
2760 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2761 unsigned_char_type_node);
2762 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2764 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2767 DECL_FIELD_CONTEXT (f_gpr) = record;
2768 DECL_FIELD_CONTEXT (f_fpr) = record;
2769 DECL_FIELD_CONTEXT (f_ovf) = record;
2770 DECL_FIELD_CONTEXT (f_sav) = record;
2772 TREE_CHAIN (record) = type_decl;
2773 TYPE_NAME (record) = type_decl;
2774 TYPE_FIELDS (record) = f_gpr;
2775 TREE_CHAIN (f_gpr) = f_fpr;
2776 TREE_CHAIN (f_fpr) = f_ovf;
2777 TREE_CHAIN (f_ovf) = f_sav;
2779 layout_type (record);
2781 /* The correct type is an array type of one element. */
2782 return build_array_type (record, build_index_type (size_zero_node));
2785 /* Implement va_start. */
2788 rs6000_va_start (stdarg_p, valist, nextarg)
2793 HOST_WIDE_INT words, n_gpr, n_fpr;
2794 tree f_gpr, f_fpr, f_ovf, f_sav;
2795 tree gpr, fpr, ovf, sav, t;
2797 /* Only SVR4 needs something special. */
2798 if (DEFAULT_ABI != ABI_V4)
2800 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
2804 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2805 f_fpr = TREE_CHAIN (f_gpr);
2806 f_ovf = TREE_CHAIN (f_fpr);
2807 f_sav = TREE_CHAIN (f_ovf);
2809 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2810 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2811 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2812 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2813 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2815 /* Count number of gp and fp argument registers used. */
2816 words = current_function_args_info.words;
2817 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2818 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2820 if (TARGET_DEBUG_ARG)
2822 fputs ("va_start: words = ", stderr);
2823 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2824 fputs (", n_gpr = ", stderr);
2825 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2826 fputs (", n_fpr = ", stderr);
2827 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2828 putc ('\n', stderr);
2831 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2832 TREE_SIDE_EFFECTS (t) = 1;
2833 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2835 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2836 TREE_SIDE_EFFECTS (t) = 1;
2837 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2839 /* Find the overflow area. */
2840 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
2842 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
2843 build_int_2 (words * UNITS_PER_WORD, 0));
2844 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
2845 TREE_SIDE_EFFECTS (t) = 1;
2846 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2848 /* Find the register save area. */
2849 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
2850 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
2851 build_int_2 (-RS6000_VARARGS_SIZE, -1));
2852 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
2853 TREE_SIDE_EFFECTS (t) = 1;
2854 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2857 /* Implement va_arg. */
2860 rs6000_va_arg (valist, type)
2863 tree f_gpr, f_fpr, f_ovf, f_sav;
2864 tree gpr, fpr, ovf, sav, reg, t, u;
2865 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
2866 rtx lab_false, lab_over, addr_rtx, r;
2868 /* For AIX, the rule is that structures are passed left-aligned in
2869 their stack slot. However, GCC does not presently do this:
2870 structures which are the same size as integer types are passed
2871 right-aligned, as if they were in fact integers. This only
2872 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
2873 if (DEFAULT_ABI != ABI_V4)
2875 HOST_WIDE_INT align, rounded_size;
2876 enum machine_mode mode;
2879 /* Compute the rounded size of the type. */
2880 align = PARM_BOUNDARY / BITS_PER_UNIT;
2881 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
2886 mode = TYPE_MODE (type);
2887 if (mode != BLKmode)
2890 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2891 if (rounded_size > align)
2894 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
2895 build_int_2 (rounded_size - adj, 0));
2898 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2899 addr_rtx = copy_to_reg (addr_rtx);
2901 /* Compute new value for AP. */
2902 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
2903 build (PLUS_EXPR, TREE_TYPE (valist), valist,
2904 build_int_2 (rounded_size, 0)));
2905 TREE_SIDE_EFFECTS (t) = 1;
2906 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2911 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2912 f_fpr = TREE_CHAIN (f_gpr);
2913 f_ovf = TREE_CHAIN (f_fpr);
2914 f_sav = TREE_CHAIN (f_ovf);
2916 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2917 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2918 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2919 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2920 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2922 size = int_size_in_bytes (type);
2923 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2925 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
2927 /* Aggregates and long doubles are passed by reference. */
2933 size = rsize = UNITS_PER_WORD;
2935 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
2937 /* FP args go in FP registers, if present. */
2946 /* Otherwise into GP registers. */
2954 /* Pull the value out of the saved registers ... */
2956 lab_false = gen_label_rtx ();
2957 lab_over = gen_label_rtx ();
2958 addr_rtx = gen_reg_rtx (Pmode);
2960 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
2961 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
2964 /* Long long is aligned in the registers. */
2967 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
2968 build_int_2 (n_reg - 1, 0));
2969 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
2970 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
2971 TREE_SIDE_EFFECTS (u) = 1;
2972 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
2976 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
2980 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
2981 TREE_SIDE_EFFECTS (u) = 1;
2983 u = build1 (CONVERT_EXPR, integer_type_node, u);
2984 TREE_SIDE_EFFECTS (u) = 1;
2986 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
2987 TREE_SIDE_EFFECTS (u) = 1;
2989 t = build (PLUS_EXPR, ptr_type_node, t, u);
2990 TREE_SIDE_EFFECTS (t) = 1;
2992 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
2994 emit_move_insn (addr_rtx, r);
2996 emit_jump_insn (gen_jump (lab_over));
2998 emit_label (lab_false);
3000 /* ... otherwise out of the overflow area. */
3002 /* Make sure we don't find reg 7 for the next int arg. */
3005 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3006 TREE_SIDE_EFFECTS (t) = 1;
3007 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3010 /* Care for on-stack alignment if needed. */
3015 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
3016 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
3020 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3022 emit_move_insn (addr_rtx, r);
3024 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3025 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3026 TREE_SIDE_EFFECTS (t) = 1;
3027 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3029 emit_label (lab_over);
3033 r = gen_rtx_MEM (Pmode, addr_rtx);
3034 set_mem_alias_set (r, get_varargs_alias_set ());
3035 emit_move_insn (addr_rtx, r);
3043 #define def_builtin(MASK, NAME, TYPE, CODE) \
3045 if ((MASK) & target_flags) \
3046 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3049 struct builtin_description
3051 const unsigned int mask;
3052 const enum insn_code icode;
3053 const char *const name;
3054 const enum rs6000_builtins code;
3056 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc) */
3057 static const struct builtin_description bdesc_3arg[] =
3059 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3060 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3061 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3062 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3063 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3064 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3065 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3066 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3067 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3068 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3069 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3070 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3071 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3072 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3073 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3074 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3075 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3076 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3077 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3080 /* Simple binary operations: VECc = foo (VECa, VECb). */
3081 static const struct builtin_description bdesc_2arg[] =
3083 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3084 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3085 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3086 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3087 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3088 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3089 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3090 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3091 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3092 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3093 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3094 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3095 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3096 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3097 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3098 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3099 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3100 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3101 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3102 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3103 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3104 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3105 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3106 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3107 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3108 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3109 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3110 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3111 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3112 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3113 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3114 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3115 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3116 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3117 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3118 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3119 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3120 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3121 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3122 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3123 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3124 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3125 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3126 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3127 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3128 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3129 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3130 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3131 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3132 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3133 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3134 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3135 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3136 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3137 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3138 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3139 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3140 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3141 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3142 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3143 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3144 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3145 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3146 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3147 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3148 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3149 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3150 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3151 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3152 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3153 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3154 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3155 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3156 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3157 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3158 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3159 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3160 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3161 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3162 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3163 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3164 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3165 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3166 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3167 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3168 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3169 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3170 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3171 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3172 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3173 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3174 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3175 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3176 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3177 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3178 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3179 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3180 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3181 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3182 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3183 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3184 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3185 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3186 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3187 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3188 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3189 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3190 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3191 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3192 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3193 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3194 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3195 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3197 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3199 static const struct builtin_description bdesc_1arg[] =
3201 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3202 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3203 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3204 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3205 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3206 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3207 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3208 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3209 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3210 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3211 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3215 altivec_expand_unop_builtin (icode, arglist, target)
3216 enum insn_code icode;
3221 tree arg0 = TREE_VALUE (arglist);
3222 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3223 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3224 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3227 || GET_MODE (target) != tmode
3228 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3229 target = gen_reg_rtx (tmode);
3231 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3232 op0 = copy_to_mode_reg (mode0, op0);
3234 pat = GEN_FCN (icode) (target, op0);
3242 altivec_expand_binop_builtin (icode, arglist, target)
3243 enum insn_code icode;
3248 tree arg0 = TREE_VALUE (arglist);
3249 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3250 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3251 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3252 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3253 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3254 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3257 || GET_MODE (target) != tmode
3258 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3259 target = gen_reg_rtx (tmode);
3261 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3262 op0 = copy_to_mode_reg (mode0, op0);
3263 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3264 op1 = copy_to_mode_reg (mode1, op1);
3266 pat = GEN_FCN (icode) (target, op0, op1);
3274 altivec_expand_ternop_builtin (icode, arglist, target)
3275 enum insn_code icode;
3280 tree arg0 = TREE_VALUE (arglist);
3281 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3282 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3283 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3284 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3285 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3286 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3287 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3288 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3289 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3292 || GET_MODE (target) != tmode
3293 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3294 target = gen_reg_rtx (tmode);
3296 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3297 op0 = copy_to_mode_reg (mode0, op0);
3298 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3299 op1 = copy_to_mode_reg (mode1, op1);
3300 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3301 op2 = copy_to_mode_reg (mode2, op2);
3303 pat = GEN_FCN (icode) (target, op0, op1, op2);
3311 altivec_expand_builtin (exp, target)
3315 struct builtin_description *d;
3317 enum insn_code icode;
3318 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3319 tree arglist = TREE_OPERAND (exp, 1);
3322 enum machine_mode tmode, mode0, mode1;
3323 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3327 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3328 icode = CODE_FOR_altivec_lvx_16qi;
3329 arg0 = TREE_VALUE (arglist);
3330 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3331 tmode = insn_data[icode].operand[0].mode;
3332 mode0 = insn_data[icode].operand[1].mode;
3335 || GET_MODE (target) != tmode
3336 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3337 target = gen_reg_rtx (tmode);
3339 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3340 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3342 pat = GEN_FCN (icode) (target, op0);
3347 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3348 icode = CODE_FOR_altivec_lvx_8hi;
3349 arg0 = TREE_VALUE (arglist);
3350 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3351 tmode = insn_data[icode].operand[0].mode;
3352 mode0 = insn_data[icode].operand[1].mode;
3355 || GET_MODE (target) != tmode
3356 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3357 target = gen_reg_rtx (tmode);
3359 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3360 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3362 pat = GEN_FCN (icode) (target, op0);
3367 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3368 icode = CODE_FOR_altivec_lvx_4si;
3369 arg0 = TREE_VALUE (arglist);
3370 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3371 tmode = insn_data[icode].operand[0].mode;
3372 mode0 = insn_data[icode].operand[1].mode;
3375 || GET_MODE (target) != tmode
3376 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3377 target = gen_reg_rtx (tmode);
3379 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3380 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3382 pat = GEN_FCN (icode) (target, op0);
3387 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3388 icode = CODE_FOR_altivec_lvx_4sf;
3389 arg0 = TREE_VALUE (arglist);
3390 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3391 tmode = insn_data[icode].operand[0].mode;
3392 mode0 = insn_data[icode].operand[1].mode;
3395 || GET_MODE (target) != tmode
3396 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3397 target = gen_reg_rtx (tmode);
3399 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3400 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3402 pat = GEN_FCN (icode) (target, op0);
3408 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3409 icode = CODE_FOR_altivec_stvx_16qi;
3410 arg0 = TREE_VALUE (arglist);
3411 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3412 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3413 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3414 mode0 = insn_data[icode].operand[0].mode;
3415 mode1 = insn_data[icode].operand[1].mode;
3417 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3418 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3419 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3420 op1 = copy_to_mode_reg (mode1, op1);
3422 pat = GEN_FCN (icode) (op0, op1);
3427 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3428 icode = CODE_FOR_altivec_stvx_8hi;
3429 arg0 = TREE_VALUE (arglist);
3430 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3431 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3432 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3433 mode0 = insn_data[icode].operand[0].mode;
3434 mode1 = insn_data[icode].operand[1].mode;
3436 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3437 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3438 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3439 op1 = copy_to_mode_reg (mode1, op1);
3441 pat = GEN_FCN (icode) (op0, op1);
3446 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3447 icode = CODE_FOR_altivec_stvx_4si;
3448 arg0 = TREE_VALUE (arglist);
3449 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3450 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3451 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3452 mode0 = insn_data[icode].operand[0].mode;
3453 mode1 = insn_data[icode].operand[1].mode;
3455 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3456 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3457 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3458 op1 = copy_to_mode_reg (mode1, op1);
3460 pat = GEN_FCN (icode) (op0, op1);
3465 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3466 icode = CODE_FOR_altivec_stvx_4sf;
3467 arg0 = TREE_VALUE (arglist);
3468 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3469 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3470 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3471 mode0 = insn_data[icode].operand[0].mode;
3472 mode1 = insn_data[icode].operand[1].mode;
3474 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3475 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3476 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3477 op1 = copy_to_mode_reg (mode1, op1);
3479 pat = GEN_FCN (icode) (op0, op1);
3485 /* Handle simple unary operations. */
3486 d = (struct builtin_description *) bdesc_1arg;
3487 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3488 if (d->code == fcode)
3489 return altivec_expand_unop_builtin (d->icode, arglist, target);
3491 /* Handle simple binary operations. */
3492 d = (struct builtin_description *) bdesc_2arg;
3493 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3494 if (d->code == fcode)
3495 return altivec_expand_binop_builtin (d->icode, arglist, target);
3497 /* Handle simple ternary operations. */
3498 d = (struct builtin_description *) bdesc_3arg;
3499 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3500 if (d->code == fcode)
3501 return altivec_expand_ternop_builtin (d->icode, arglist, target);
3507 /* Expand an expression EXP that calls a built-in function,
3508 with result going to TARGET if that's convenient
3509 (and in mode MODE if that's convenient).
3510 SUBTARGET may be used as the target for computing one of EXP's operands.
3511 IGNORE is nonzero if the value is to be ignored. */
3514 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
3517 rtx subtarget ATTRIBUTE_UNUSED;
3518 enum machine_mode mode ATTRIBUTE_UNUSED;
3519 int ignore ATTRIBUTE_UNUSED;
3522 return altivec_expand_builtin (exp, target);
3528 rs6000_init_builtins ()
3531 altivec_init_builtins ();
3535 altivec_init_builtins (void)
3537 struct builtin_description * d;
3540 tree endlink = void_list_node;
3542 tree pint_type_node = build_pointer_type (integer_type_node);
3543 tree pshort_type_node = build_pointer_type (short_integer_type_node);
3544 tree pchar_type_node = build_pointer_type (char_type_node);
3545 tree pfloat_type_node = build_pointer_type (float_type_node);
3546 tree v4sf_ftype_v4sf_v4sf_v16qi
3547 = build_function_type (V4SF_type_node,
3548 tree_cons (NULL_TREE, V4SF_type_node,
3549 tree_cons (NULL_TREE, V4SF_type_node,
3550 tree_cons (NULL_TREE,
3553 tree v4si_ftype_v4si_v4si_v16qi
3554 = build_function_type (V4SI_type_node,
3555 tree_cons (NULL_TREE, V4SI_type_node,
3556 tree_cons (NULL_TREE, V4SI_type_node,
3557 tree_cons (NULL_TREE,
3560 tree v8hi_ftype_v8hi_v8hi_v16qi
3561 = build_function_type (V8HI_type_node,
3562 tree_cons (NULL_TREE, V8HI_type_node,
3563 tree_cons (NULL_TREE, V8HI_type_node,
3564 tree_cons (NULL_TREE,
3567 tree v16qi_ftype_v16qi_v16qi_v16qi
3568 = build_function_type (V16QI_type_node,
3569 tree_cons (NULL_TREE, V16QI_type_node,
3570 tree_cons (NULL_TREE, V16QI_type_node,
3571 tree_cons (NULL_TREE,
3575 /* V4SI foo (char) */
3576 tree v4si_ftype_char
3577 = build_function_type (V4SI_type_node,
3578 tree_cons (NULL_TREE, char_type_node, endlink));
3580 /* V8HI foo (char) */
3581 tree v8hi_ftype_char
3582 = build_function_type (V8HI_type_node,
3583 tree_cons (NULL_TREE, char_type_node, endlink));
3585 /* V16QI foo (char) */
3586 tree v16qi_ftype_char
3587 = build_function_type (V16QI_type_node,
3588 tree_cons (NULL_TREE, char_type_node, endlink));
3589 /* V4SF foo (V4SF) */
3590 tree v4sf_ftype_v4sf
3591 = build_function_type (V4SF_type_node,
3592 tree_cons (NULL_TREE, V4SF_type_node, endlink));
3594 /* V4SI foo (int *). */
3595 tree v4si_ftype_pint
3596 = build_function_type (V4SI_type_node,
3597 tree_cons (NULL_TREE, pint_type_node, endlink));
3598 /* V8HI foo (short *). */
3599 tree v8hi_ftype_pshort
3600 = build_function_type (V8HI_type_node,
3601 tree_cons (NULL_TREE, pshort_type_node, endlink));
3602 /* V16QI foo (char *). */
3603 tree v16qi_ftype_pchar
3604 = build_function_type (V16QI_type_node,
3605 tree_cons (NULL_TREE, pchar_type_node, endlink));
3606 /* V4SF foo (float *). */
3607 tree v4sf_ftype_pfloat
3608 = build_function_type (V4SF_type_node,
3609 tree_cons (NULL_TREE, pfloat_type_node, endlink));
3611 /* void foo (int *, V4SI). */
3612 tree void_ftype_pint_v4si
3613 = build_function_type (void_type_node,
3614 tree_cons (NULL_TREE, pint_type_node,
3615 tree_cons (NULL_TREE, V4SI_type_node,
3617 /* void foo (short *, V8HI). */
3618 tree void_ftype_pshort_v8hi
3619 = build_function_type (void_type_node,
3620 tree_cons (NULL_TREE, pshort_type_node,
3621 tree_cons (NULL_TREE, V8HI_type_node,
3623 /* void foo (char *, V16QI). */
3624 tree void_ftype_pchar_v16qi
3625 = build_function_type (void_type_node,
3626 tree_cons (NULL_TREE, pchar_type_node,
3627 tree_cons (NULL_TREE, V16QI_type_node,
3629 /* void foo (float *, V4SF). */
3630 tree void_ftype_pfloat_v4sf
3631 = build_function_type (void_type_node,
3632 tree_cons (NULL_TREE, pfloat_type_node,
3633 tree_cons (NULL_TREE, V4SF_type_node,
3636 tree v4si_ftype_v4si_v4si
3637 = build_function_type (V4SI_type_node,
3638 tree_cons (NULL_TREE, V4SI_type_node,
3639 tree_cons (NULL_TREE, V4SI_type_node,
3641 /* These are really for the unsigned 5 bit literals */
3642 tree v4sf_ftype_v4si_char
3643 = build_function_type (V4SF_type_node,
3644 tree_cons (NULL_TREE, V4SI_type_node,
3645 tree_cons (NULL_TREE, char_type_node,
3647 tree v4si_ftype_v4sf_char
3648 = build_function_type (V4SI_type_node,
3649 tree_cons (NULL_TREE, V4SF_type_node,
3650 tree_cons (NULL_TREE, char_type_node,
3652 tree v4si_ftype_v4si_char
3653 = build_function_type (V4SI_type_node,
3654 tree_cons (NULL_TREE, V4SI_type_node,
3655 tree_cons (NULL_TREE, char_type_node,
3657 tree v8hi_ftype_v8hi_char
3658 = build_function_type (V8HI_type_node,
3659 tree_cons (NULL_TREE, V8HI_type_node,
3660 tree_cons (NULL_TREE, char_type_node,
3662 tree v16qi_ftype_v16qi_char
3663 = build_function_type (V16QI_type_node,
3664 tree_cons (NULL_TREE, V16QI_type_node,
3665 tree_cons (NULL_TREE, char_type_node,
3668 tree v4sf_ftype_v4sf_v4sf
3669 = build_function_type (V4SF_type_node,
3670 tree_cons (NULL_TREE, V4SF_type_node,
3671 tree_cons (NULL_TREE, V4SF_type_node,
3673 tree v4sf_ftype_v4sf_v4sf_v4si
3674 = build_function_type (V4SF_type_node,
3675 tree_cons (NULL_TREE, V4SF_type_node,
3676 tree_cons (NULL_TREE, V4SF_type_node,
3677 tree_cons (NULL_TREE,
3680 tree v4sf_ftype_v4sf_v4sf_v4sf
3681 = build_function_type (V4SF_type_node,
3682 tree_cons (NULL_TREE, V4SF_type_node,
3683 tree_cons (NULL_TREE, V4SF_type_node,
3684 tree_cons (NULL_TREE,
3687 tree v4si_ftype_v4si_v4si_v4si
3688 = build_function_type (V4SI_type_node,
3689 tree_cons (NULL_TREE, V4SI_type_node,
3690 tree_cons (NULL_TREE, V4SI_type_node,
3691 tree_cons (NULL_TREE,
3695 tree v8hi_ftype_v8hi_v8hi
3696 = build_function_type (V8HI_type_node,
3697 tree_cons (NULL_TREE, V8HI_type_node,
3698 tree_cons (NULL_TREE, V8HI_type_node,
3700 tree v8hi_ftype_v8hi_v8hi_v8hi
3701 = build_function_type (V8HI_type_node,
3702 tree_cons (NULL_TREE, V8HI_type_node,
3703 tree_cons (NULL_TREE, V8HI_type_node,
3704 tree_cons (NULL_TREE,
3707 tree v4si_ftype_v8hi_v8hi_v4si
3708 = build_function_type (V4SI_type_node,
3709 tree_cons (NULL_TREE, V8HI_type_node,
3710 tree_cons (NULL_TREE, V8HI_type_node,
3711 tree_cons (NULL_TREE,
3714 tree v4si_ftype_v16qi_v16qi_v4si
3715 = build_function_type (V4SI_type_node,
3716 tree_cons (NULL_TREE, V16QI_type_node,
3717 tree_cons (NULL_TREE, V16QI_type_node,
3718 tree_cons (NULL_TREE,
3722 tree v16qi_ftype_v16qi_v16qi
3723 = build_function_type (V16QI_type_node,
3724 tree_cons (NULL_TREE, V16QI_type_node,
3725 tree_cons (NULL_TREE, V16QI_type_node,
3728 tree v4si_ftype_v4sf_v4sf
3729 = build_function_type (V4SI_type_node,
3730 tree_cons (NULL_TREE, V4SF_type_node,
3731 tree_cons (NULL_TREE, V4SF_type_node,
3734 tree v8hi_ftype_v16qi_v16qi
3735 = build_function_type (V8HI_type_node,
3736 tree_cons (NULL_TREE, V16QI_type_node,
3737 tree_cons (NULL_TREE, V16QI_type_node,
3740 tree v4si_ftype_v8hi_v8hi
3741 = build_function_type (V4SI_type_node,
3742 tree_cons (NULL_TREE, V8HI_type_node,
3743 tree_cons (NULL_TREE, V8HI_type_node,
3746 tree v8hi_ftype_v4si_v4si
3747 = build_function_type (V8HI_type_node,
3748 tree_cons (NULL_TREE, V4SI_type_node,
3749 tree_cons (NULL_TREE, V4SI_type_node,
3752 tree v16qi_ftype_v8hi_v8hi
3753 = build_function_type (V16QI_type_node,
3754 tree_cons (NULL_TREE, V8HI_type_node,
3755 tree_cons (NULL_TREE, V8HI_type_node,
3758 tree v4si_ftype_v16qi_v4si
3759 = build_function_type (V4SI_type_node,
3760 tree_cons (NULL_TREE, V16QI_type_node,
3761 tree_cons (NULL_TREE, V4SI_type_node,
3764 tree v4si_ftype_v8hi_v4si
3765 = build_function_type (V4SI_type_node,
3766 tree_cons (NULL_TREE, V8HI_type_node,
3767 tree_cons (NULL_TREE, V4SI_type_node,
3770 tree int_ftype_v4si_v4si
3771 = build_function_type (integer_type_node,
3772 tree_cons (NULL_TREE, V4SI_type_node,
3773 tree_cons (NULL_TREE, V4SI_type_node,
3776 tree int_ftype_v4sf_v4sf
3777 = build_function_type (integer_type_node,
3778 tree_cons (NULL_TREE, V4SF_type_node,
3779 tree_cons (NULL_TREE, V4SF_type_node,
3782 tree int_ftype_v16qi_v16qi
3783 = build_function_type (integer_type_node,
3784 tree_cons (NULL_TREE, V16QI_type_node,
3785 tree_cons (NULL_TREE, V16QI_type_node,
3788 tree int_ftype_v8hi_v8hi
3789 = build_function_type (integer_type_node,
3790 tree_cons (NULL_TREE, V8HI_type_node,
3791 tree_cons (NULL_TREE, V8HI_type_node,
3794 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
3795 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
3796 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
3797 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
3798 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
3799 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
3800 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
3801 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
3803 /* Add the simple ternary operators. */
3804 d = (struct builtin_description *) bdesc_3arg;
3805 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
3808 enum machine_mode mode0, mode1, mode2, mode3;
3814 mode0 = insn_data[d->icode].operand[0].mode;
3815 mode1 = insn_data[d->icode].operand[1].mode;
3816 mode2 = insn_data[d->icode].operand[2].mode;
3817 mode3 = insn_data[d->icode].operand[3].mode;
3819 /* When all four are of the same mode. */
3820 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
3825 type = v4si_ftype_v4si_v4si_v4si;
3828 type = v4sf_ftype_v4sf_v4sf_v4sf;
3831 type = v8hi_ftype_v8hi_v8hi_v8hi;
3834 type = v16qi_ftype_v16qi_v16qi_v16qi;
3840 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
3845 type = v4si_ftype_v4si_v4si_v16qi;
3848 type = v4sf_ftype_v4sf_v4sf_v16qi;
3851 type = v8hi_ftype_v8hi_v8hi_v16qi;
3854 type = v16qi_ftype_v16qi_v16qi_v16qi;
3860 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
3861 && mode3 == V4SImode)
3862 type = v4si_ftype_v16qi_v16qi_v4si;
3863 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
3864 && mode3 == V4SImode)
3865 type = v4si_ftype_v8hi_v8hi_v4si;
3866 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
3867 && mode3 == V4SImode)
3868 type = v4sf_ftype_v4sf_v4sf_v4si;
3872 def_builtin (d->mask, d->name, type, d->code);
3875 /* Add the simple binary operators. */
3876 d = (struct builtin_description *) bdesc_2arg;
3877 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
3879 enum machine_mode mode0, mode1, mode2;
3885 mode0 = insn_data[d->icode].operand[0].mode;
3886 mode1 = insn_data[d->icode].operand[1].mode;
3887 mode2 = insn_data[d->icode].operand[2].mode;
3889 /* When all three operands are of the same mode. */
3890 if (mode0 == mode1 && mode1 == mode2)
3895 type = v4sf_ftype_v4sf_v4sf;
3898 type = v4si_ftype_v4si_v4si;
3901 type = v16qi_ftype_v16qi_v16qi;
3904 type = v8hi_ftype_v8hi_v8hi;
3911 /* A few other combos we really don't want to do manually. */
3913 /* vint, vfloat, vfloat. */
3914 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
3915 type = v4si_ftype_v4sf_v4sf;
3917 /* vshort, vchar, vchar. */
3918 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
3919 type = v8hi_ftype_v16qi_v16qi;
3921 /* vint, vshort, vshort. */
3922 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
3923 type = v4si_ftype_v8hi_v8hi;
3925 /* vshort, vint, vint. */
3926 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
3927 type = v8hi_ftype_v4si_v4si;
3929 /* vchar, vshort, vshort. */
3930 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
3931 type = v16qi_ftype_v8hi_v8hi;
3933 /* vint, vchar, vint. */
3934 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
3935 type = v4si_ftype_v16qi_v4si;
3937 /* vint, vshort, vint. */
3938 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
3939 type = v4si_ftype_v8hi_v4si;
3941 /* vint, vint, 5 bit literal. */
3942 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
3943 type = v4si_ftype_v4si_char;
3945 /* vshort, vshort, 5 bit literal. */
3946 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
3947 type = v8hi_ftype_v8hi_char;
3949 /* vchar, vchar, 5 bit literal. */
3950 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
3951 type = v16qi_ftype_v16qi_char;
3953 /* vfloat, vint, 5 bit literal. */
3954 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
3955 type = v4sf_ftype_v4si_char;
3957 /* vint, vfloat, 5 bit literal. */
3958 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
3959 type = v4si_ftype_v4sf_char;
3963 else if (mode0 == SImode)
3968 type = int_ftype_v4si_v4si;
3971 type = int_ftype_v4sf_v4sf;
3974 type = int_ftype_v16qi_v16qi;
3977 type = int_ftype_v8hi_v8hi;
3987 def_builtin (d->mask, d->name, type, d->code);
3989 /* Add the simple unary operators. */
3990 d = (struct builtin_description *) bdesc_1arg;
3991 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3993 enum machine_mode mode0, mode1;
3999 mode0 = insn_data[d->icode].operand[0].mode;
4000 mode1 = insn_data[d->icode].operand[1].mode;
4002 if (mode0 == V4SImode && mode1 == QImode)
4003 type = v4si_ftype_char;
4004 else if (mode0 == V8HImode && mode1 == QImode)
4005 type = v8hi_ftype_char;
4006 else if (mode0 == V16QImode && mode1 == QImode)
4007 type = v16qi_ftype_char;
4008 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4009 type = v4sf_ftype_v4sf;
4013 def_builtin (d->mask, d->name, type, d->code);
4018 /* Generate a memory reference for expand_block_move, copying volatile,
4019 and other bits from an original memory reference. */
4022 expand_block_move_mem (mode, addr, orig_mem)
4023 enum machine_mode mode;
4027 rtx mem = gen_rtx_MEM (mode, addr);
4029 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4033 /* Expand a block move operation, and return 1 if successful. Return 0
4034 if we should let the compiler generate normal code.
4036 operands[0] is the destination
4037 operands[1] is the source
4038 operands[2] is the length
4039 operands[3] is the alignment */
4041 #define MAX_MOVE_REG 4
4044 expand_block_move (operands)
4047 rtx orig_dest = operands[0];
4048 rtx orig_src = operands[1];
4049 rtx bytes_rtx = operands[2];
4050 rtx align_rtx = operands[3];
4051 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4062 rtx stores[MAX_MOVE_REG];
4065 /* If this is not a fixed size move, just call memcpy */
4069 /* If this is not a fixed size alignment, abort */
4070 if (GET_CODE (align_rtx) != CONST_INT)
4072 align = INTVAL (align_rtx);
4074 /* Anything to move? */
4075 bytes = INTVAL (bytes_rtx);
4079 /* store_one_arg depends on expand_block_move to handle at least the size of
4080 reg_parm_stack_space. */
4081 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4084 /* Move the address into scratch registers. */
4085 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4086 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4088 if (TARGET_STRING) /* string instructions are available */
4090 for ( ; bytes > 0; bytes -= move_bytes)
4092 if (bytes > 24 /* move up to 32 bytes at a time */
4100 && ! fixed_regs[12])
4102 move_bytes = (bytes > 32) ? 32 : bytes;
4103 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4106 expand_block_move_mem (BLKmode,
4109 GEN_INT ((move_bytes == 32)
4113 else if (bytes > 16 /* move up to 24 bytes at a time */
4119 && ! fixed_regs[10])
4121 move_bytes = (bytes > 24) ? 24 : bytes;
4122 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4125 expand_block_move_mem (BLKmode,
4128 GEN_INT (move_bytes),
4131 else if (bytes > 8 /* move up to 16 bytes at a time */
4137 move_bytes = (bytes > 16) ? 16 : bytes;
4138 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4141 expand_block_move_mem (BLKmode,
4144 GEN_INT (move_bytes),
4147 else if (bytes >= 8 && TARGET_POWERPC64
4148 /* 64-bit loads and stores require word-aligned
4150 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4153 tmp_reg = gen_reg_rtx (DImode);
4154 emit_move_insn (tmp_reg,
4155 expand_block_move_mem (DImode,
4156 src_reg, orig_src));
4157 emit_move_insn (expand_block_move_mem (DImode,
4158 dest_reg, orig_dest),
4161 else if (bytes > 4 && !TARGET_POWERPC64)
4162 { /* move up to 8 bytes at a time */
4163 move_bytes = (bytes > 8) ? 8 : bytes;
4164 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4167 expand_block_move_mem (BLKmode,
4170 GEN_INT (move_bytes),
4173 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4174 { /* move 4 bytes */
4176 tmp_reg = gen_reg_rtx (SImode);
4177 emit_move_insn (tmp_reg,
4178 expand_block_move_mem (SImode,
4179 src_reg, orig_src));
4180 emit_move_insn (expand_block_move_mem (SImode,
4181 dest_reg, orig_dest),
4184 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4185 { /* move 2 bytes */
4187 tmp_reg = gen_reg_rtx (HImode);
4188 emit_move_insn (tmp_reg,
4189 expand_block_move_mem (HImode,
4190 src_reg, orig_src));
4191 emit_move_insn (expand_block_move_mem (HImode,
4192 dest_reg, orig_dest),
4195 else if (bytes == 1) /* move 1 byte */
4198 tmp_reg = gen_reg_rtx (QImode);
4199 emit_move_insn (tmp_reg,
4200 expand_block_move_mem (QImode,
4201 src_reg, orig_src));
4202 emit_move_insn (expand_block_move_mem (QImode,
4203 dest_reg, orig_dest),
4207 { /* move up to 4 bytes at a time */
4208 move_bytes = (bytes > 4) ? 4 : bytes;
4209 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
4212 expand_block_move_mem (BLKmode,
4215 GEN_INT (move_bytes),
4219 if (bytes > move_bytes)
4221 if (! TARGET_POWERPC64)
4223 emit_insn (gen_addsi3 (src_reg, src_reg,
4224 GEN_INT (move_bytes)));
4225 emit_insn (gen_addsi3 (dest_reg, dest_reg,
4226 GEN_INT (move_bytes)));
4230 emit_insn (gen_adddi3 (src_reg, src_reg,
4231 GEN_INT (move_bytes)));
4232 emit_insn (gen_adddi3 (dest_reg, dest_reg,
4233 GEN_INT (move_bytes)));
4239 else /* string instructions not available */
4241 num_reg = offset = 0;
4242 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
4244 /* Calculate the correct offset for src/dest */
4248 dest_addr = dest_reg;
4252 src_addr = plus_constant (src_reg, offset);
4253 dest_addr = plus_constant (dest_reg, offset);
4256 /* Generate the appropriate load and store, saving the stores
4258 if (bytes >= 8 && TARGET_POWERPC64
4259 /* 64-bit loads and stores require word-aligned
4261 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4264 tmp_reg = gen_reg_rtx (DImode);
4265 emit_insn (gen_movdi (tmp_reg,
4266 expand_block_move_mem (DImode,
4269 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
4274 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
4277 tmp_reg = gen_reg_rtx (SImode);
4278 emit_insn (gen_movsi (tmp_reg,
4279 expand_block_move_mem (SImode,
4282 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
4287 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
4290 tmp_reg = gen_reg_rtx (HImode);
4291 emit_insn (gen_movhi (tmp_reg,
4292 expand_block_move_mem (HImode,
4295 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
4303 tmp_reg = gen_reg_rtx (QImode);
4304 emit_insn (gen_movqi (tmp_reg,
4305 expand_block_move_mem (QImode,
4308 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
4314 if (num_reg >= MAX_MOVE_REG)
4316 for (i = 0; i < num_reg; i++)
4317 emit_insn (stores[i]);
4322 for (i = 0; i < num_reg; i++)
4323 emit_insn (stores[i]);
4330 /* Return 1 if OP is a load multiple operation. It is known to be a
4331 PARALLEL and the first section will be tested. */
4334 load_multiple_operation (op, mode)
4336 enum machine_mode mode ATTRIBUTE_UNUSED;
4338 int count = XVECLEN (op, 0);
4339 unsigned int dest_regno;
4343 /* Perform a quick check so we don't blow up below. */
4345 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4346 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4347 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4350 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4351 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4353 for (i = 1; i < count; i++)
4355 rtx elt = XVECEXP (op, 0, i);
4357 if (GET_CODE (elt) != SET
4358 || GET_CODE (SET_DEST (elt)) != REG
4359 || GET_MODE (SET_DEST (elt)) != SImode
4360 || REGNO (SET_DEST (elt)) != dest_regno + i
4361 || GET_CODE (SET_SRC (elt)) != MEM
4362 || GET_MODE (SET_SRC (elt)) != SImode
4363 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4364 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4365 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4366 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
4373 /* Similar, but tests for store multiple. Here, the second vector element
4374 is a CLOBBER. It will be tested later. */
4377 store_multiple_operation (op, mode)
4379 enum machine_mode mode ATTRIBUTE_UNUSED;
4381 int count = XVECLEN (op, 0) - 1;
4382 unsigned int src_regno;
4386 /* Perform a quick check so we don't blow up below. */
4388 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4389 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4390 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4393 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4394 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4396 for (i = 1; i < count; i++)
4398 rtx elt = XVECEXP (op, 0, i + 1);
4400 if (GET_CODE (elt) != SET
4401 || GET_CODE (SET_SRC (elt)) != REG
4402 || GET_MODE (SET_SRC (elt)) != SImode
4403 || REGNO (SET_SRC (elt)) != src_regno + i
4404 || GET_CODE (SET_DEST (elt)) != MEM
4405 || GET_MODE (SET_DEST (elt)) != SImode
4406 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4407 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4408 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4409 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
4416 /* Return 1 for a parallel vrsave operation. */
4419 vrsave_operation (op, mode)
4421 enum machine_mode mode ATTRIBUTE_UNUSED;
4423 int count = XVECLEN (op, 0);
4424 unsigned int dest_regno, src_regno;
4428 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4429 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4430 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4433 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4434 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4436 if (dest_regno != VRSAVE_REGNO
4437 && src_regno != VRSAVE_REGNO)
4440 for (i = 1; i < count; i++)
4442 rtx elt = XVECEXP (op, 0, i);
4444 if (GET_CODE (elt) != CLOBBER
4445 && GET_CODE (elt) != SET)
4452 /* Return 1 for an PARALLEL suitable for mtcrf. */
4455 mtcrf_operation (op, mode)
4457 enum machine_mode mode ATTRIBUTE_UNUSED;
4459 int count = XVECLEN (op, 0);
4463 /* Perform a quick check so we don't blow up below. */
4465 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4466 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
4467 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
4469 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
4471 if (GET_CODE (src_reg) != REG
4472 || GET_MODE (src_reg) != SImode
4473 || ! INT_REGNO_P (REGNO (src_reg)))
4476 for (i = 0; i < count; i++)
4478 rtx exp = XVECEXP (op, 0, i);
4482 if (GET_CODE (exp) != SET
4483 || GET_CODE (SET_DEST (exp)) != REG
4484 || GET_MODE (SET_DEST (exp)) != CCmode
4485 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
4487 unspec = SET_SRC (exp);
4488 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
4490 if (GET_CODE (unspec) != UNSPEC
4491 || XINT (unspec, 1) != 20
4492 || XVECLEN (unspec, 0) != 2
4493 || XVECEXP (unspec, 0, 0) != src_reg
4494 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
4495 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
4501 /* Return 1 for an PARALLEL suitable for lmw. */
4504 lmw_operation (op, mode)
4506 enum machine_mode mode ATTRIBUTE_UNUSED;
4508 int count = XVECLEN (op, 0);
4509 unsigned int dest_regno;
4511 unsigned int base_regno;
4512 HOST_WIDE_INT offset;
4515 /* Perform a quick check so we don't blow up below. */
4517 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4518 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
4519 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
4522 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
4523 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
4526 || count != 32 - (int) dest_regno)
4529 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
4532 base_regno = REGNO (src_addr);
4533 if (base_regno == 0)
4536 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
4538 offset = INTVAL (XEXP (src_addr, 1));
4539 base_regno = REGNO (XEXP (src_addr, 0));
4544 for (i = 0; i < count; i++)
4546 rtx elt = XVECEXP (op, 0, i);
4549 HOST_WIDE_INT newoffset;
4551 if (GET_CODE (elt) != SET
4552 || GET_CODE (SET_DEST (elt)) != REG
4553 || GET_MODE (SET_DEST (elt)) != SImode
4554 || REGNO (SET_DEST (elt)) != dest_regno + i
4555 || GET_CODE (SET_SRC (elt)) != MEM
4556 || GET_MODE (SET_SRC (elt)) != SImode)
4558 newaddr = XEXP (SET_SRC (elt), 0);
4559 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4564 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4566 addr_reg = XEXP (newaddr, 0);
4567 newoffset = INTVAL (XEXP (newaddr, 1));
4571 if (REGNO (addr_reg) != base_regno
4572 || newoffset != offset + 4 * i)
4579 /* Return 1 for an PARALLEL suitable for stmw. */
4582 stmw_operation (op, mode)
4584 enum machine_mode mode ATTRIBUTE_UNUSED;
4586 int count = XVECLEN (op, 0);
4587 unsigned int src_regno;
4589 unsigned int base_regno;
4590 HOST_WIDE_INT offset;
4593 /* Perform a quick check so we don't blow up below. */
4595 || GET_CODE (XVECEXP (op, 0, 0)) != SET
4596 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
4597 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
4600 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
4601 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
4604 || count != 32 - (int) src_regno)
4607 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
4610 base_regno = REGNO (dest_addr);
4611 if (base_regno == 0)
4614 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
4616 offset = INTVAL (XEXP (dest_addr, 1));
4617 base_regno = REGNO (XEXP (dest_addr, 0));
4622 for (i = 0; i < count; i++)
4624 rtx elt = XVECEXP (op, 0, i);
4627 HOST_WIDE_INT newoffset;
4629 if (GET_CODE (elt) != SET
4630 || GET_CODE (SET_SRC (elt)) != REG
4631 || GET_MODE (SET_SRC (elt)) != SImode
4632 || REGNO (SET_SRC (elt)) != src_regno + i
4633 || GET_CODE (SET_DEST (elt)) != MEM
4634 || GET_MODE (SET_DEST (elt)) != SImode)
4636 newaddr = XEXP (SET_DEST (elt), 0);
4637 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
4642 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
4644 addr_reg = XEXP (newaddr, 0);
4645 newoffset = INTVAL (XEXP (newaddr, 1));
4649 if (REGNO (addr_reg) != base_regno
4650 || newoffset != offset + 4 * i)
4657 /* A validation routine: say whether CODE, a condition code, and MODE
4658 match. The other alternatives either don't make sense or should
4659 never be generated. */
4662 validate_condition_mode (code, mode)
4664 enum machine_mode mode;
4666 if (GET_RTX_CLASS (code) != '<'
4667 || GET_MODE_CLASS (mode) != MODE_CC)
4670 /* These don't make sense. */
4671 if ((code == GT || code == LT || code == GE || code == LE)
4672 && mode == CCUNSmode)
4675 if ((code == GTU || code == LTU || code == GEU || code == LEU)
4676 && mode != CCUNSmode)
4679 if (mode != CCFPmode
4680 && (code == ORDERED || code == UNORDERED
4681 || code == UNEQ || code == LTGT
4682 || code == UNGT || code == UNLT
4683 || code == UNGE || code == UNLE))
4686 /* These should never be generated except for
4687 flag_unsafe_math_optimizations. */
4688 if (mode == CCFPmode
4689 && ! flag_unsafe_math_optimizations
4690 && (code == LE || code == GE
4691 || code == UNEQ || code == LTGT
4692 || code == UNGT || code == UNLT))
4695 /* These are invalid; the information is not there. */
4696 if (mode == CCEQmode
4697 && code != EQ && code != NE)
4701 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
4702 We only check the opcode against the mode of the CC value here. */
4705 branch_comparison_operator (op, mode)
4707 enum machine_mode mode ATTRIBUTE_UNUSED;
4709 enum rtx_code code = GET_CODE (op);
4710 enum machine_mode cc_mode;
4712 if (GET_RTX_CLASS (code) != '<')
4715 cc_mode = GET_MODE (XEXP (op, 0));
4716 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4719 validate_condition_mode (code, cc_mode);
4724 /* Return 1 if OP is a comparison operation that is valid for a branch
4725 insn and which is true if the corresponding bit in the CC register
4729 branch_positive_comparison_operator (op, mode)
4731 enum machine_mode mode;
4735 if (! branch_comparison_operator (op, mode))
4738 code = GET_CODE (op);
4739 return (code == EQ || code == LT || code == GT
4740 || code == LTU || code == GTU
4741 || code == UNORDERED);
4744 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
4745 We check the opcode against the mode of the CC value and disallow EQ or
4746 NE comparisons for integers. */
4749 scc_comparison_operator (op, mode)
4751 enum machine_mode mode;
4753 enum rtx_code code = GET_CODE (op);
4754 enum machine_mode cc_mode;
4756 if (GET_MODE (op) != mode && mode != VOIDmode)
4759 if (GET_RTX_CLASS (code) != '<')
4762 cc_mode = GET_MODE (XEXP (op, 0));
4763 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
4766 validate_condition_mode (code, cc_mode);
4768 if (code == NE && cc_mode != CCFPmode)
4775 trap_comparison_operator (op, mode)
4777 enum machine_mode mode;
4779 if (mode != VOIDmode && mode != GET_MODE (op))
4781 return GET_RTX_CLASS (GET_CODE (op)) == '<';
4785 boolean_operator (op, mode)
4787 enum machine_mode mode ATTRIBUTE_UNUSED;
4789 enum rtx_code code = GET_CODE (op);
4790 return (code == AND || code == IOR || code == XOR);
4794 boolean_or_operator (op, mode)
4796 enum machine_mode mode ATTRIBUTE_UNUSED;
4798 enum rtx_code code = GET_CODE (op);
4799 return (code == IOR || code == XOR);
4803 min_max_operator (op, mode)
4805 enum machine_mode mode ATTRIBUTE_UNUSED;
4807 enum rtx_code code = GET_CODE (op);
4808 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
4811 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
4812 mask required to convert the result of a rotate insn into a shift
4813 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
4816 includes_lshift_p (shiftop, andop)
4820 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4822 shift_mask <<= INTVAL (shiftop);
4824 return (INTVAL (andop) & ~shift_mask) == 0;
4827 /* Similar, but for right shift. */
4830 includes_rshift_p (shiftop, andop)
4834 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
4836 shift_mask >>= INTVAL (shiftop);
4838 return (INTVAL (andop) & ~shift_mask) == 0;
4841 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
4842 to perform a left shift. It must have exactly SHIFTOP least
4843 signifigant 0's, then one or more 1's, then zero or more 0's. */
4846 includes_rldic_lshift_p (shiftop, andop)
4850 if (GET_CODE (andop) == CONST_INT)
4852 HOST_WIDE_INT c, lsb, shift_mask;
4855 if (c == 0 || c == ~0)
4859 shift_mask <<= INTVAL (shiftop);
4861 /* Find the least signifigant one bit. */
4864 /* It must coincide with the LSB of the shift mask. */
4865 if (-lsb != shift_mask)
4868 /* Invert to look for the next transition (if any). */
4871 /* Remove the low group of ones (originally low group of zeros). */
4874 /* Again find the lsb, and check we have all 1's above. */
4878 else if (GET_CODE (andop) == CONST_DOUBLE
4879 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4881 HOST_WIDE_INT low, high, lsb;
4882 HOST_WIDE_INT shift_mask_low, shift_mask_high;
4884 low = CONST_DOUBLE_LOW (andop);
4885 if (HOST_BITS_PER_WIDE_INT < 64)
4886 high = CONST_DOUBLE_HIGH (andop);
4888 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
4889 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
4892 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4894 shift_mask_high = ~0;
4895 if (INTVAL (shiftop) > 32)
4896 shift_mask_high <<= INTVAL (shiftop) - 32;
4900 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
4907 return high == -lsb;
4910 shift_mask_low = ~0;
4911 shift_mask_low <<= INTVAL (shiftop);
4915 if (-lsb != shift_mask_low)
4918 if (HOST_BITS_PER_WIDE_INT < 64)
4923 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
4926 return high == -lsb;
4930 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
4936 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
4937 to perform a left shift. It must have SHIFTOP or more least
4938 signifigant 0's, with the remainder of the word 1's. */
4941 includes_rldicr_lshift_p (shiftop, andop)
4945 if (GET_CODE (andop) == CONST_INT)
4947 HOST_WIDE_INT c, lsb, shift_mask;
4950 shift_mask <<= INTVAL (shiftop);
4953 /* Find the least signifigant one bit. */
4956 /* It must be covered by the shift mask.
4957 This test also rejects c == 0. */
4958 if ((lsb & shift_mask) == 0)
4961 /* Check we have all 1's above the transition, and reject all 1's. */
4962 return c == -lsb && lsb != 1;
4964 else if (GET_CODE (andop) == CONST_DOUBLE
4965 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
4967 HOST_WIDE_INT low, lsb, shift_mask_low;
4969 low = CONST_DOUBLE_LOW (andop);
4971 if (HOST_BITS_PER_WIDE_INT < 64)
4973 HOST_WIDE_INT high, shift_mask_high;
4975 high = CONST_DOUBLE_HIGH (andop);
4979 shift_mask_high = ~0;
4980 if (INTVAL (shiftop) > 32)
4981 shift_mask_high <<= INTVAL (shiftop) - 32;
4985 if ((lsb & shift_mask_high) == 0)
4988 return high == -lsb;
4994 shift_mask_low = ~0;
4995 shift_mask_low <<= INTVAL (shiftop);
4999 if ((lsb & shift_mask_low) == 0)
5002 return low == -lsb && lsb != 1;
5008 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5009 for lfq and stfq insns.
5011 Note reg1 and reg2 *must* be hard registers. To be sure we will
5012 abort if we are passed pseudo registers. */
5015 registers_ok_for_quad_peep (reg1, reg2)
5018 /* We might have been passed a SUBREG. */
5019 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5022 return (REGNO (reg1) == REGNO (reg2) - 1);
5025 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5026 addr1 and addr2 must be in consecutive memory locations
5027 (addr2 == addr1 + 8). */
5030 addrs_ok_for_quad_peep (addr1, addr2)
5037 /* Extract an offset (if used) from the first addr. */
5038 if (GET_CODE (addr1) == PLUS)
5040 /* If not a REG, return zero. */
5041 if (GET_CODE (XEXP (addr1, 0)) != REG)
5045 reg1 = REGNO (XEXP (addr1, 0));
5046 /* The offset must be constant! */
5047 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5049 offset1 = INTVAL (XEXP (addr1, 1));
5052 else if (GET_CODE (addr1) != REG)
5056 reg1 = REGNO (addr1);
5057 /* This was a simple (mem (reg)) expression. Offset is 0. */
5061 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5062 if (GET_CODE (addr2) != PLUS)
5065 if (GET_CODE (XEXP (addr2, 0)) != REG
5066 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5069 if (reg1 != REGNO (XEXP (addr2, 0)))
5072 /* The offset for the second addr must be 8 more than the first addr. */
5073 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5076 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5081 /* Return the register class of a scratch register needed to copy IN into
5082 or out of a register in CLASS in MODE. If it can be done directly,
5083 NO_REGS is returned. */
5086 secondary_reload_class (class, mode, in)
5087 enum reg_class class;
5088 enum machine_mode mode ATTRIBUTE_UNUSED;
5093 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5095 /* We cannot copy a symbolic operand directly into anything
5096 other than BASE_REGS for TARGET_ELF. So indicate that a
5097 register from BASE_REGS is needed as an intermediate
5100 On Darwin, pic addresses require a load from memory, which
5101 needs a base register. */
5102 if (class != BASE_REGS
5103 && (GET_CODE (in) == SYMBOL_REF
5104 || GET_CODE (in) == HIGH
5105 || GET_CODE (in) == LABEL_REF
5106 || GET_CODE (in) == CONST))
5110 if (GET_CODE (in) == REG)
5113 if (regno >= FIRST_PSEUDO_REGISTER)
5115 regno = true_regnum (in);
5116 if (regno >= FIRST_PSEUDO_REGISTER)
5120 else if (GET_CODE (in) == SUBREG)
5122 regno = true_regnum (in);
5123 if (regno >= FIRST_PSEUDO_REGISTER)
5129 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5131 if (class == GENERAL_REGS || class == BASE_REGS
5132 || (regno >= 0 && INT_REGNO_P (regno)))
5135 /* Constants, memory, and FP registers can go into FP registers. */
5136 if ((regno == -1 || FP_REGNO_P (regno))
5137 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5140 /* Memory, and AltiVec registers can go into AltiVec registers. */
5141 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5142 && class == ALTIVEC_REGS)
5145 /* We can copy among the CR registers. */
5146 if ((class == CR_REGS || class == CR0_REGS)
5147 && regno >= 0 && CR_REGNO_P (regno))
5150 /* Otherwise, we need GENERAL_REGS. */
5151 return GENERAL_REGS;
5154 /* Given a comparison operation, return the bit number in CCR to test. We
5155 know this is a valid comparison.
5157 SCC_P is 1 if this is for an scc. That means that %D will have been
5158 used instead of %C, so the bits will be in different places.
5160 Return -1 if OP isn't a valid comparison for some reason. */
5167 enum rtx_code code = GET_CODE (op);
5168 enum machine_mode cc_mode;
5173 if (GET_RTX_CLASS (code) != '<')
5178 if (GET_CODE (reg) != REG
5179 || ! CR_REGNO_P (REGNO (reg)))
5182 cc_mode = GET_MODE (reg);
5183 cc_regnum = REGNO (reg);
5184 base_bit = 4 * (cc_regnum - CR0_REGNO);
5186 validate_condition_mode (code, cc_mode);
5191 return scc_p ? base_bit + 3 : base_bit + 2;
5193 return base_bit + 2;
5194 case GT: case GTU: case UNLE:
5195 return base_bit + 1;
5196 case LT: case LTU: case UNGE:
5198 case ORDERED: case UNORDERED:
5199 return base_bit + 3;
5202 /* If scc, we will have done a cror to put the bit in the
5203 unordered position. So test that bit. For integer, this is ! LT
5204 unless this is an scc insn. */
5205 return scc_p ? base_bit + 3 : base_bit;
5208 return scc_p ? base_bit + 3 : base_bit + 1;
5215 /* Return the GOT register. */
5218 rs6000_got_register (value)
5219 rtx value ATTRIBUTE_UNUSED;
5221 /* The second flow pass currently (June 1999) can't update
5222 regs_ever_live without disturbing other parts of the compiler, so
5223 update it here to make the prolog/epilogue code happy. */
5224 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5225 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
5227 current_function_uses_pic_offset_table = 1;
5229 return pic_offset_table_rtx;
5232 /* Functions to init, mark and free struct machine_function.
5233 These will be called, via pointer variables,
5234 from push_function_context and pop_function_context. */
5237 rs6000_init_machine_status (p)
5240 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
5244 rs6000_free_machine_status (p)
5247 if (p->machine == NULL)
5255 /* Print an operand. Recognize special options, documented below. */
5258 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
5259 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
5261 #define SMALL_DATA_RELOC "sda21"
5262 #define SMALL_DATA_REG 0
5266 print_operand (file, x, code)
5274 /* These macros test for integers and extract the low-order bits. */
5276 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
5277 && GET_MODE (X) == VOIDmode)
5279 #define INT_LOWPART(X) \
5280 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
5285 /* Write out an instruction after the call which may be replaced
5286 with glue code by the loader. This depends on the AIX version. */
5287 asm_fprintf (file, RS6000_CALL_GLUE);
5290 /* %a is output_address. */
5293 /* If X is a constant integer whose low-order 5 bits are zero,
5294 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
5295 in the AIX assembler where "sri" with a zero shift count
5296 write a trash instruction. */
5297 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
5304 /* If constant, low-order 16 bits of constant, unsigned.
5305 Otherwise, write normally. */
5307 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
5309 print_operand (file, x, 0);
5313 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
5314 for 64-bit mask direction. */
5315 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
5318 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
5322 /* There used to be a comment for 'C' reading "This is an
5323 optional cror needed for certain floating-point
5324 comparisons. Otherwise write nothing." */
5326 /* Similar, except that this is for an scc, so we must be able to
5327 encode the test in a single bit that is one. We do the above
5328 for any LE, GE, GEU, or LEU and invert the bit for NE. */
5329 if (GET_CODE (x) == LE || GET_CODE (x) == GE
5330 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
5332 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5334 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
5336 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
5339 else if (GET_CODE (x) == NE)
5341 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
5343 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
5344 base_bit + 2, base_bit + 2);
5349 /* X is a CR register. Print the number of the EQ bit of the CR */
5350 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5351 output_operand_lossage ("invalid %%E value");
5353 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
5357 /* X is a CR register. Print the shift count needed to move it
5358 to the high-order four bits. */
5359 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5360 output_operand_lossage ("invalid %%f value");
5362 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
5366 /* Similar, but print the count for the rotate in the opposite
5368 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5369 output_operand_lossage ("invalid %%F value");
5371 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
5375 /* X is a constant integer. If it is negative, print "m",
5376 otherwise print "z". This is to make a aze or ame insn. */
5377 if (GET_CODE (x) != CONST_INT)
5378 output_operand_lossage ("invalid %%G value");
5379 else if (INTVAL (x) >= 0)
5386 /* If constant, output low-order five bits. Otherwise, write
5389 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
5391 print_operand (file, x, 0);
5395 /* If constant, output low-order six bits. Otherwise, write
5398 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
5400 print_operand (file, x, 0);
5404 /* Print `i' if this is a constant, else nothing. */
5410 /* Write the bit number in CCR for jump. */
5413 output_operand_lossage ("invalid %%j code");
5415 fprintf (file, "%d", i);
5419 /* Similar, but add one for shift count in rlinm for scc and pass
5420 scc flag to `ccr_bit'. */
5423 output_operand_lossage ("invalid %%J code");
5425 /* If we want bit 31, write a shift count of zero, not 32. */
5426 fprintf (file, "%d", i == 31 ? 0 : i + 1);
5430 /* X must be a constant. Write the 1's complement of the
5433 output_operand_lossage ("invalid %%k value");
5435 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
5439 /* X must be a symbolic constant on ELF. Write an
5440 expression suitable for an 'addi' that adds in the low 16
5442 if (GET_CODE (x) != CONST)
5444 print_operand_address (file, x);
5449 if (GET_CODE (XEXP (x, 0)) != PLUS
5450 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
5451 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
5452 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
5453 output_operand_lossage ("invalid %%K value");
5454 print_operand_address (file, XEXP (XEXP (x, 0), 0));
5456 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
5460 /* %l is output_asm_label. */
5463 /* Write second word of DImode or DFmode reference. Works on register
5464 or non-indexed memory only. */
5465 if (GET_CODE (x) == REG)
5466 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
5467 else if (GET_CODE (x) == MEM)
5469 /* Handle possible auto-increment. Since it is pre-increment and
5470 we have already done it, we can just use an offset of word. */
5471 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5472 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5473 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
5476 output_address (XEXP (adjust_address_nv (x, SImode,
5480 if (small_data_operand (x, GET_MODE (x)))
5481 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5482 reg_names[SMALL_DATA_REG]);
5487 /* MB value for a mask operand. */
5488 if (! mask_operand (x, VOIDmode))
5489 output_operand_lossage ("invalid %%m value");
5491 val = INT_LOWPART (x);
5493 /* If the high bit is set and the low bit is not, the value is zero.
5494 If the high bit is zero, the value is the first 1 bit we find from
5496 if ((val & 0x80000000) && ((val & 1) == 0))
5501 else if ((val & 0x80000000) == 0)
5503 for (i = 1; i < 32; i++)
5504 if ((val <<= 1) & 0x80000000)
5506 fprintf (file, "%d", i);
5510 /* Otherwise, look for the first 0 bit from the right. The result is its
5511 number plus 1. We know the low-order bit is one. */
5512 for (i = 0; i < 32; i++)
5513 if (((val >>= 1) & 1) == 0)
5516 /* If we ended in ...01, i would be 0. The correct value is 31, so
5518 fprintf (file, "%d", 31 - i);
5522 /* ME value for a mask operand. */
5523 if (! mask_operand (x, VOIDmode))
5524 output_operand_lossage ("invalid %%M value");
5526 val = INT_LOWPART (x);
5528 /* If the low bit is set and the high bit is not, the value is 31.
5529 If the low bit is zero, the value is the first 1 bit we find from
5531 if ((val & 1) && ((val & 0x80000000) == 0))
5536 else if ((val & 1) == 0)
5538 for (i = 0; i < 32; i++)
5539 if ((val >>= 1) & 1)
5542 /* If we had ....10, i would be 0. The result should be
5543 30, so we need 30 - i. */
5544 fprintf (file, "%d", 30 - i);
5548 /* Otherwise, look for the first 0 bit from the left. The result is its
5549 number minus 1. We know the high-order bit is one. */
5550 for (i = 0; i < 32; i++)
5551 if (((val <<= 1) & 0x80000000) == 0)
5554 fprintf (file, "%d", i);
5557 /* %n outputs the negative of its operand. */
5560 /* Write the number of elements in the vector times 4. */
5561 if (GET_CODE (x) != PARALLEL)
5562 output_operand_lossage ("invalid %%N value");
5564 fprintf (file, "%d", XVECLEN (x, 0) * 4);
5568 /* Similar, but subtract 1 first. */
5569 if (GET_CODE (x) != PARALLEL)
5570 output_operand_lossage ("invalid %%O value");
5572 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
5576 /* X is a CONST_INT that is a power of two. Output the logarithm. */
5578 || INT_LOWPART (x) < 0
5579 || (i = exact_log2 (INT_LOWPART (x))) < 0)
5580 output_operand_lossage ("invalid %%p value");
5582 fprintf (file, "%d", i);
5586 /* The operand must be an indirect memory reference. The result
5587 is the register number. */
5588 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
5589 || REGNO (XEXP (x, 0)) >= 32)
5590 output_operand_lossage ("invalid %%P value");
5592 fprintf (file, "%d", REGNO (XEXP (x, 0)));
5596 /* This outputs the logical code corresponding to a boolean
5597 expression. The expression may have one or both operands
5598 negated (if one, only the first one). For condition register
5599 logical operations, it will also treat the negated
5600 CR codes as NOTs, but not handle NOTs of them. */
5602 const char *const *t = 0;
5604 enum rtx_code code = GET_CODE (x);
5605 static const char * const tbl[3][3] = {
5606 { "and", "andc", "nor" },
5607 { "or", "orc", "nand" },
5608 { "xor", "eqv", "xor" } };
5612 else if (code == IOR)
5614 else if (code == XOR)
5617 output_operand_lossage ("invalid %%q value");
5619 if (GET_CODE (XEXP (x, 0)) != NOT)
5623 if (GET_CODE (XEXP (x, 1)) == NOT)
5634 /* X is a CR register. Print the mask for `mtcrf'. */
5635 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
5636 output_operand_lossage ("invalid %%R value");
5638 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
5642 /* Low 5 bits of 32 - value */
5644 output_operand_lossage ("invalid %%s value");
5646 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
5650 /* PowerPC64 mask position. All 0's and all 1's are excluded.
5651 CONST_INT 32-bit mask is considered sign-extended so any
5652 transition must occur within the CONST_INT, not on the boundary. */
5653 if (! mask64_operand (x, VOIDmode))
5654 output_operand_lossage ("invalid %%S value");
5656 val = INT_LOWPART (x);
5658 if (val & 1) /* Clear Left */
5660 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5661 if (!((val >>= 1) & 1))
5664 #if HOST_BITS_PER_WIDE_INT == 32
5665 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5667 val = CONST_DOUBLE_HIGH (x);
5672 for (i = 32; i < 64; i++)
5673 if (!((val >>= 1) & 1))
5677 /* i = index of last set bit from right
5678 mask begins at 63 - i from left */
5680 output_operand_lossage ("%%S computed all 1's mask");
5682 fprintf (file, "%d", 63 - i);
5685 else /* Clear Right */
5687 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5688 if ((val >>= 1) & 1)
5691 #if HOST_BITS_PER_WIDE_INT == 32
5692 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5694 val = CONST_DOUBLE_HIGH (x);
5696 if (val == (HOST_WIDE_INT) -1)
5699 for (i = 32; i < 64; i++)
5700 if ((val >>= 1) & 1)
5704 /* i = index of last clear bit from right
5705 mask ends at 62 - i from left */
5707 output_operand_lossage ("%%S computed all 0's mask");
5709 fprintf (file, "%d", 62 - i);
5714 /* Print the symbolic name of a branch target register. */
5715 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
5716 && REGNO (x) != COUNT_REGISTER_REGNUM))
5717 output_operand_lossage ("invalid %%T value");
5718 else if (REGNO (x) == LINK_REGISTER_REGNUM)
5719 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
5721 fputs ("ctr", file);
5725 /* High-order 16 bits of constant for use in unsigned operand. */
5727 output_operand_lossage ("invalid %%u value");
5729 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5730 (INT_LOWPART (x) >> 16) & 0xffff);
5734 /* High-order 16 bits of constant for use in signed operand. */
5736 output_operand_lossage ("invalid %%v value");
5738 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
5739 (INT_LOWPART (x) >> 16) & 0xffff);
5743 /* Print `u' if this has an auto-increment or auto-decrement. */
5744 if (GET_CODE (x) == MEM
5745 && (GET_CODE (XEXP (x, 0)) == PRE_INC
5746 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
5751 /* Print the trap code for this operand. */
5752 switch (GET_CODE (x))
5755 fputs ("eq", file); /* 4 */
5758 fputs ("ne", file); /* 24 */
5761 fputs ("lt", file); /* 16 */
5764 fputs ("le", file); /* 20 */
5767 fputs ("gt", file); /* 8 */
5770 fputs ("ge", file); /* 12 */
5773 fputs ("llt", file); /* 2 */
5776 fputs ("lle", file); /* 6 */
5779 fputs ("lgt", file); /* 1 */
5782 fputs ("lge", file); /* 5 */
5790 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
5793 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5794 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
5796 print_operand (file, x, 0);
5800 /* MB value for a PowerPC64 rldic operand. */
5801 val = (GET_CODE (x) == CONST_INT
5802 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
5807 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
5808 if ((val <<= 1) < 0)
5811 #if HOST_BITS_PER_WIDE_INT == 32
5812 if (GET_CODE (x) == CONST_INT && i >= 0)
5813 i += 32; /* zero-extend high-part was all 0's */
5814 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
5816 val = CONST_DOUBLE_LOW (x);
5823 for ( ; i < 64; i++)
5824 if ((val <<= 1) < 0)
5829 fprintf (file, "%d", i + 1);
5833 if (GET_CODE (x) == MEM
5834 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
5839 /* Like 'L', for third word of TImode */
5840 if (GET_CODE (x) == REG)
5841 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
5842 else if (GET_CODE (x) == MEM)
5844 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5845 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5846 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
5848 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
5849 if (small_data_operand (x, GET_MODE (x)))
5850 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5851 reg_names[SMALL_DATA_REG]);
5856 /* X is a SYMBOL_REF. Write out the name preceded by a
5857 period and without any trailing data in brackets. Used for function
5858 names. If we are configured for System V (or the embedded ABI) on
5859 the PowerPC, do not emit the period, since those systems do not use
5860 TOCs and the like. */
5861 if (GET_CODE (x) != SYMBOL_REF)
5864 if (XSTR (x, 0)[0] != '.')
5866 switch (DEFAULT_ABI)
5876 case ABI_AIX_NODESC:
5882 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
5884 assemble_name (file, XSTR (x, 0));
5889 /* Like 'L', for last word of TImode. */
5890 if (GET_CODE (x) == REG)
5891 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
5892 else if (GET_CODE (x) == MEM)
5894 if (GET_CODE (XEXP (x, 0)) == PRE_INC
5895 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
5896 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
5898 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
5899 if (small_data_operand (x, GET_MODE (x)))
5900 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5901 reg_names[SMALL_DATA_REG]);
5905 /* Print AltiVec memory operand. */
5910 if (GET_CODE (x) != MEM)
5915 if (GET_CODE (tmp) == REG)
5916 fprintf (file, "0, %s", reg_names[REGNO (tmp)]);
5917 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
5919 if (REGNO (XEXP (tmp, 0)) == 0)
5920 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
5921 reg_names[ REGNO (XEXP (tmp, 0)) ]);
5923 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
5924 reg_names[ REGNO (XEXP (tmp, 1)) ]);
5932 if (GET_CODE (x) == REG)
5933 fprintf (file, "%s", reg_names[REGNO (x)]);
5934 else if (GET_CODE (x) == MEM)
5936 /* We need to handle PRE_INC and PRE_DEC here, since we need to
5937 know the width from the mode. */
5938 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
5939 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
5940 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5941 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
5942 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
5943 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
5945 output_address (XEXP (x, 0));
5948 output_addr_const (file, x);
5952 output_operand_lossage ("invalid %%xn code");
5956 /* Print the address of an operand. */
5959 print_operand_address (file, x)
5963 if (GET_CODE (x) == REG)
5964 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
5965 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
5966 || GET_CODE (x) == LABEL_REF)
5968 output_addr_const (file, x);
5969 if (small_data_operand (x, GET_MODE (x)))
5970 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
5971 reg_names[SMALL_DATA_REG]);
5972 else if (TARGET_TOC)
5975 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
5977 if (REGNO (XEXP (x, 0)) == 0)
5978 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
5979 reg_names[ REGNO (XEXP (x, 0)) ]);
5981 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
5982 reg_names[ REGNO (XEXP (x, 1)) ]);
5984 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
5986 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
5987 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5990 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
5991 && CONSTANT_P (XEXP (x, 1)))
5993 output_addr_const (file, XEXP (x, 1));
5994 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
5998 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
5999 && CONSTANT_P (XEXP (x, 1)))
6001 fprintf (file, "lo16(");
6002 output_addr_const (file, XEXP (x, 1));
6003 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6006 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6008 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6010 rtx contains_minus = XEXP (x, 1);
6014 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6015 turn it into (sym) for output_addr_const. */
6016 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6017 contains_minus = XEXP (contains_minus, 0);
6019 minus = XEXP (contains_minus, 0);
6020 symref = XEXP (minus, 0);
6021 XEXP (contains_minus, 0) = symref;
6026 name = XSTR (symref, 0);
6027 newname = alloca (strlen (name) + sizeof ("@toc"));
6028 strcpy (newname, name);
6029 strcat (newname, "@toc");
6030 XSTR (symref, 0) = newname;
6032 output_addr_const (file, XEXP (x, 1));
6034 XSTR (symref, 0) = name;
6035 XEXP (contains_minus, 0) = minus;
6038 output_addr_const (file, XEXP (x, 1));
6040 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6046 /* Target hook for assembling integer objects. The powerpc version has
6047 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6048 is defined. It also needs to handle DI-mode objects on 64-bit
6052 rs6000_assemble_integer (x, size, aligned_p)
6057 #ifdef RELOCATABLE_NEEDS_FIXUP
6058 /* Special handling for SI values. */
6059 if (size == 4 && aligned_p)
6061 extern int in_toc_section PARAMS ((void));
6062 static int recurse = 0;
6064 /* For -mrelocatable, we mark all addresses that need to be fixed up
6065 in the .fixup section. */
6066 if (TARGET_RELOCATABLE
6067 && !in_toc_section ()
6068 && !in_text_section ()
6070 && GET_CODE (x) != CONST_INT
6071 && GET_CODE (x) != CONST_DOUBLE
6077 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6079 ASM_OUTPUT_LABEL (asm_out_file, buf);
6080 fprintf (asm_out_file, "\t.long\t(");
6081 output_addr_const (asm_out_file, x);
6082 fprintf (asm_out_file, ")@fixup\n");
6083 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6084 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6085 fprintf (asm_out_file, "\t.long\t");
6086 assemble_name (asm_out_file, buf);
6087 fprintf (asm_out_file, "\n\t.previous\n");
6091 /* Remove initial .'s to turn a -mcall-aixdesc function
6092 address into the address of the descriptor, not the function
6094 else if (GET_CODE (x) == SYMBOL_REF
6095 && XSTR (x, 0)[0] == '.'
6096 && DEFAULT_ABI == ABI_AIX)
6098 const char *name = XSTR (x, 0);
6099 while (*name == '.')
6102 fprintf (asm_out_file, "\t.long\t%s\n", name);
6106 #endif /* RELOCATABLE_NEEDS_FIXUP */
6107 return default_assemble_integer (x, size, aligned_p);
6111 rs6000_reverse_condition (mode, code)
6112 enum machine_mode mode;
6115 /* Reversal of FP compares takes care -- an ordered compare
6116 becomes an unordered compare and vice versa. */
6117 if (mode == CCFPmode)
6118 return reverse_condition_maybe_unordered (code);
6120 return reverse_condition (code);
6123 /* Generate a compare for CODE. Return a brand-new rtx that
6124 represents the result of the compare. */
6127 rs6000_generate_compare (code)
6130 enum machine_mode comp_mode;
6133 if (rs6000_compare_fp_p)
6134 comp_mode = CCFPmode;
6135 else if (code == GTU || code == LTU
6136 || code == GEU || code == LEU)
6137 comp_mode = CCUNSmode;
6141 /* First, the compare. */
6142 compare_result = gen_reg_rtx (comp_mode);
6143 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6144 gen_rtx_COMPARE (comp_mode,
6146 rs6000_compare_op1)));
6148 /* Some kinds of FP comparisons need an OR operation;
6149 except for flag_unsafe_math_optimizations we don't bother. */
6150 if (rs6000_compare_fp_p
6151 && ! flag_unsafe_math_optimizations
6152 && (code == LE || code == GE
6153 || code == UNEQ || code == LTGT
6154 || code == UNGT || code == UNLT))
6156 enum rtx_code or1, or2;
6157 rtx or1_rtx, or2_rtx, compare2_rtx;
6158 rtx or_result = gen_reg_rtx (CCEQmode);
6162 case LE: or1 = LT; or2 = EQ; break;
6163 case GE: or1 = GT; or2 = EQ; break;
6164 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6165 case LTGT: or1 = LT; or2 = GT; break;
6166 case UNGT: or1 = UNORDERED; or2 = GT; break;
6167 case UNLT: or1 = UNORDERED; or2 = LT; break;
6170 validate_condition_mode (or1, comp_mode);
6171 validate_condition_mode (or2, comp_mode);
6172 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
6173 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
6174 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
6175 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
6177 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
6179 compare_result = or_result;
6183 validate_condition_mode (code, GET_MODE (compare_result));
6185 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
6189 /* Emit the RTL for an sCOND pattern. */
6192 rs6000_emit_sCOND (code, result)
6197 enum machine_mode op_mode;
6199 condition_rtx = rs6000_generate_compare (code);
6201 op_mode = GET_MODE (rs6000_compare_op0);
6202 if (op_mode == VOIDmode)
6203 op_mode = GET_MODE (rs6000_compare_op1);
6205 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
6207 PUT_MODE (condition_rtx, DImode);
6208 convert_move (result, condition_rtx, 0);
6212 PUT_MODE (condition_rtx, SImode);
6213 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
6217 /* Emit a branch of kind CODE to location LOC. */
6220 rs6000_emit_cbranch (code, loc)
6224 rtx condition_rtx, loc_ref;
6226 condition_rtx = rs6000_generate_compare (code);
6227 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
6228 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
6229 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
6233 /* Return the string to output a conditional branch to LABEL, which is
6234 the operand number of the label, or -1 if the branch is really a
6237 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
6238 condition code register and its mode specifies what kind of
6241 REVERSED is non-zero if we should reverse the sense of the comparison.
6243 INSN is the insn. */
6246 output_cbranch (op, label, reversed, insn)
6252 static char string[64];
6253 enum rtx_code code = GET_CODE (op);
6254 rtx cc_reg = XEXP (op, 0);
6255 enum machine_mode mode = GET_MODE (cc_reg);
6256 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
6257 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
6258 int really_reversed = reversed ^ need_longbranch;
6264 validate_condition_mode (code, mode);
6266 /* Work out which way this really branches. We could use
6267 reverse_condition_maybe_unordered here always but this
6268 makes the resulting assembler clearer. */
6269 if (really_reversed)
6270 code = rs6000_reverse_condition (mode, code);
6274 /* Not all of these are actually distinct opcodes, but
6275 we distinguish them for clarity of the resulting assembler. */
6277 ccode = "ne"; break;
6279 ccode = "eq"; break;
6281 ccode = "ge"; break;
6282 case GT: case GTU: case UNGT:
6283 ccode = "gt"; break;
6285 ccode = "le"; break;
6286 case LT: case LTU: case UNLT:
6287 ccode = "lt"; break;
6288 case UNORDERED: ccode = "un"; break;
6289 case ORDERED: ccode = "nu"; break;
6290 case UNGE: ccode = "nl"; break;
6291 case UNLE: ccode = "ng"; break;
6296 /* Maybe we have a guess as to how likely the branch is.
6297 The old mnemonics don't have a way to specify this information. */
6298 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
6299 if (note != NULL_RTX)
6301 /* PROB is the difference from 50%. */
6302 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
6304 /* For branches that are very close to 50%, assume not-taken. */
6305 if (abs (prob) > REG_BR_PROB_BASE / 20
6306 && ((prob > 0) ^ need_longbranch))
6315 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
6317 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
6319 /* We need to escape any '%' characters in the reg_names string.
6320 Assume they'd only be the first character... */
6321 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
6323 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
6327 /* If the branch distance was too far, we may have to use an
6328 unconditional branch to go the distance. */
6329 if (need_longbranch)
6330 s += sprintf (s, ",$+8\n\tb %s", label);
6332 s += sprintf (s, ",%s", label);
6338 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
6339 operands of the last comparison is nonzero/true, FALSE_COND if it
6340 is zero/false. Return 0 if the hardware has no such operation. */
6343 rs6000_emit_cmove (dest, op, true_cond, false_cond)
6349 enum rtx_code code = GET_CODE (op);
6350 rtx op0 = rs6000_compare_op0;
6351 rtx op1 = rs6000_compare_op1;
6353 enum machine_mode mode = GET_MODE (op0);
6356 /* First, work out if the hardware can do this at all, or
6357 if it's too slow... */
6358 /* If the comparison is an integer one, since we only have fsel
6359 it'll be cheaper to use a branch. */
6360 if (! rs6000_compare_fp_p)
6363 /* Eliminate half of the comparisons by switching operands, this
6364 makes the remaining code simpler. */
6365 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
6366 || code == LTGT || code == LT)
6368 code = reverse_condition_maybe_unordered (code);
6370 true_cond = false_cond;
6374 /* UNEQ and LTGT take four instructions for a comparison with zero,
6375 it'll probably be faster to use a branch here too. */
6379 if (GET_CODE (op1) == CONST_DOUBLE)
6380 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
6382 /* We're going to try to implement comparions by performing
6383 a subtract, then comparing against zero. Unfortunately,
6384 Inf - Inf is NaN which is not zero, and so if we don't
6385 know that the the operand is finite and the comparison
6386 would treat EQ different to UNORDERED, we can't do it. */
6387 if (! flag_unsafe_math_optimizations
6388 && code != GT && code != UNGE
6389 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
6390 /* Constructs of the form (a OP b ? a : b) are safe. */
6391 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
6392 || (! rtx_equal_p (op0, true_cond)
6393 && ! rtx_equal_p (op1, true_cond))))
6395 /* At this point we know we can use fsel. */
6397 /* Reduce the comparison to a comparison against zero. */
6398 temp = gen_reg_rtx (mode);
6399 emit_insn (gen_rtx_SET (VOIDmode, temp,
6400 gen_rtx_MINUS (mode, op0, op1)));
6402 op1 = CONST0_RTX (mode);
6404 /* If we don't care about NaNs we can reduce some of the comparisons
6405 down to faster ones. */
6406 if (flag_unsafe_math_optimizations)
6412 true_cond = false_cond;
6425 /* Now, reduce everything down to a GE. */
6432 temp = gen_reg_rtx (mode);
6433 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6438 temp = gen_reg_rtx (mode);
6439 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
6444 temp = gen_reg_rtx (mode);
6445 emit_insn (gen_rtx_SET (VOIDmode, temp,
6447 gen_rtx_ABS (mode, op0))));
6452 temp = gen_reg_rtx (mode);
6453 emit_insn (gen_rtx_SET (VOIDmode, temp,
6454 gen_rtx_IF_THEN_ELSE (mode,
6455 gen_rtx_GE (VOIDmode,
6457 true_cond, false_cond)));
6459 true_cond = false_cond;
6461 temp = gen_reg_rtx (mode);
6462 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6467 temp = gen_reg_rtx (mode);
6468 emit_insn (gen_rtx_SET (VOIDmode, temp,
6469 gen_rtx_IF_THEN_ELSE (mode,
6470 gen_rtx_GE (VOIDmode,
6472 true_cond, false_cond)));
6474 false_cond = true_cond;
6476 temp = gen_reg_rtx (mode);
6477 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
6485 emit_insn (gen_rtx_SET (VOIDmode, dest,
6486 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
6487 gen_rtx_GE (VOIDmode,
6489 true_cond, false_cond)));
6494 rs6000_emit_minmax (dest, code, op0, op1)
6500 enum machine_mode mode = GET_MODE (op0);
6502 if (code == SMAX || code == UMAX)
6503 target = emit_conditional_move (dest, GE, op0, op1, mode,
6506 target = emit_conditional_move (dest, GE, op0, op1, mode,
6508 if (target == NULL_RTX)
6511 emit_move_insn (dest, target);
6514 /* This page contains routines that are used to determine what the
6515 function prologue and epilogue code will do and write them out. */
6517 /* Return the first fixed-point register that is required to be
6518 saved. 32 if none. */
6521 first_reg_to_save ()
6525 /* Find lowest numbered live register. */
6526 for (first_reg = 13; first_reg <= 31; first_reg++)
6527 if (regs_ever_live[first_reg]
6528 && (! call_used_regs[first_reg]
6529 || (first_reg == PIC_OFFSET_TABLE_REGNUM
6530 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
6531 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
6536 /* AIX must save/restore every register that contains a parameter
6537 before/after the .__mcount call plus an additional register
6538 for the static chain, if needed; use registers from 30 down to 22
6540 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
6542 int last_parm_reg, profile_first_reg;
6544 /* Figure out last used parameter register. The proper thing
6545 to do is to walk incoming args of the function. A function
6546 might have live parameter registers even if it has no
6548 for (last_parm_reg = 10;
6549 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
6553 /* Calculate first reg for saving parameter registers
6555 Skip reg 31 which may contain the frame pointer. */
6556 profile_first_reg = (33 - last_parm_reg
6557 - (current_function_needs_context ? 1 : 0));
6559 /* Need to skip another reg to account for R31 being PICBASE
6560 (when flag_pic is set) or R30 being used as the frame
6561 pointer (when flag_pic is not set). */
6562 --profile_first_reg;
6564 /* Do not save frame pointer if no parameters needs to be saved. */
6565 if (profile_first_reg == 31)
6566 profile_first_reg = 32;
6568 if (first_reg > profile_first_reg)
6569 first_reg = profile_first_reg;
6572 /* SVR4 may need one register to preserve the static chain. */
6573 else if (current_function_needs_context)
6575 /* Skip reg 31 which may contain the frame pointer. */
6582 if (flag_pic && current_function_uses_pic_offset_table &&
6583 (first_reg > PIC_OFFSET_TABLE_REGNUM))
6584 return PIC_OFFSET_TABLE_REGNUM;
6590 /* Similar, for FP regs. */
6593 first_fp_reg_to_save ()
6597 /* Find lowest numbered live register. */
6598 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
6599 if (regs_ever_live[first_reg])
6605 /* Similar, for AltiVec regs. */
6608 first_altivec_reg_to_save ()
6612 /* Stack frame remains as is unless we are in AltiVec ABI. */
6613 if (! TARGET_ALTIVEC_ABI)
6614 return LAST_ALTIVEC_REGNO + 1;
6616 /* Find lowest numbered live register. */
6617 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
6618 if (regs_ever_live[i])
6624 /* Return a 32-bit mask of the AltiVec registers we need to set in
6625 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
6626 the 32-bit word is 0. */
6629 compute_vrsave_mask ()
6631 unsigned int i, mask = 0;
6633 /* First, find out if we use _any_ altivec registers. */
6634 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6635 if (regs_ever_live[i])
6636 mask |= ALTIVEC_REG_BIT (i);
6641 /* Next, add all registers that are call-clobbered. We do this
6642 because post-reload register optimizers such as regrename_optimize
6643 may choose to use them. They never change the register class
6644 chosen by reload, so cannot create new uses of altivec registers
6645 if there were none before, so the early exit above is safe. */
6646 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
6647 altivec registers not saved in the mask, which might well make the
6648 adjustments below more effective in eliding the save/restore of
6649 VRSAVE in small functions. */
6650 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
6651 if (call_used_regs[i])
6652 mask |= ALTIVEC_REG_BIT (i);
6654 /* Next, remove the argument registers from the set. These must
6655 be in the VRSAVE mask set by the caller, so we don't need to add
6656 them in again. More importantly, the mask we compute here is
6657 used to generate CLOBBERs in the set_vrsave insn, and we do not
6658 wish the argument registers to die. */
6659 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
6660 mask &= ~ALTIVEC_REG_BIT (i);
6662 /* Similarly, remove the return value from the set. */
6665 diddle_return_value (is_altivec_return_reg, &yes);
6667 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
6674 is_altivec_return_reg (reg, xyes)
6678 bool *yes = (bool *) xyes;
6679 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
6684 /* Calculate the stack information for the current function. This is
6685 complicated by having two separate calling sequences, the AIX calling
6686 sequence and the V.4 calling sequence.
6688 AIX (and Darwin/Mac OS X) stack frames look like:
6690 SP----> +---------------------------------------+
6691 | back chain to caller | 0 0
6692 +---------------------------------------+
6693 | saved CR | 4 8 (8-11)
6694 +---------------------------------------+
6696 +---------------------------------------+
6697 | reserved for compilers | 12 24
6698 +---------------------------------------+
6699 | reserved for binders | 16 32
6700 +---------------------------------------+
6701 | saved TOC pointer | 20 40
6702 +---------------------------------------+
6703 | Parameter save area (P) | 24 48
6704 +---------------------------------------+
6705 | Alloca space (A) | 24+P etc.
6706 +---------------------------------------+
6707 | Local variable space (L) | 24+P+A
6708 +---------------------------------------+
6709 | Float/int conversion temporary (X) | 24+P+A+L
6710 +---------------------------------------+
6711 | Save area for AltiVec registers (W) | 24+P+A+L+X
6712 +---------------------------------------+
6713 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
6714 +---------------------------------------+
6715 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
6716 +---------------------------------------+
6717 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
6718 +---------------------------------------+
6719 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
6720 +---------------------------------------+
6721 old SP->| back chain to caller's caller |
6722 +---------------------------------------+
6724 The required alignment for AIX configurations is two words (i.e., 8
6728 V.4 stack frames look like:
6730 SP----> +---------------------------------------+
6731 | back chain to caller | 0
6732 +---------------------------------------+
6733 | caller's saved LR | 4
6734 +---------------------------------------+
6735 | Parameter save area (P) | 8
6736 +---------------------------------------+
6737 | Alloca space (A) | 8+P
6738 +---------------------------------------+
6739 | Varargs save area (V) | 8+P+A
6740 +---------------------------------------+
6741 | Local variable space (L) | 8+P+A+V
6742 +---------------------------------------+
6743 | Float/int conversion temporary (X) | 8+P+A+V+L
6744 +---------------------------------------+
6745 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
6746 +---------------------------------------+
6747 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
6748 +---------------------------------------+
6749 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
6750 +---------------------------------------+
6751 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
6752 +---------------------------------------+
6753 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
6754 +---------------------------------------+
6755 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
6756 +---------------------------------------+
6757 old SP->| back chain to caller's caller |
6758 +---------------------------------------+
6760 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
6761 given. (But note below and in sysv4.h that we require only 8 and
6762 may round up the size of our stack frame anyways. The historical
6763 reason is early versions of powerpc-linux which didn't properly
6764 align the stack at program startup. A happy side-effect is that
6765 -mno-eabi libraries can be used with -meabi programs.)
6767 The EABI configuration defaults to the V.4 layout, unless
6768 -mcall-aix is used, in which case the AIX layout is used. However,
6769 the stack alignment requirements may differ. If -mno-eabi is not
6770 given, the required stack alignment is 8 bytes; if -mno-eabi is
6771 given, the required alignment is 16 bytes. (But see V.4 comment
6774 #ifndef ABI_STACK_BOUNDARY
6775 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
6779 rs6000_stack_info ()
6781 static rs6000_stack_t info, zero_info;
6782 rs6000_stack_t *info_ptr = &info;
6783 int reg_size = TARGET_POWERPC64 ? 8 : 4;
6784 enum rs6000_abi abi;
6788 /* Zero all fields portably. */
6791 /* Select which calling sequence. */
6792 info_ptr->abi = abi = DEFAULT_ABI;
6794 /* Calculate which registers need to be saved & save area size. */
6795 info_ptr->first_gp_reg_save = first_reg_to_save ();
6796 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
6797 even if it currently looks like we won't. */
6798 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
6799 || (flag_pic == 1 && abi == ABI_V4)
6800 || (flag_pic && abi == ABI_DARWIN))
6801 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
6802 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
6804 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
6806 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
6807 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
6809 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
6810 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
6811 - info_ptr->first_altivec_reg_save);
6813 /* Does this function call anything? */
6814 info_ptr->calls_p = (! current_function_is_leaf
6815 || cfun->machine->ra_needs_full_frame);
6817 /* Determine if we need to save the link register. */
6818 if (rs6000_ra_ever_killed ()
6819 || (DEFAULT_ABI == ABI_AIX && profile_flag)
6820 #ifdef TARGET_RELOCATABLE
6821 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
6823 || (info_ptr->first_fp_reg_save != 64
6824 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
6825 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
6826 || (abi == ABI_V4 && current_function_calls_alloca)
6827 || (DEFAULT_ABI == ABI_DARWIN
6829 && current_function_uses_pic_offset_table)
6830 || info_ptr->calls_p)
6832 info_ptr->lr_save_p = 1;
6833 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
6836 /* Determine if we need to save the condition code registers. */
6837 if (regs_ever_live[CR2_REGNO]
6838 || regs_ever_live[CR3_REGNO]
6839 || regs_ever_live[CR4_REGNO])
6841 info_ptr->cr_save_p = 1;
6843 info_ptr->cr_size = reg_size;
6846 /* If the current function calls __builtin_eh_return, then we need
6847 to allocate stack space for registers that will hold data for
6848 the exception handler. */
6849 if (current_function_calls_eh_return)
6852 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
6854 ehrd_size = i * UNITS_PER_WORD;
6859 /* Determine various sizes. */
6860 info_ptr->reg_size = reg_size;
6861 info_ptr->fixed_size = RS6000_SAVE_AREA;
6862 info_ptr->varargs_size = RS6000_VARARGS_AREA;
6863 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
6864 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
6867 if (TARGET_ALTIVEC_ABI)
6869 info_ptr->vrsave_mask = compute_vrsave_mask ();
6870 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
6874 info_ptr->vrsave_mask = 0;
6875 info_ptr->vrsave_size = 0;
6878 /* Calculate the offsets. */
6886 case ABI_AIX_NODESC:
6888 info_ptr->fp_save_offset = - info_ptr->fp_size;
6889 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6891 if (TARGET_ALTIVEC_ABI)
6893 info_ptr->vrsave_save_offset
6894 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
6896 /* Align stack so vector save area is on a quadword boundary. */
6897 if (info_ptr->altivec_size != 0)
6898 info_ptr->altivec_padding_size
6899 = 16 - (-info_ptr->vrsave_save_offset % 16);
6901 info_ptr->altivec_padding_size = 0;
6903 info_ptr->altivec_save_offset
6904 = info_ptr->vrsave_save_offset
6905 - info_ptr->altivec_padding_size
6906 - info_ptr->altivec_size;
6908 /* Adjust for AltiVec case. */
6909 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
6912 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
6913 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
6914 info_ptr->lr_save_offset = 2*reg_size;
6918 info_ptr->fp_save_offset = - info_ptr->fp_size;
6919 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
6920 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
6922 if (TARGET_ALTIVEC_ABI)
6924 info_ptr->vrsave_save_offset
6925 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
6927 /* Align stack so vector save area is on a quadword boundary. */
6928 if (info_ptr->altivec_size != 0)
6929 info_ptr->altivec_padding_size
6930 = 16 - (-info_ptr->vrsave_save_offset % 16);
6932 info_ptr->altivec_padding_size = 0;
6934 info_ptr->altivec_save_offset
6935 = info_ptr->vrsave_save_offset
6936 - info_ptr->altivec_padding_size
6937 - info_ptr->altivec_size;
6939 /* Adjust for AltiVec case. */
6940 info_ptr->toc_save_offset
6941 = info_ptr->altivec_save_offset - info_ptr->toc_size;
6944 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
6945 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
6946 info_ptr->lr_save_offset = reg_size;
6950 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
6952 + info_ptr->altivec_size
6953 + info_ptr->altivec_padding_size
6954 + info_ptr->vrsave_size
6958 + info_ptr->vrsave_size
6959 + info_ptr->toc_size,
6960 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
6963 total_raw_size = (info_ptr->vars_size
6964 + info_ptr->parm_size
6965 + info_ptr->save_size
6966 + info_ptr->varargs_size
6967 + info_ptr->fixed_size);
6969 info_ptr->total_size =
6970 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
6972 /* Determine if we need to allocate any stack frame:
6974 For AIX we need to push the stack if a frame pointer is needed
6975 (because the stack might be dynamically adjusted), if we are
6976 debugging, if we make calls, or if the sum of fp_save, gp_save,
6977 and local variables are more than the space needed to save all
6978 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
6979 + 18*8 = 288 (GPR13 reserved).
6981 For V.4 we don't have the stack cushion that AIX uses, but assume
6982 that the debugger can handle stackless frames. */
6984 if (info_ptr->calls_p)
6985 info_ptr->push_p = 1;
6987 else if (abi == ABI_V4)
6988 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
6989 || info_ptr->calls_p);
6992 info_ptr->push_p = (frame_pointer_needed
6993 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
6994 || ((total_raw_size - info_ptr->fixed_size)
6995 > (TARGET_32BIT ? 220 : 288)));
6997 /* Zero offsets if we're not saving those registers. */
6998 if (info_ptr->fp_size == 0)
6999 info_ptr->fp_save_offset = 0;
7001 if (info_ptr->gp_size == 0)
7002 info_ptr->gp_save_offset = 0;
7004 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7005 info_ptr->altivec_save_offset = 0;
7007 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7008 info_ptr->vrsave_save_offset = 0;
7010 if (! info_ptr->lr_save_p)
7011 info_ptr->lr_save_offset = 0;
7013 if (! info_ptr->cr_save_p)
7014 info_ptr->cr_save_offset = 0;
7016 if (! info_ptr->toc_save_p)
7017 info_ptr->toc_save_offset = 0;
7023 debug_stack_info (info)
7024 rs6000_stack_t *info;
7026 const char *abi_string;
7029 info = rs6000_stack_info ();
7031 fprintf (stderr, "\nStack information for function %s:\n",
7032 ((current_function_decl && DECL_NAME (current_function_decl))
7033 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7038 default: abi_string = "Unknown"; break;
7039 case ABI_NONE: abi_string = "NONE"; break;
7041 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7042 case ABI_DARWIN: abi_string = "Darwin"; break;
7043 case ABI_V4: abi_string = "V.4"; break;
7046 fprintf (stderr, "\tABI = %5s\n", abi_string);
7048 if (TARGET_ALTIVEC_ABI)
7049 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7051 if (info->first_gp_reg_save != 32)
7052 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7054 if (info->first_fp_reg_save != 64)
7055 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7057 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7058 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7059 info->first_altivec_reg_save);
7061 if (info->lr_save_p)
7062 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7064 if (info->cr_save_p)
7065 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7067 if (info->toc_save_p)
7068 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7070 if (info->vrsave_mask)
7071 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7074 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7077 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7079 if (info->gp_save_offset)
7080 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7082 if (info->fp_save_offset)
7083 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7085 if (info->altivec_save_offset)
7086 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7087 info->altivec_save_offset);
7089 if (info->vrsave_save_offset)
7090 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7091 info->vrsave_save_offset);
7093 if (info->lr_save_offset)
7094 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7096 if (info->cr_save_offset)
7097 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7099 if (info->toc_save_offset)
7100 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7102 if (info->varargs_save_offset)
7103 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7105 if (info->total_size)
7106 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7108 if (info->varargs_size)
7109 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7111 if (info->vars_size)
7112 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7114 if (info->parm_size)
7115 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7117 if (info->fixed_size)
7118 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7121 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7124 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7126 if (info->altivec_size)
7127 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7129 if (info->vrsave_size)
7130 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7132 if (info->altivec_padding_size)
7133 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7134 info->altivec_padding_size);
7137 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7140 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7143 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7145 if (info->save_size)
7146 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7148 if (info->reg_size != 4)
7149 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7151 fprintf (stderr, "\n");
7155 rs6000_return_addr (count, frame)
7159 /* Currently we don't optimize very well between prolog and body
7160 code and for PIC code the code can be actually quite bad, so
7161 don't try to be too clever here. */
7164 || DEFAULT_ABI == ABI_AIX
7165 || DEFAULT_ABI == ABI_AIX_NODESC)
7167 cfun->machine->ra_needs_full_frame = 1;
7174 plus_constant (copy_to_reg
7175 (gen_rtx_MEM (Pmode,
7176 memory_address (Pmode, frame))),
7177 RETURN_ADDRESS_OFFSET)));
7180 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
7184 rs6000_ra_ever_killed ()
7188 #ifdef ASM_OUTPUT_MI_THUNK
7189 if (current_function_is_thunk)
7192 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
7193 || cfun->machine->ra_needs_full_frame)
7194 return regs_ever_live[LINK_REGISTER_REGNUM];
7196 push_topmost_sequence ();
7198 pop_topmost_sequence ();
7200 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
7204 /* Add a REG_MAYBE_DEAD note to the insn. */
7206 rs6000_maybe_dead (insn)
7209 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
7214 /* Emit instructions needed to load the TOC register.
7215 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
7216 a constant pool; or for SVR4 -fpic. */
7219 rs6000_emit_load_toc_table (fromprolog)
7223 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
7225 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
7227 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7229 rtx temp = (fromprolog
7230 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7231 : gen_reg_rtx (Pmode));
7232 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
7233 rs6000_maybe_dead (emit_move_insn (dest, temp));
7235 else if (flag_pic == 2)
7238 rtx tempLR = (fromprolog
7239 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
7240 : gen_reg_rtx (Pmode));
7241 rtx temp0 = (fromprolog
7242 ? gen_rtx_REG (Pmode, 0)
7243 : gen_reg_rtx (Pmode));
7246 /* possibly create the toc section */
7247 if (! toc_initialized)
7250 function_section (current_function_decl);
7257 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
7258 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7260 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
7261 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7263 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
7265 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7266 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
7273 static int reload_toc_labelno = 0;
7275 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
7277 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
7278 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7280 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
7283 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
7284 rs6000_maybe_dead (emit_move_insn (temp0,
7285 gen_rtx_MEM (Pmode, dest)));
7287 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
7289 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
7291 /* This is for AIX code running in non-PIC ELF. */
7294 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
7295 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
7297 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
7298 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
7306 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
7308 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
7313 get_TOC_alias_set ()
7315 static int set = -1;
7317 set = new_alias_set ();
7321 /* This retuns nonzero if the current function uses the TOC. This is
7322 determined by the presence of (unspec ... 7), which is generated by
7323 the various load_toc_* patterns. */
7330 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7333 rtx pat = PATTERN (insn);
7336 if (GET_CODE (pat) == PARALLEL)
7337 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7338 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
7339 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
7346 create_TOC_reference (symbol)
7349 return gen_rtx_PLUS (Pmode,
7350 gen_rtx_REG (Pmode, TOC_REGISTER),
7351 gen_rtx_CONST (Pmode,
7352 gen_rtx_MINUS (Pmode, symbol,
7353 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
7357 /* __throw will restore its own return address to be the same as the
7358 return address of the function that the throw is being made to.
7359 This is unfortunate, because we want to check the original
7360 return address to see if we need to restore the TOC.
7361 So we have to squirrel it away here.
7362 This is used only in compiling __throw and __rethrow.
7364 Most of this code should be removed by CSE. */
7365 static rtx insn_after_throw;
7367 /* This does the saving... */
7369 rs6000_aix_emit_builtin_unwind_init ()
7372 rtx stack_top = gen_reg_rtx (Pmode);
7373 rtx opcode_addr = gen_reg_rtx (Pmode);
7375 insn_after_throw = gen_reg_rtx (SImode);
7377 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7378 emit_move_insn (stack_top, mem);
7380 mem = gen_rtx_MEM (Pmode,
7381 gen_rtx_PLUS (Pmode, stack_top,
7382 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7383 emit_move_insn (opcode_addr, mem);
7384 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
7387 /* Emit insns to _restore_ the TOC register, at runtime (specifically
7388 in _eh.o). Only used on AIX.
7390 The idea is that on AIX, function calls look like this:
7391 bl somefunction-trampoline
7395 somefunction-trampoline:
7397 ... load function address in the count register ...
7399 or like this, if the linker determines that this is not a cross-module call
7400 and so the TOC need not be restored:
7403 or like this, if the compiler could determine that this is not a
7406 now, the tricky bit here is that register 2 is saved and restored
7407 by the _linker_, so we can't readily generate debugging information
7408 for it. So we need to go back up the call chain looking at the
7409 insns at return addresses to see which calls saved the TOC register
7410 and so see where it gets restored from.
7412 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
7413 just before the actual epilogue.
7415 On the bright side, this incurs no space or time overhead unless an
7416 exception is thrown, except for the extra code in libgcc.a.
7418 The parameter STACKSIZE is a register containing (at runtime)
7419 the amount to be popped off the stack in addition to the stack frame
7420 of this routine (which will be __throw or __rethrow, and so is
7421 guaranteed to have a stack frame). */
7424 rs6000_emit_eh_toc_restore (stacksize)
7428 rtx bottom_of_stack = gen_reg_rtx (Pmode);
7429 rtx tocompare = gen_reg_rtx (SImode);
7430 rtx opcode = gen_reg_rtx (SImode);
7431 rtx opcode_addr = gen_reg_rtx (Pmode);
7433 rtx loop_start = gen_label_rtx ();
7434 rtx no_toc_restore_needed = gen_label_rtx ();
7435 rtx loop_exit = gen_label_rtx ();
7437 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
7438 set_mem_alias_set (mem, rs6000_sr_alias_set);
7439 emit_move_insn (bottom_of_stack, mem);
7441 top_of_stack = expand_binop (Pmode, add_optab,
7442 bottom_of_stack, stacksize,
7443 NULL_RTX, 1, OPTAB_WIDEN);
7445 emit_move_insn (tocompare,
7446 GEN_INT (trunc_int_for_mode (TARGET_32BIT
7448 : 0xE8410028, SImode)));
7450 if (insn_after_throw == NULL_RTX)
7452 emit_move_insn (opcode, insn_after_throw);
7454 emit_note (NULL, NOTE_INSN_LOOP_BEG);
7455 emit_label (loop_start);
7457 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
7458 SImode, NULL_RTX, NULL_RTX,
7459 no_toc_restore_needed);
7461 mem = gen_rtx_MEM (Pmode,
7462 gen_rtx_PLUS (Pmode, bottom_of_stack,
7463 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
7464 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
7466 emit_label (no_toc_restore_needed);
7467 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
7468 Pmode, NULL_RTX, NULL_RTX,
7471 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
7472 set_mem_alias_set (mem, rs6000_sr_alias_set);
7473 emit_move_insn (bottom_of_stack, mem);
7475 mem = gen_rtx_MEM (Pmode,
7476 gen_rtx_PLUS (Pmode, bottom_of_stack,
7477 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
7478 emit_move_insn (opcode_addr, mem);
7479 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
7481 emit_note (NULL, NOTE_INSN_LOOP_CONT);
7482 emit_jump (loop_start);
7483 emit_note (NULL, NOTE_INSN_LOOP_END);
7484 emit_label (loop_exit);
7486 #endif /* TARGET_AIX */
7488 /* This ties together stack memory (MEM with an alias set of
7489 rs6000_sr_alias_set) and the change to the stack pointer. */
7492 rs6000_emit_stack_tie ()
7494 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7496 set_mem_alias_set (mem, rs6000_sr_alias_set);
7497 emit_insn (gen_stack_tie (mem));
7500 /* Emit the correct code for allocating stack space, as insns.
7501 If COPY_R12, make sure a copy of the old frame is left in r12.
7502 The generated code may use hard register 0 as a temporary. */
7505 rs6000_emit_allocate_stack (size, copy_r12)
7510 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7511 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
7512 rtx todec = GEN_INT (-size);
7514 if (current_function_limit_stack)
7516 if (REG_P (stack_limit_rtx)
7517 && REGNO (stack_limit_rtx) > 1
7518 && REGNO (stack_limit_rtx) <= 31)
7520 emit_insn (Pmode == SImode
7521 ? gen_addsi3 (tmp_reg,
7524 : gen_adddi3 (tmp_reg,
7528 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7531 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
7533 && DEFAULT_ABI == ABI_V4)
7535 rtx toload = gen_rtx_CONST (VOIDmode,
7536 gen_rtx_PLUS (Pmode,
7540 emit_insn (gen_elf_high (tmp_reg, toload));
7541 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
7542 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
7546 warning ("stack limit expression is not supported");
7549 if (copy_r12 || ! TARGET_UPDATE)
7550 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
7556 /* Need a note here so that try_split doesn't get confused. */
7557 if (get_last_insn() == NULL_RTX)
7558 emit_note (0, NOTE_INSN_DELETED);
7559 insn = emit_move_insn (tmp_reg, todec);
7560 try_split (PATTERN (insn), insn, 0);
7564 if (Pmode == SImode)
7565 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
7568 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
7573 if (Pmode == SImode)
7574 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
7576 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
7577 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
7578 gen_rtx_REG (Pmode, 12));
7581 RTX_FRAME_RELATED_P (insn) = 1;
7583 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7584 gen_rtx_SET (VOIDmode, stack_reg,
7585 gen_rtx_PLUS (Pmode, stack_reg,
7590 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
7593 (mem (plus (blah) (regXX)))
7597 (mem (plus (blah) (const VALUE_OF_REGXX))). */
7600 altivec_frame_fixup (insn, reg, val)
7606 real = copy_rtx (PATTERN (insn));
7608 real = replace_rtx (real, reg, GEN_INT (val));
7610 RTX_FRAME_RELATED_P (insn) = 1;
7611 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7616 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
7617 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
7618 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
7619 deduce these equivalences by itself so it wasn't necessary to hold
7620 its hand so much. */
7623 rs6000_frame_related (insn, reg, val, reg2, rreg)
7632 real = copy_rtx (PATTERN (insn));
7634 real = replace_rtx (real, reg,
7635 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
7636 STACK_POINTER_REGNUM),
7639 /* We expect that 'real' is either a SET or a PARALLEL containing
7640 SETs (and possibly other stuff). In a PARALLEL, all the SETs
7641 are important so they all have to be marked RTX_FRAME_RELATED_P. */
7643 if (GET_CODE (real) == SET)
7647 temp = simplify_rtx (SET_SRC (set));
7649 SET_SRC (set) = temp;
7650 temp = simplify_rtx (SET_DEST (set));
7652 SET_DEST (set) = temp;
7653 if (GET_CODE (SET_DEST (set)) == MEM)
7655 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7657 XEXP (SET_DEST (set), 0) = temp;
7660 else if (GET_CODE (real) == PARALLEL)
7663 for (i = 0; i < XVECLEN (real, 0); i++)
7664 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
7666 rtx set = XVECEXP (real, 0, i);
7668 temp = simplify_rtx (SET_SRC (set));
7670 SET_SRC (set) = temp;
7671 temp = simplify_rtx (SET_DEST (set));
7673 SET_DEST (set) = temp;
7674 if (GET_CODE (SET_DEST (set)) == MEM)
7676 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
7678 XEXP (SET_DEST (set), 0) = temp;
7680 RTX_FRAME_RELATED_P (set) = 1;
7686 if (reg2 != NULL_RTX)
7687 real = replace_rtx (real, reg2, rreg);
7689 RTX_FRAME_RELATED_P (insn) = 1;
7690 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7695 /* Returns an insn that has a vrsave set operation with the
7696 appropriate CLOBBERs. */
7699 generate_set_vrsave (reg, info, epiloguep)
7701 rs6000_stack_t *info;
7705 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
7707 clobs[0] = gen_set_vrsave (reg);
7711 /* We need to clobber the registers in the mask so the scheduler
7712 does not move sets to VRSAVE before sets of AltiVec registers.
7714 However, if the function receives nonlocal gotos, reload will set
7715 all call saved registers live. We will end up with:
7717 (set (reg 999) (mem))
7718 (parallel [ (set (reg vrsave) (unspec blah))
7719 (clobber (reg 999))])
7721 The clobber will cause the store into reg 999 to be dead, and
7722 flow will attempt to delete an epilogue insn. In this case, we
7723 need an unspec use/set of the register. */
7725 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7726 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
7728 if (!epiloguep || call_used_regs [i])
7729 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
7730 gen_rtx_REG (V4SImode, i));
7733 rtx reg = gen_rtx_REG (V4SImode, i);
7734 rtvec r = rtvec_alloc (1);
7736 RTVEC_ELT (r, 0) = reg;
7739 = gen_rtx_SET (VOIDmode, reg, gen_rtx_UNSPEC (V4SImode, r, 27));
7743 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
7745 for (i = 0; i < nclobs; ++i)
7746 XVECEXP (insn, 0, i) = clobs[i];
7751 /* Emit function prologue as insns. */
7754 rs6000_emit_prologue ()
7756 rs6000_stack_t *info = rs6000_stack_info ();
7757 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
7758 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7759 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7760 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
7761 rtx frame_reg_rtx = sp_reg_rtx;
7762 rtx cr_save_rtx = NULL;
7764 int saving_FPRs_inline;
7765 int using_store_multiple;
7766 HOST_WIDE_INT sp_offset = 0;
7768 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
7769 && info->first_gp_reg_save < 31);
7770 saving_FPRs_inline = (info->first_fp_reg_save == 64
7771 || FP_SAVE_INLINE (info->first_fp_reg_save));
7773 /* For V.4, update stack before we do any saving and set back pointer. */
7774 if (info->push_p && DEFAULT_ABI == ABI_V4)
7776 if (info->total_size < 32767)
7777 sp_offset = info->total_size;
7779 frame_reg_rtx = frame_ptr_rtx;
7780 rs6000_emit_allocate_stack (info->total_size,
7781 (frame_reg_rtx != sp_reg_rtx
7784 || info->first_fp_reg_save < 64
7785 || info->first_gp_reg_save < 32
7787 if (frame_reg_rtx != sp_reg_rtx)
7788 rs6000_emit_stack_tie ();
7791 /* Save AltiVec registers if needed. */
7792 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
7796 /* There should be a non inline version of this, for when we
7797 are saving lots of vector registers. */
7798 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
7799 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
7801 rtx areg, savereg, mem;
7804 offset = info->altivec_save_offset + sp_offset
7805 + 16 * (i - info->first_altivec_reg_save);
7807 savereg = gen_rtx_REG (V4SImode, i);
7809 areg = gen_rtx_REG (Pmode, 0);
7810 emit_move_insn (areg, GEN_INT (offset));
7812 /* AltiVec addressing mode is [reg+reg]. */
7813 mem = gen_rtx_MEM (V4SImode,
7814 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
7816 set_mem_alias_set (mem, rs6000_sr_alias_set);
7818 insn = emit_move_insn (mem, savereg);
7820 altivec_frame_fixup (insn, areg, offset);
7824 /* VRSAVE is a bit vector representing which AltiVec registers
7825 are used. The OS uses this to determine which vector
7826 registers to save on a context switch. We need to save
7827 VRSAVE on the stack frame, add whatever AltiVec registers we
7828 used in this function, and do the corresponding magic in the
7831 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
7836 /* Get VRSAVE onto a GPR. */
7837 reg = gen_rtx_REG (SImode, 12);
7838 emit_insn (gen_get_vrsave (reg));
7841 offset = info->vrsave_save_offset + sp_offset;
7843 = gen_rtx_MEM (SImode,
7844 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
7845 set_mem_alias_set (mem, rs6000_sr_alias_set);
7846 insn = emit_move_insn (mem, reg);
7848 /* Include the registers in the mask. */
7849 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
7851 insn = emit_insn (generate_set_vrsave (reg, info, 0));
7854 /* If we use the link register, get it into r0. */
7855 if (info->lr_save_p)
7856 emit_move_insn (gen_rtx_REG (Pmode, 0),
7857 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
7859 /* If we need to save CR, put it into r12. */
7860 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
7862 cr_save_rtx = gen_rtx_REG (SImode, 12);
7863 emit_insn (gen_movesi_from_cr (cr_save_rtx));
7866 /* Do any required saving of fpr's. If only one or two to save, do
7867 it ourselves. Otherwise, call function. */
7868 if (saving_FPRs_inline)
7871 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7872 if ((regs_ever_live[info->first_fp_reg_save+i]
7873 && ! call_used_regs[info->first_fp_reg_save+i]))
7876 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7877 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7878 GEN_INT (info->fp_save_offset
7881 mem = gen_rtx_MEM (DFmode, addr);
7882 set_mem_alias_set (mem, rs6000_sr_alias_set);
7884 insn = emit_move_insn (mem, reg);
7885 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7886 NULL_RTX, NULL_RTX);
7889 else if (info->first_fp_reg_save != 64)
7893 const char *alloc_rname;
7895 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
7897 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
7899 LINK_REGISTER_REGNUM));
7900 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
7901 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
7902 alloc_rname = ggc_strdup (rname);
7903 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
7904 gen_rtx_SYMBOL_REF (Pmode,
7906 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
7909 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
7910 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7911 GEN_INT (info->fp_save_offset
7912 + sp_offset + 8*i));
7913 mem = gen_rtx_MEM (DFmode, addr);
7914 set_mem_alias_set (mem, rs6000_sr_alias_set);
7916 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
7918 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7919 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7920 NULL_RTX, NULL_RTX);
7923 /* Save GPRs. This is done as a PARALLEL if we are using
7924 the store-multiple instructions. */
7925 if (using_store_multiple)
7929 p = rtvec_alloc (32 - info->first_gp_reg_save);
7930 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
7931 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7934 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7935 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7936 GEN_INT (info->gp_save_offset
7939 mem = gen_rtx_MEM (reg_mode, addr);
7940 set_mem_alias_set (mem, rs6000_sr_alias_set);
7942 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
7944 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
7945 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7946 NULL_RTX, NULL_RTX);
7951 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
7952 if ((regs_ever_live[info->first_gp_reg_save+i]
7953 && ! call_used_regs[info->first_gp_reg_save+i])
7954 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
7955 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7956 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
7959 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
7960 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
7961 GEN_INT (info->gp_save_offset
7964 mem = gen_rtx_MEM (reg_mode, addr);
7965 set_mem_alias_set (mem, rs6000_sr_alias_set);
7967 insn = emit_move_insn (mem, reg);
7968 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7969 NULL_RTX, NULL_RTX);
7973 /* ??? There's no need to emit actual instructions here, but it's the
7974 easiest way to get the frame unwind information emitted. */
7975 if (current_function_calls_eh_return)
7977 unsigned int i, regno;
7983 regno = EH_RETURN_DATA_REGNO (i);
7984 if (regno == INVALID_REGNUM)
7987 reg = gen_rtx_REG (reg_mode, regno);
7988 addr = plus_constant (frame_reg_rtx,
7989 info->ehrd_offset + sp_offset
7990 + reg_size * (int) i);
7991 mem = gen_rtx_MEM (reg_mode, addr);
7992 set_mem_alias_set (mem, rs6000_sr_alias_set);
7994 insn = emit_move_insn (mem, reg);
7995 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
7996 NULL_RTX, NULL_RTX);
8000 /* Save lr if we used it. */
8001 if (info->lr_save_p)
8003 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8004 GEN_INT (info->lr_save_offset + sp_offset));
8005 rtx reg = gen_rtx_REG (Pmode, 0);
8006 rtx mem = gen_rtx_MEM (Pmode, addr);
8007 /* This should not be of rs6000_sr_alias_set, because of
8008 __builtin_return_address. */
8010 insn = emit_move_insn (mem, reg);
8011 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8012 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8015 /* Save CR if we use any that must be preserved. */
8016 if (info->cr_save_p)
8018 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8019 GEN_INT (info->cr_save_offset + sp_offset));
8020 rtx mem = gen_rtx_MEM (SImode, addr);
8022 set_mem_alias_set (mem, rs6000_sr_alias_set);
8024 /* If r12 was used to hold the original sp, copy cr into r0 now
8026 if (REGNO (frame_reg_rtx) == 12)
8028 cr_save_rtx = gen_rtx_REG (SImode, 0);
8029 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8031 insn = emit_move_insn (mem, cr_save_rtx);
8033 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8034 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8035 OK. All we have to do is specify that _one_ condition code
8036 register is saved in this stack slot. The thrower's epilogue
8037 will then restore all the call-saved registers.
8038 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8039 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8040 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8043 /* Update stack and set back pointer unless this is V.4,
8044 for which it was done previously. */
8045 if (info->push_p && DEFAULT_ABI != ABI_V4)
8046 rs6000_emit_allocate_stack (info->total_size, FALSE);
8048 /* Set frame pointer, if needed. */
8049 if (frame_pointer_needed)
8051 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8053 RTX_FRAME_RELATED_P (insn) = 1;
8056 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8057 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8058 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8059 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8061 /* If emit_load_toc_table will use the link register, we need to save
8062 it. We use R11 for this purpose because emit_load_toc_table
8063 can use register 0. This allows us to use a plain 'blr' to return
8064 from the procedure more often. */
8065 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8066 && ! info->lr_save_p
8067 && EXIT_BLOCK_PTR->pred != NULL);
8068 if (save_LR_around_toc_setup)
8069 emit_move_insn (gen_rtx_REG (Pmode, 11),
8070 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8072 rs6000_emit_load_toc_table (TRUE);
8074 if (save_LR_around_toc_setup)
8075 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8076 gen_rtx_REG (Pmode, 11));
8079 if (DEFAULT_ABI == ABI_DARWIN
8080 && flag_pic && current_function_uses_pic_offset_table)
8082 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8084 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8087 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8088 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8092 /* Write function prologue. */
8095 rs6000_output_function_prologue (file, size)
8097 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8099 rs6000_stack_t *info = rs6000_stack_info ();
8101 if (TARGET_DEBUG_STACK)
8102 debug_stack_info (info);
8104 /* Write .extern for any function we will call to save and restore
8106 if (info->first_fp_reg_save < 64
8107 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8108 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8109 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8110 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8113 /* Write .extern for AIX common mode routines, if needed. */
8114 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8116 fputs ("\t.extern __mulh\n", file);
8117 fputs ("\t.extern __mull\n", file);
8118 fputs ("\t.extern __divss\n", file);
8119 fputs ("\t.extern __divus\n", file);
8120 fputs ("\t.extern __quoss\n", file);
8121 fputs ("\t.extern __quous\n", file);
8122 common_mode_defined = 1;
8125 if (! HAVE_prologue)
8129 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8130 the "toplevel" insn chain. */
8131 emit_note (0, NOTE_INSN_DELETED);
8132 rs6000_emit_prologue ();
8133 emit_note (0, NOTE_INSN_DELETED);
8135 if (TARGET_DEBUG_STACK)
8136 debug_rtx_list (get_insns (), 100);
8137 final (get_insns (), file, FALSE, FALSE);
8141 rs6000_pic_labelno++;
8144 /* Emit function epilogue as insns.
8146 At present, dwarf2out_frame_debug_expr doesn't understand
8147 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8148 anywhere in the epilogue. Most of the insns below would in any case
8149 need special notes to explain where r11 is in relation to the stack. */
8152 rs6000_emit_epilogue (sibcall)
8155 rs6000_stack_t *info;
8156 int restoring_FPRs_inline;
8157 int using_load_multiple;
8158 int using_mfcr_multiple;
8159 int use_backchain_to_restore_sp;
8161 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8162 rtx frame_reg_rtx = sp_reg_rtx;
8163 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8164 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8167 info = rs6000_stack_info ();
8168 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8169 && info->first_gp_reg_save < 31);
8170 restoring_FPRs_inline = (sibcall
8171 || current_function_calls_eh_return
8172 || info->first_fp_reg_save == 64
8173 || FP_SAVE_INLINE (info->first_fp_reg_save));
8174 use_backchain_to_restore_sp = (frame_pointer_needed
8175 || current_function_calls_alloca
8176 || info->total_size > 32767);
8177 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
8178 || rs6000_cpu == PROCESSOR_PPC603
8179 || rs6000_cpu == PROCESSOR_PPC750
8182 /* If we have a frame pointer, a call to alloca, or a large stack
8183 frame, restore the old stack pointer using the backchain. Otherwise,
8184 we know what size to update it with. */
8185 if (use_backchain_to_restore_sp)
8187 /* Under V.4, don't reset the stack pointer until after we're done
8188 loading the saved registers. */
8189 if (DEFAULT_ABI == ABI_V4)
8190 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
8192 emit_move_insn (frame_reg_rtx,
8193 gen_rtx_MEM (Pmode, sp_reg_rtx));
8196 else if (info->push_p)
8198 if (DEFAULT_ABI == ABI_V4)
8199 sp_offset = info->total_size;
8202 emit_insn (TARGET_32BIT
8203 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8204 GEN_INT (info->total_size))
8205 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8206 GEN_INT (info->total_size)));
8210 /* Restore AltiVec registers if needed. */
8211 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8215 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8216 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8218 rtx addr, areg, mem;
8220 areg = gen_rtx_REG (Pmode, 0);
8222 (areg, GEN_INT (info->altivec_save_offset
8224 + 16 * (i - info->first_altivec_reg_save)));
8226 /* AltiVec addressing mode is [reg+reg]. */
8227 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
8228 mem = gen_rtx_MEM (V4SImode, addr);
8229 set_mem_alias_set (mem, rs6000_sr_alias_set);
8231 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
8235 /* Restore VRSAVE if needed. */
8236 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
8240 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8241 GEN_INT (info->vrsave_save_offset + sp_offset));
8242 mem = gen_rtx_MEM (SImode, addr);
8243 set_mem_alias_set (mem, rs6000_sr_alias_set);
8244 reg = gen_rtx_REG (SImode, 12);
8245 emit_move_insn (reg, mem);
8247 emit_insn (generate_set_vrsave (reg, info, 1));
8250 /* Get the old lr if we saved it. */
8251 if (info->lr_save_p)
8253 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8254 GEN_INT (info->lr_save_offset + sp_offset));
8255 rtx mem = gen_rtx_MEM (Pmode, addr);
8257 set_mem_alias_set (mem, rs6000_sr_alias_set);
8259 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
8262 /* Get the old cr if we saved it. */
8263 if (info->cr_save_p)
8265 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8266 GEN_INT (info->cr_save_offset + sp_offset));
8267 rtx mem = gen_rtx_MEM (SImode, addr);
8269 set_mem_alias_set (mem, rs6000_sr_alias_set);
8271 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
8274 /* Set LR here to try to overlap restores below. */
8275 if (info->lr_save_p)
8276 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8277 gen_rtx_REG (Pmode, 0));
8279 /* Load exception handler data registers, if needed. */
8280 if (current_function_calls_eh_return)
8282 unsigned int i, regno;
8288 regno = EH_RETURN_DATA_REGNO (i);
8289 if (regno == INVALID_REGNUM)
8292 addr = plus_constant (frame_reg_rtx,
8293 info->ehrd_offset + sp_offset
8294 + reg_size * (int) i);
8295 mem = gen_rtx_MEM (reg_mode, addr);
8296 set_mem_alias_set (mem, rs6000_sr_alias_set);
8298 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
8302 /* Restore GPRs. This is done as a PARALLEL if we are using
8303 the load-multiple instructions. */
8304 if (using_load_multiple)
8307 p = rtvec_alloc (32 - info->first_gp_reg_save);
8308 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8310 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8311 GEN_INT (info->gp_save_offset
8314 rtx mem = gen_rtx_MEM (reg_mode, addr);
8316 set_mem_alias_set (mem, rs6000_sr_alias_set);
8319 gen_rtx_SET (VOIDmode,
8320 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
8323 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8326 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8327 if ((regs_ever_live[info->first_gp_reg_save+i]
8328 && ! call_used_regs[info->first_gp_reg_save+i])
8329 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8330 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8331 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8333 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8334 GEN_INT (info->gp_save_offset
8337 rtx mem = gen_rtx_MEM (reg_mode, addr);
8339 set_mem_alias_set (mem, rs6000_sr_alias_set);
8341 emit_move_insn (gen_rtx_REG (reg_mode,
8342 info->first_gp_reg_save + i),
8346 /* Restore fpr's if we need to do it without calling a function. */
8347 if (restoring_FPRs_inline)
8348 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8349 if ((regs_ever_live[info->first_fp_reg_save+i]
8350 && ! call_used_regs[info->first_fp_reg_save+i]))
8353 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8354 GEN_INT (info->fp_save_offset
8357 mem = gen_rtx_MEM (DFmode, addr);
8358 set_mem_alias_set (mem, rs6000_sr_alias_set);
8360 emit_move_insn (gen_rtx_REG (DFmode,
8361 info->first_fp_reg_save + i),
8365 /* If we saved cr, restore it here. Just those that were used. */
8366 if (info->cr_save_p)
8368 rtx r12_rtx = gen_rtx_REG (SImode, 12);
8371 if (using_mfcr_multiple)
8373 for (i = 0; i < 8; i++)
8374 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8380 if (using_mfcr_multiple && count > 1)
8385 p = rtvec_alloc (count);
8388 for (i = 0; i < 8; i++)
8389 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8391 rtvec r = rtvec_alloc (2);
8392 RTVEC_ELT (r, 0) = r12_rtx;
8393 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
8394 RTVEC_ELT (p, ndx) =
8395 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
8396 gen_rtx_UNSPEC (CCmode, r, 20));
8399 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8404 for (i = 0; i < 8; i++)
8405 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
8407 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
8413 /* If this is V.4, unwind the stack pointer after all of the loads
8414 have been done. We need to emit a block here so that sched
8415 doesn't decide to move the sp change before the register restores
8416 (which may not have any obvious dependency on the stack). This
8417 doesn't hurt performance, because there is no scheduling that can
8418 be done after this point. */
8419 if (DEFAULT_ABI == ABI_V4)
8421 if (frame_reg_rtx != sp_reg_rtx)
8422 rs6000_emit_stack_tie ();
8424 if (use_backchain_to_restore_sp)
8426 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
8428 else if (sp_offset != 0)
8430 emit_insn (Pmode == SImode
8431 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
8432 GEN_INT (sp_offset))
8433 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
8434 GEN_INT (sp_offset)));
8438 if (current_function_calls_eh_return)
8440 rtx sa = EH_RETURN_STACKADJ_RTX;
8441 emit_insn (Pmode == SImode
8442 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
8443 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
8449 if (! restoring_FPRs_inline)
8450 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
8452 p = rtvec_alloc (2);
8454 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8455 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8457 LINK_REGISTER_REGNUM));
8459 /* If we have to restore more than two FP registers, branch to the
8460 restore function. It will return to our caller. */
8461 if (! restoring_FPRs_inline)
8465 const char *alloc_rname;
8467 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
8468 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
8469 alloc_rname = ggc_strdup (rname);
8470 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
8471 gen_rtx_SYMBOL_REF (Pmode,
8474 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8477 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
8478 GEN_INT (info->fp_save_offset + 8*i));
8479 mem = gen_rtx_MEM (DFmode, addr);
8480 set_mem_alias_set (mem, rs6000_sr_alias_set);
8482 RTVEC_ELT (p, i+3) =
8483 gen_rtx_SET (VOIDmode,
8484 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
8489 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8493 /* Write function epilogue. */
8496 rs6000_output_function_epilogue (file, size)
8498 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8500 rs6000_stack_t *info = rs6000_stack_info ();
8502 if (! HAVE_epilogue)
8504 rtx insn = get_last_insn ();
8505 /* If the last insn was a BARRIER, we don't have to write anything except
8507 if (GET_CODE (insn) == NOTE)
8508 insn = prev_nonnote_insn (insn);
8509 if (insn == 0 || GET_CODE (insn) != BARRIER)
8511 /* This is slightly ugly, but at least we don't have two
8512 copies of the epilogue-emitting code. */
8515 /* A NOTE_INSN_DELETED is supposed to be at the start
8516 and end of the "toplevel" insn chain. */
8517 emit_note (0, NOTE_INSN_DELETED);
8518 rs6000_emit_epilogue (FALSE);
8519 emit_note (0, NOTE_INSN_DELETED);
8521 if (TARGET_DEBUG_STACK)
8522 debug_rtx_list (get_insns (), 100);
8523 final (get_insns (), file, FALSE, FALSE);
8528 /* Output a traceback table here. See /usr/include/sys/debug.h for info
8531 We don't output a traceback table if -finhibit-size-directive was
8532 used. The documentation for -finhibit-size-directive reads
8533 ``don't output a @code{.size} assembler directive, or anything
8534 else that would cause trouble if the function is split in the
8535 middle, and the two halves are placed at locations far apart in
8536 memory.'' The traceback table has this property, since it
8537 includes the offset from the start of the function to the
8538 traceback table itself.
8540 System V.4 Powerpc's (and the embedded ABI derived from it) use a
8541 different traceback table. */
8542 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
8544 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
8545 const char *language_string = lang_hooks.name;
8546 int fixed_parms, float_parms, parm_info;
8549 while (*fname == '.') /* V.4 encodes . in the name */
8552 /* Need label immediately before tbtab, so we can compute its offset
8553 from the function start. */
8556 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8557 ASM_OUTPUT_LABEL (file, fname);
8559 /* The .tbtab pseudo-op can only be used for the first eight
8560 expressions, since it can't handle the possibly variable
8561 length fields that follow. However, if you omit the optional
8562 fields, the assembler outputs zeros for all optional fields
8563 anyways, giving each variable length field is minimum length
8564 (as defined in sys/debug.h). Thus we can not use the .tbtab
8565 pseudo-op at all. */
8567 /* An all-zero word flags the start of the tbtab, for debuggers
8568 that have to find it by searching forward from the entry
8569 point or from the current pc. */
8570 fputs ("\t.long 0\n", file);
8572 /* Tbtab format type. Use format type 0. */
8573 fputs ("\t.byte 0,", file);
8575 /* Language type. Unfortunately, there doesn't seem to be any
8576 official way to get this info, so we use language_string. C
8577 is 0. C++ is 9. No number defined for Obj-C, so use the
8578 value for C for now. There is no official value for Java,
8579 although IBM appears to be using 13. There is no official value
8580 for Chill, so we've chosen 44 pseudo-randomly. */
8581 if (! strcmp (language_string, "GNU C")
8582 || ! strcmp (language_string, "GNU Objective-C"))
8584 else if (! strcmp (language_string, "GNU F77"))
8586 else if (! strcmp (language_string, "GNU Ada"))
8588 else if (! strcmp (language_string, "GNU Pascal"))
8590 else if (! strcmp (language_string, "GNU C++"))
8592 else if (! strcmp (language_string, "GNU Java"))
8594 else if (! strcmp (language_string, "GNU CHILL"))
8598 fprintf (file, "%d,", i);
8600 /* 8 single bit fields: global linkage (not set for C extern linkage,
8601 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
8602 from start of procedure stored in tbtab, internal function, function
8603 has controlled storage, function has no toc, function uses fp,
8604 function logs/aborts fp operations. */
8605 /* Assume that fp operations are used if any fp reg must be saved. */
8606 fprintf (file, "%d,", (1 << 5) | ((info->first_fp_reg_save != 64) << 1));
8608 /* 6 bitfields: function is interrupt handler, name present in
8609 proc table, function calls alloca, on condition directives
8610 (controls stack walks, 3 bits), saves condition reg, saves
8612 /* The `function calls alloca' bit seems to be set whenever reg 31 is
8613 set up as a frame pointer, even when there is no alloca call. */
8614 fprintf (file, "%d,",
8615 ((1 << 6) | (frame_pointer_needed << 5)
8616 | (info->cr_save_p << 1) | (info->lr_save_p)));
8618 /* 3 bitfields: saves backchain, spare bit, number of fpr saved
8620 fprintf (file, "%d,",
8621 (info->push_p << 7) | (64 - info->first_fp_reg_save));
8623 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
8624 fprintf (file, "%d,", (32 - first_reg_to_save ()));
8627 /* Compute the parameter info from the function decl argument
8630 int next_parm_info_bit;
8632 next_parm_info_bit = 31;
8637 for (decl = DECL_ARGUMENTS (current_function_decl);
8638 decl; decl = TREE_CHAIN (decl))
8640 rtx parameter = DECL_INCOMING_RTL (decl);
8641 enum machine_mode mode = GET_MODE (parameter);
8643 if (GET_CODE (parameter) == REG)
8645 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
8653 else if (mode == DFmode)
8658 /* If only one bit will fit, don't or in this entry. */
8659 if (next_parm_info_bit > 0)
8660 parm_info |= (bits << (next_parm_info_bit - 1));
8661 next_parm_info_bit -= 2;
8665 fixed_parms += ((GET_MODE_SIZE (mode)
8666 + (UNITS_PER_WORD - 1))
8668 next_parm_info_bit -= 1;
8674 /* Number of fixed point parameters. */
8675 /* This is actually the number of words of fixed point parameters; thus
8676 an 8 byte struct counts as 2; and thus the maximum value is 8. */
8677 fprintf (file, "%d,", fixed_parms);
8679 /* 2 bitfields: number of floating point parameters (7 bits), parameters
8681 /* This is actually the number of fp registers that hold parameters;
8682 and thus the maximum value is 13. */
8683 /* Set parameters on stack bit if parameters are not in their original
8684 registers, regardless of whether they are on the stack? Xlc
8685 seems to set the bit when not optimizing. */
8686 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
8688 /* Optional fields follow. Some are variable length. */
8690 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
8692 /* There is an entry for each parameter in a register, in the order that
8693 they occur in the parameter list. Any intervening arguments on the
8694 stack are ignored. If the list overflows a long (max possible length
8695 34 bits) then completely leave off all elements that don't fit. */
8696 /* Only emit this long if there was at least one parameter. */
8697 if (fixed_parms || float_parms)
8698 fprintf (file, "\t.long %d\n", parm_info);
8700 /* Offset from start of code to tb table. */
8701 fputs ("\t.long ", file);
8702 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
8704 RS6000_OUTPUT_BASENAME (file, fname);
8706 assemble_name (file, fname);
8710 RS6000_OUTPUT_BASENAME (file, fname);
8712 assemble_name (file, fname);
8716 /* Interrupt handler mask. */
8717 /* Omit this long, since we never set the interrupt handler bit
8720 /* Number of CTL (controlled storage) anchors. */
8721 /* Omit this long, since the has_ctl bit is never set above. */
8723 /* Displacement into stack of each CTL anchor. */
8724 /* Omit this list of longs, because there are no CTL anchors. */
8726 /* Length of function name. */
8727 fprintf (file, "\t.short %d\n", (int) strlen (fname));
8729 /* Function name. */
8730 assemble_string (fname, strlen (fname));
8732 /* Register for alloca automatic storage; this is always reg 31.
8733 Only emit this if the alloca bit was set above. */
8734 if (frame_pointer_needed)
8735 fputs ("\t.byte 31\n", file);
8739 /* A C compound statement that outputs the assembler code for a thunk
8740 function, used to implement C++ virtual function calls with
8741 multiple inheritance. The thunk acts as a wrapper around a virtual
8742 function, adjusting the implicit object parameter before handing
8743 control off to the real function.
8745 First, emit code to add the integer DELTA to the location that
8746 contains the incoming first argument. Assume that this argument
8747 contains a pointer, and is the one used to pass the `this' pointer
8748 in C++. This is the incoming argument *before* the function
8749 prologue, e.g. `%o0' on a sparc. The addition must preserve the
8750 values of all other incoming arguments.
8752 After the addition, emit code to jump to FUNCTION, which is a
8753 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
8754 not touch the return address. Hence returning from FUNCTION will
8755 return to whoever called the current `thunk'.
8757 The effect must be as if FUNCTION had been called directly with the
8758 adjusted first argument. This macro is responsible for emitting
8759 all of the code for a thunk function; output_function_prologue()
8760 and output_function_epilogue() are not invoked.
8762 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
8763 been extracted from it.) It might possibly be useful on some
8764 targets, but probably not.
8766 If you do not define this macro, the target-independent code in the
8767 C++ frontend will generate a less efficient heavyweight thunk that
8768 calls FUNCTION instead of jumping to it. The generic approach does
8769 not support varargs. */
8772 output_mi_thunk (file, thunk_fndecl, delta, function)
8774 tree thunk_fndecl ATTRIBUTE_UNUSED;
8778 const char *this_reg =
8779 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
8782 const char *r0 = reg_names[0];
8783 const char *toc = reg_names[2];
8784 const char *schain = reg_names[11];
8785 const char *r12 = reg_names[12];
8787 static int labelno = 0;
8789 /* Small constants that can be done by one add instruction. */
8790 if (delta >= -32768 && delta <= 32767)
8792 if (! TARGET_NEW_MNEMONICS)
8793 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
8795 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
8798 /* Large constants that can be done by one addis instruction. */
8799 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
8800 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8803 /* 32-bit constants that can be done by an add and addis instruction. */
8804 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
8806 /* Break into two pieces, propagating the sign bit from the low
8807 word to the upper word. */
8808 int delta_high = delta >> 16;
8809 int delta_low = delta & 0xffff;
8810 if ((delta_low & 0x8000) != 0)
8813 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
8816 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
8819 if (! TARGET_NEW_MNEMONICS)
8820 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
8822 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
8825 /* 64-bit constants, fixme */
8829 /* Get the prefix in front of the names. */
8830 switch (DEFAULT_ABI)
8840 case ABI_AIX_NODESC:
8845 /* If the function is compiled in this module, jump to it directly.
8846 Otherwise, load up its address and jump to it. */
8848 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
8850 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
8851 && ! lookup_attribute ("longcall",
8852 TYPE_ATTRIBUTES (TREE_TYPE (function))))
8854 fprintf (file, "\tb %s", prefix);
8855 assemble_name (file, fname);
8856 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
8862 switch (DEFAULT_ABI)
8868 /* Set up a TOC entry for the function. */
8869 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
8871 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
8874 if (TARGET_MINIMAL_TOC)
8875 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
8878 fputs ("\t.tc ", file);
8879 assemble_name (file, fname);
8880 fputs ("[TC],", file);
8882 assemble_name (file, fname);
8885 if (TARGET_MINIMAL_TOC)
8886 asm_fprintf (file, (TARGET_32BIT)
8887 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
8888 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
8889 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
8890 assemble_name (file, buf);
8891 if (TARGET_ELF && TARGET_MINIMAL_TOC)
8892 fputs ("-(.LCTOC1)", file);
8893 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
8895 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
8899 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
8902 asm_fprintf (file, "\tmtctr %s\n", r0);
8904 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
8907 asm_fprintf (file, "\tbctr\n");
8910 case ABI_AIX_NODESC:
8912 fprintf (file, "\tb %s", prefix);
8913 assemble_name (file, fname);
8914 if (flag_pic) fputs ("@plt", file);
8920 fprintf (file, "\tb %s", prefix);
8921 if (flag_pic && !machopic_name_defined_p (fname))
8922 assemble_name (file, machopic_stub_name (fname));
8924 assemble_name (file, fname);
8933 /* A quick summary of the various types of 'constant-pool tables'
8936 Target Flags Name One table per
8937 AIX (none) AIX TOC object file
8938 AIX -mfull-toc AIX TOC object file
8939 AIX -mminimal-toc AIX minimal TOC translation unit
8940 SVR4/EABI (none) SVR4 SDATA object file
8941 SVR4/EABI -fpic SVR4 pic object file
8942 SVR4/EABI -fPIC SVR4 PIC translation unit
8943 SVR4/EABI -mrelocatable EABI TOC function
8944 SVR4/EABI -maix AIX TOC object file
8945 SVR4/EABI -maix -mminimal-toc
8946 AIX minimal TOC translation unit
8948 Name Reg. Set by entries contains:
8949 made by addrs? fp? sum?
8951 AIX TOC 2 crt0 as Y option option
8952 AIX minimal TOC 30 prolog gcc Y Y option
8953 SVR4 SDATA 13 crt0 gcc N Y N
8954 SVR4 pic 30 prolog ld Y not yet N
8955 SVR4 PIC 30 prolog gcc Y option option
8956 EABI TOC 30 prolog gcc Y option option
8960 /* Hash table stuff for keeping track of TOC entries. */
8962 struct toc_hash_struct
8964 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
8965 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
8967 enum machine_mode key_mode;
8971 static htab_t toc_hash_table;
8973 /* Hash functions for the hash table. */
8976 rs6000_hash_constant (k)
8979 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
8980 const char *format = GET_RTX_FORMAT (GET_CODE (k));
8981 int flen = strlen (format);
8984 if (GET_CODE (k) == LABEL_REF)
8985 return result * 1231 + X0INT (XEXP (k, 0), 3);
8987 if (GET_CODE (k) == CONST_DOUBLE)
8989 else if (GET_CODE (k) == CODE_LABEL)
8994 for (; fidx < flen; fidx++)
8995 switch (format[fidx])
9000 const char *str = XSTR (k, fidx);
9002 result = result * 613 + len;
9003 for (i = 0; i < len; i++)
9004 result = result * 613 + (unsigned) str[i];
9009 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9013 result = result * 613 + (unsigned) XINT (k, fidx);
9016 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9017 result = result * 613 + (unsigned) XWINT (k, fidx);
9021 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9022 result = result * 613 + (unsigned) (XWINT (k, fidx)
9033 toc_hash_function (hash_entry)
9034 const void * hash_entry;
9036 const struct toc_hash_struct *thc =
9037 (const struct toc_hash_struct *) hash_entry;
9038 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9041 /* Compare H1 and H2 for equivalence. */
9044 toc_hash_eq (h1, h2)
9048 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9049 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9051 if (((const struct toc_hash_struct *) h1)->key_mode
9052 != ((const struct toc_hash_struct *) h2)->key_mode)
9055 /* Gotcha: One of these const_doubles will be in memory.
9056 The other may be on the constant-pool chain.
9057 So rtx_equal_p will think they are different... */
9060 if (GET_CODE (r1) != GET_CODE (r2)
9061 || GET_MODE (r1) != GET_MODE (r2))
9063 if (GET_CODE (r1) == CONST_DOUBLE)
9065 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9067 for (i = 1; i < format_len; i++)
9068 if (XWINT (r1, i) != XWINT (r2, i))
9073 else if (GET_CODE (r1) == LABEL_REF)
9074 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9075 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9077 return rtx_equal_p (r1, r2);
9080 /* Mark the hash table-entry HASH_ENTRY. */
9083 toc_hash_mark_entry (hash_slot, unused)
9085 void * unused ATTRIBUTE_UNUSED;
9087 const struct toc_hash_struct * hash_entry =
9088 *(const struct toc_hash_struct **) hash_slot;
9089 rtx r = hash_entry->key;
9090 ggc_set_mark (hash_entry);
9091 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9092 if (GET_CODE (r) == LABEL_REF)
9095 ggc_set_mark (XEXP (r, 0));
9102 /* Mark all the elements of the TOC hash-table *HT. */
9105 toc_hash_mark_table (vht)
9110 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9113 /* These are the names given by the C++ front-end to vtables, and
9114 vtable-like objects. Ideally, this logic should not be here;
9115 instead, there should be some programmatic way of inquiring as
9116 to whether or not an object is a vtable. */
9118 #define VTABLE_NAME_P(NAME) \
9119 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
9120 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
9121 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
9122 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
9125 rs6000_output_symbol_ref (file, x)
9129 /* Currently C++ toc references to vtables can be emitted before it
9130 is decided whether the vtable is public or private. If this is
9131 the case, then the linker will eventually complain that there is
9132 a reference to an unknown section. Thus, for vtables only,
9133 we emit the TOC reference to reference the symbol and not the
9135 const char *name = XSTR (x, 0);
9137 if (VTABLE_NAME_P (name))
9139 RS6000_OUTPUT_BASENAME (file, name);
9142 assemble_name (file, name);
9145 /* Output a TOC entry. We derive the entry name from what is being
9149 output_toc (file, x, labelno, mode)
9153 enum machine_mode mode;
9156 const char *name = buf;
9157 const char *real_name;
9164 /* When the linker won't eliminate them, don't output duplicate
9165 TOC entries (this happens on AIX if there is any kind of TOC,
9166 and on SVR4 under -fPIC or -mrelocatable). */
9169 struct toc_hash_struct *h;
9172 h = ggc_alloc (sizeof (*h));
9175 h->labelno = labelno;
9177 found = htab_find_slot (toc_hash_table, h, 1);
9180 else /* This is indeed a duplicate.
9181 Set this label equal to that label. */
9183 fputs ("\t.set ", file);
9184 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9185 fprintf (file, "%d,", labelno);
9186 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
9187 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
9193 /* If we're going to put a double constant in the TOC, make sure it's
9194 aligned properly when strict alignment is on. */
9195 if (GET_CODE (x) == CONST_DOUBLE
9197 && GET_MODE_BITSIZE (mode) >= 64
9198 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
9199 ASM_OUTPUT_ALIGN (file, 3);
9202 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
9204 /* Handle FP constants specially. Note that if we have a minimal
9205 TOC, things we put here aren't actually in the TOC, so we can allow
9207 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
9212 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9213 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
9217 if (TARGET_MINIMAL_TOC)
9218 fputs (DOUBLE_INT_ASM_OP, file);
9220 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9221 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
9226 if (TARGET_MINIMAL_TOC)
9227 fputs ("\t.long ", file);
9229 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
9230 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
9234 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
9239 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
9240 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
9244 if (TARGET_MINIMAL_TOC)
9245 fputs (DOUBLE_INT_ASM_OP, file);
9247 fprintf (file, "\t.tc FS_%lx[TC],", l);
9248 fprintf (file, "0x%lx00000000\n", l);
9253 if (TARGET_MINIMAL_TOC)
9254 fputs ("\t.long ", file);
9256 fprintf (file, "\t.tc FS_%lx[TC],", l);
9257 fprintf (file, "0x%lx\n", l);
9261 else if (GET_MODE (x) == VOIDmode
9262 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
9264 unsigned HOST_WIDE_INT low;
9267 if (GET_CODE (x) == CONST_DOUBLE)
9269 low = CONST_DOUBLE_LOW (x);
9270 high = CONST_DOUBLE_HIGH (x);
9273 #if HOST_BITS_PER_WIDE_INT == 32
9276 high = (low & 0x80000000) ? ~0 : 0;
9280 low = INTVAL (x) & 0xffffffff;
9281 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
9285 /* TOC entries are always Pmode-sized, but since this
9286 is a bigendian machine then if we're putting smaller
9287 integer constants in the TOC we have to pad them.
9288 (This is still a win over putting the constants in
9289 a separate constant pool, because then we'd have
9290 to have both a TOC entry _and_ the actual constant.)
9292 For a 32-bit target, CONST_INT values are loaded and shifted
9293 entirely within `low' and can be stored in one TOC entry. */
9295 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
9296 abort ();/* It would be easy to make this work, but it doesn't now. */
9298 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
9299 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
9300 POINTER_SIZE, &low, &high, 0);
9304 if (TARGET_MINIMAL_TOC)
9305 fputs (DOUBLE_INT_ASM_OP, file);
9307 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
9308 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
9313 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
9315 if (TARGET_MINIMAL_TOC)
9316 fputs ("\t.long ", file);
9318 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
9319 (long)high, (long)low);
9320 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
9324 if (TARGET_MINIMAL_TOC)
9325 fputs ("\t.long ", file);
9327 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
9328 fprintf (file, "0x%lx\n", (long) low);
9334 if (GET_CODE (x) == CONST)
9336 if (GET_CODE (XEXP (x, 0)) != PLUS)
9339 base = XEXP (XEXP (x, 0), 0);
9340 offset = INTVAL (XEXP (XEXP (x, 0), 1));
9343 if (GET_CODE (base) == SYMBOL_REF)
9344 name = XSTR (base, 0);
9345 else if (GET_CODE (base) == LABEL_REF)
9346 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
9347 else if (GET_CODE (base) == CODE_LABEL)
9348 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
9352 STRIP_NAME_ENCODING (real_name, name);
9353 if (TARGET_MINIMAL_TOC)
9354 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9357 fprintf (file, "\t.tc %s", real_name);
9360 fprintf (file, ".N%d", - offset);
9362 fprintf (file, ".P%d", offset);
9364 fputs ("[TC],", file);
9367 /* Currently C++ toc references to vtables can be emitted before it
9368 is decided whether the vtable is public or private. If this is
9369 the case, then the linker will eventually complain that there is
9370 a TOC reference to an unknown section. Thus, for vtables only,
9371 we emit the TOC reference to reference the symbol and not the
9373 if (VTABLE_NAME_P (name))
9375 RS6000_OUTPUT_BASENAME (file, name);
9377 fprintf (file, "%d", offset);
9378 else if (offset > 0)
9379 fprintf (file, "+%d", offset);
9382 output_addr_const (file, x);
9386 /* Output an assembler pseudo-op to write an ASCII string of N characters
9387 starting at P to FILE.
9389 On the RS/6000, we have to do this using the .byte operation and
9390 write out special characters outside the quoted string.
9391 Also, the assembler is broken; very long strings are truncated,
9392 so we must artificially break them up early. */
9395 output_ascii (file, p, n)
9401 int i, count_string;
9402 const char *for_string = "\t.byte \"";
9403 const char *for_decimal = "\t.byte ";
9404 const char *to_close = NULL;
9407 for (i = 0; i < n; i++)
9410 if (c >= ' ' && c < 0177)
9413 fputs (for_string, file);
9416 /* Write two quotes to get one. */
9424 for_decimal = "\"\n\t.byte ";
9428 if (count_string >= 512)
9430 fputs (to_close, file);
9432 for_string = "\t.byte \"";
9433 for_decimal = "\t.byte ";
9441 fputs (for_decimal, file);
9442 fprintf (file, "%d", c);
9444 for_string = "\n\t.byte \"";
9451 /* Now close the string if we have written one. Then end the line. */
9453 fputs (to_close, file);
9456 /* Generate a unique section name for FILENAME for a section type
9457 represented by SECTION_DESC. Output goes into BUF.
9459 SECTION_DESC can be any string, as long as it is different for each
9460 possible section type.
9462 We name the section in the same manner as xlc. The name begins with an
9463 underscore followed by the filename (after stripping any leading directory
9464 names) with the last period replaced by the string SECTION_DESC. If
9465 FILENAME does not contain a period, SECTION_DESC is appended to the end of
9469 rs6000_gen_section_name (buf, filename, section_desc)
9471 const char *filename;
9472 const char *section_desc;
9474 const char *q, *after_last_slash, *last_period = 0;
9478 after_last_slash = filename;
9479 for (q = filename; *q; q++)
9482 after_last_slash = q + 1;
9487 len = strlen (after_last_slash) + strlen (section_desc) + 2;
9488 *buf = (char *) permalloc (len);
9493 for (q = after_last_slash; *q; q++)
9495 if (q == last_period)
9497 strcpy (p, section_desc);
9498 p += strlen (section_desc);
9501 else if (ISALNUM (*q))
9505 if (last_period == 0)
9506 strcpy (p, section_desc);
9511 /* Emit profile function. */
9514 output_profile_hook (labelno)
9517 if (DEFAULT_ABI == ABI_AIX)
9520 const char *label_name;
9525 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9526 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
9527 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
9529 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
9532 else if (DEFAULT_ABI == ABI_DARWIN)
9534 const char *mcount_name = RS6000_MCOUNT;
9535 int caller_addr_regno = LINK_REGISTER_REGNUM;
9537 /* Be conservative and always set this, at least for now. */
9538 current_function_uses_pic_offset_table = 1;
9541 /* For PIC code, set up a stub and collect the caller's address
9542 from r0, which is where the prologue puts it. */
9545 mcount_name = machopic_stub_name (mcount_name);
9546 if (current_function_uses_pic_offset_table)
9547 caller_addr_regno = 0;
9550 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
9552 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
9556 /* Write function profiler code. */
9559 output_function_profiler (file, labelno)
9565 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
9566 switch (DEFAULT_ABI)
9572 case ABI_AIX_NODESC:
9573 fprintf (file, "\tmflr %s\n", reg_names[0]);
9576 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
9577 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9578 reg_names[0], reg_names[1]);
9579 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
9580 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
9581 assemble_name (file, buf);
9582 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
9584 else if (flag_pic > 1)
9586 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9587 reg_names[0], reg_names[1]);
9588 /* Now, we need to get the address of the label. */
9589 fputs ("\tbl 1f\n\t.long ", file);
9590 assemble_name (file, buf);
9591 fputs ("-.\n1:", file);
9592 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
9593 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
9594 reg_names[0], reg_names[11]);
9595 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
9596 reg_names[0], reg_names[0], reg_names[11]);
9600 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
9601 assemble_name (file, buf);
9602 fputs ("@ha\n", file);
9603 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
9604 reg_names[0], reg_names[1]);
9605 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
9606 assemble_name (file, buf);
9607 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
9610 if (current_function_needs_context)
9611 asm_fprintf (file, "\tmr %s,%s\n",
9612 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
9613 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
9614 if (current_function_needs_context)
9615 asm_fprintf (file, "\tmr %s,%s\n",
9616 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
9621 /* Don't do anything, done in output_profile_hook (). */
9627 /* Adjust the cost of a scheduling dependency. Return the new cost of
9628 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
9631 rs6000_adjust_cost (insn, link, dep_insn, cost)
9634 rtx dep_insn ATTRIBUTE_UNUSED;
9637 if (! recog_memoized (insn))
9640 if (REG_NOTE_KIND (link) != 0)
9643 if (REG_NOTE_KIND (link) == 0)
9645 /* Data dependency; DEP_INSN writes a register that INSN reads
9646 some cycles later. */
9647 switch (get_attr_type (insn))
9650 /* Tell the first scheduling pass about the latency between
9651 a mtctr and bctr (and mtlr and br/blr). The first
9652 scheduling pass will not know about this latency since
9653 the mtctr instruction, which has the latency associated
9654 to it, will be generated by reload. */
9655 return TARGET_POWER ? 5 : 4;
9657 /* Leave some extra cycles between a compare and its
9658 dependent branch, to inhibit expensive mispredicts. */
9659 if ((rs6000_cpu_attr == CPU_PPC750
9660 || rs6000_cpu_attr == CPU_PPC7400
9661 || rs6000_cpu_attr == CPU_PPC7450)
9662 && recog_memoized (dep_insn)
9663 && (INSN_CODE (dep_insn) >= 0)
9664 && (get_attr_type (dep_insn) == TYPE_COMPARE
9665 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
9666 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
9667 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
9672 /* Fall out to return default cost. */
9678 /* A C statement (sans semicolon) to update the integer scheduling
9679 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
9680 INSN earlier, increase the priority to execute INSN later. Do not
9681 define this macro if you do not need to adjust the scheduling
9682 priorities of insns. */
9685 rs6000_adjust_priority (insn, priority)
9686 rtx insn ATTRIBUTE_UNUSED;
9689 /* On machines (like the 750) which have asymmetric integer units,
9690 where one integer unit can do multiply and divides and the other
9691 can't, reduce the priority of multiply/divide so it is scheduled
9692 before other integer operations. */
9695 if (! INSN_P (insn))
9698 if (GET_CODE (PATTERN (insn)) == USE)
9701 switch (rs6000_cpu_attr) {
9703 switch (get_attr_type (insn))
9710 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
9711 priority, priority);
9712 if (priority >= 0 && priority < 0x01000000)
9722 /* Return how many instructions the machine can issue per cycle. */
9725 rs6000_issue_rate ()
9727 switch (rs6000_cpu_attr) {
9728 case CPU_RIOS1: /* ? */
9730 case CPU_PPC601: /* ? */
9749 /* Length in units of the trampoline for entering a nested function. */
9752 rs6000_trampoline_size ()
9756 switch (DEFAULT_ABI)
9762 ret = (TARGET_32BIT) ? 12 : 24;
9767 case ABI_AIX_NODESC:
9768 ret = (TARGET_32BIT) ? 40 : 48;
9775 /* Emit RTL insns to initialize the variable parts of a trampoline.
9776 FNADDR is an RTX for the address of the function's pure code.
9777 CXT is an RTX for the static chain value for the function. */
9780 rs6000_initialize_trampoline (addr, fnaddr, cxt)
9785 enum machine_mode pmode = Pmode;
9786 int regsize = (TARGET_32BIT) ? 4 : 8;
9787 rtx ctx_reg = force_reg (pmode, cxt);
9789 switch (DEFAULT_ABI)
9794 /* Macros to shorten the code expansions below. */
9795 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
9796 #define MEM_PLUS(addr,offset) \
9797 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
9799 /* Under AIX, just build the 3 word function descriptor */
9802 rtx fn_reg = gen_reg_rtx (pmode);
9803 rtx toc_reg = gen_reg_rtx (pmode);
9804 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
9805 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
9806 emit_move_insn (MEM_DEREF (addr), fn_reg);
9807 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
9808 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
9812 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
9815 case ABI_AIX_NODESC:
9816 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
9819 GEN_INT (rs6000_trampoline_size ()), SImode,
9829 /* Table of valid machine attributes. */
9831 const struct attribute_spec rs6000_attribute_table[] =
9833 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
9834 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
9835 { NULL, 0, 0, false, false, false, NULL }
9838 /* Handle a "longcall" attribute; arguments as in struct
9839 attribute_spec.handler. */
9842 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
9845 tree args ATTRIBUTE_UNUSED;
9846 int flags ATTRIBUTE_UNUSED;
9849 if (TREE_CODE (*node) != FUNCTION_TYPE
9850 && TREE_CODE (*node) != FIELD_DECL
9851 && TREE_CODE (*node) != TYPE_DECL)
9853 warning ("`%s' attribute only applies to functions",
9854 IDENTIFIER_POINTER (name));
9855 *no_add_attrs = true;
9861 /* Return a reference suitable for calling a function with the
9862 longcall attribute. */
9865 rs6000_longcall_ref (call_ref)
9868 const char *call_name;
9871 if (GET_CODE (call_ref) != SYMBOL_REF)
9874 /* System V adds '.' to the internal name, so skip them. */
9875 call_name = XSTR (call_ref, 0);
9876 if (*call_name == '.')
9878 while (*call_name == '.')
9881 node = get_identifier (call_name);
9882 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
9885 return force_reg (Pmode, call_ref);
9889 /* A C statement or statements to switch to the appropriate section
9890 for output of RTX in mode MODE. You can assume that RTX is some
9891 kind of constant in RTL. The argument MODE is redundant except in
9892 the case of a `const_int' rtx. Select the section by calling
9893 `text_section' or one of the alternatives for other sections.
9895 Do not define this macro if you put all constants in the read-only
9898 #ifdef USING_ELFOS_H
9901 rs6000_select_rtx_section (mode, x)
9902 enum machine_mode mode;
9905 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
9911 /* A C statement or statements to switch to the appropriate
9912 section for output of DECL. DECL is either a `VAR_DECL' node
9913 or a constant of some sort. RELOC indicates whether forming
9914 the initial value of DECL requires link-time relocations. */
9917 rs6000_select_section (decl, reloc)
9921 int size = int_size_in_bytes (TREE_TYPE (decl));
9924 static void (* const sec_funcs[4]) PARAMS ((void)) = {
9931 needs_sdata = (size > 0
9932 && size <= g_switch_value
9933 && rs6000_sdata != SDATA_NONE
9934 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
9936 if (TREE_CODE (decl) == STRING_CST)
9937 readonly = ! flag_writable_strings;
9938 else if (TREE_CODE (decl) == VAR_DECL)
9939 readonly = (! (flag_pic && reloc)
9940 && TREE_READONLY (decl)
9941 && ! TREE_SIDE_EFFECTS (decl)
9942 && DECL_INITIAL (decl)
9943 && DECL_INITIAL (decl) != error_mark_node
9944 && TREE_CONSTANT (DECL_INITIAL (decl)));
9945 else if (TREE_CODE (decl) == CONSTRUCTOR)
9946 readonly = (! (flag_pic && reloc)
9947 && ! TREE_SIDE_EFFECTS (decl)
9948 && TREE_CONSTANT (decl));
9951 if (needs_sdata && rs6000_sdata != SDATA_EABI)
9954 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
9957 /* A C statement to build up a unique section name, expressed as a
9958 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
9959 RELOC indicates whether the initial value of EXP requires
9960 link-time relocations. If you do not define this macro, GCC will use
9961 the symbol name prefixed by `.' as the section name. Note - this
9962 macro can now be called for uninitialized data items as well as
9963 initialised data and functions. */
9966 rs6000_unique_section (decl, reloc)
9976 static const char *const prefixes[7][2] =
9978 { ".rodata.", ".gnu.linkonce.r." },
9979 { ".sdata2.", ".gnu.linkonce.s2." },
9980 { ".data.", ".gnu.linkonce.d." },
9981 { ".sdata.", ".gnu.linkonce.s." },
9982 { ".bss.", ".gnu.linkonce.b." },
9983 { ".sbss.", ".gnu.linkonce.sb." },
9984 { ".text.", ".gnu.linkonce.t." }
9987 if (TREE_CODE (decl) == FUNCTION_DECL)
9996 if (TREE_CODE (decl) == STRING_CST)
9997 readonly = ! flag_writable_strings;
9998 else if (TREE_CODE (decl) == VAR_DECL)
9999 readonly = (! (flag_pic && reloc)
10000 && TREE_READONLY (decl)
10001 && ! TREE_SIDE_EFFECTS (decl)
10002 && TREE_CONSTANT (DECL_INITIAL (decl)));
10004 size = int_size_in_bytes (TREE_TYPE (decl));
10005 needs_sdata = (size > 0
10006 && size <= g_switch_value
10007 && rs6000_sdata != SDATA_NONE
10008 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10010 if (DECL_INITIAL (decl) == 0
10011 || DECL_INITIAL (decl) == error_mark_node)
10013 else if (! readonly)
10020 /* .sdata2 is only for EABI. */
10021 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10027 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10028 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10029 len = strlen (name) + strlen (prefix);
10030 string = alloca (len + 1);
10032 sprintf (string, "%s%s", prefix, name);
10034 DECL_SECTION_NAME (decl) = build_string (len, string);
10038 /* If we are referencing a function that is static or is known to be
10039 in this file, make the SYMBOL_REF special. We can use this to indicate
10040 that we can branch to this function without emitting a no-op after the
10041 call. For real AIX calling sequences, we also replace the
10042 function name with the real name (1 or 2 leading .'s), rather than
10043 the function descriptor name. This saves a lot of overriding code
10044 to read the prefixes. */
10047 rs6000_encode_section_info (decl)
10050 if (TREE_CODE (decl) == FUNCTION_DECL)
10052 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10053 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10054 && ! DECL_WEAK (decl))
10055 SYMBOL_REF_FLAG (sym_ref) = 1;
10057 if (DEFAULT_ABI == ABI_AIX)
10059 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10060 size_t len2 = strlen (XSTR (sym_ref, 0));
10061 char *str = alloca (len1 + len2 + 1);
10064 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10066 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10069 else if (rs6000_sdata != SDATA_NONE
10070 && DEFAULT_ABI == ABI_V4
10071 && TREE_CODE (decl) == VAR_DECL)
10073 int size = int_size_in_bytes (TREE_TYPE (decl));
10074 tree section_name = DECL_SECTION_NAME (decl);
10075 const char *name = (char *)0;
10080 if (TREE_CODE (section_name) == STRING_CST)
10082 name = TREE_STRING_POINTER (section_name);
10083 len = TREE_STRING_LENGTH (section_name);
10089 if ((size > 0 && size <= g_switch_value)
10091 && ((len == sizeof (".sdata") - 1
10092 && strcmp (name, ".sdata") == 0)
10093 || (len == sizeof (".sdata2") - 1
10094 && strcmp (name, ".sdata2") == 0)
10095 || (len == sizeof (".sbss") - 1
10096 && strcmp (name, ".sbss") == 0)
10097 || (len == sizeof (".sbss2") - 1
10098 && strcmp (name, ".sbss2") == 0)
10099 || (len == sizeof (".PPC.EMB.sdata0") - 1
10100 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10101 || (len == sizeof (".PPC.EMB.sbss0") - 1
10102 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10104 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10105 size_t len = strlen (XSTR (sym_ref, 0));
10106 char *str = alloca (len + 2);
10109 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10110 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10115 #endif /* USING_ELFOS_H */
10118 /* Return a REG that occurs in ADDR with coefficient 1.
10119 ADDR can be effectively incremented by incrementing REG.
10121 r0 is special and we must not select it as an address
10122 register by this routine since our caller will try to
10123 increment the returned register via an "la" instruction. */
10126 find_addr_reg (addr)
10129 while (GET_CODE (addr) == PLUS)
10131 if (GET_CODE (XEXP (addr, 0)) == REG
10132 && REGNO (XEXP (addr, 0)) != 0)
10133 addr = XEXP (addr, 0);
10134 else if (GET_CODE (XEXP (addr, 1)) == REG
10135 && REGNO (XEXP (addr, 1)) != 0)
10136 addr = XEXP (addr, 1);
10137 else if (CONSTANT_P (XEXP (addr, 0)))
10138 addr = XEXP (addr, 1);
10139 else if (CONSTANT_P (XEXP (addr, 1)))
10140 addr = XEXP (addr, 0);
10144 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
10150 rs6000_fatal_bad_address (op)
10153 fatal_insn ("bad address", op);
10156 /* Called to register all of our global variables with the garbage
10160 rs6000_add_gc_roots ()
10162 ggc_add_rtx_root (&rs6000_compare_op0, 1);
10163 ggc_add_rtx_root (&rs6000_compare_op1, 1);
10165 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
10166 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
10167 toc_hash_mark_table);
10170 machopic_add_gc_roots ();
10177 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
10178 reference and a constant. */
10181 symbolic_operand (op)
10184 switch (GET_CODE (op))
10191 return (GET_CODE (op) == SYMBOL_REF ||
10192 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
10193 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
10194 && GET_CODE (XEXP (op, 1)) == CONST_INT);
10201 #ifdef RS6000_LONG_BRANCH
10203 static tree stub_list = 0;
10205 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
10206 procedure calls to the linked list. */
10209 add_compiler_stub (label_name, function_name, line_number)
10211 tree function_name;
10214 tree stub = build_tree_list (function_name, label_name);
10215 TREE_TYPE (stub) = build_int_2 (line_number, 0);
10216 TREE_CHAIN (stub) = stub_list;
10220 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
10221 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
10222 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
10224 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
10225 handling procedure calls from the linked list and initializes the
10229 output_compiler_stub ()
10232 char label_buf[256];
10234 tree tmp_stub, stub;
10237 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10239 fprintf (asm_out_file,
10240 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
10242 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10243 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10244 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
10245 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10247 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
10249 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
10252 label_buf[0] = '_';
10253 strcpy (label_buf+1,
10254 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
10257 strcpy (tmp_buf, "lis r12,hi16(");
10258 strcat (tmp_buf, label_buf);
10259 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
10260 strcat (tmp_buf, label_buf);
10261 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
10262 output_asm_insn (tmp_buf, 0);
10264 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
10265 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
10266 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
10267 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
10273 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
10274 already there or not. */
10277 no_previous_def (function_name)
10278 tree function_name;
10281 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10282 if (function_name == STUB_FUNCTION_NAME (stub))
10287 /* GET_PREV_LABEL gets the label name from the previous definition of
10291 get_prev_label (function_name)
10292 tree function_name;
10295 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
10296 if (function_name == STUB_FUNCTION_NAME (stub))
10297 return STUB_LABEL_NAME (stub);
10301 /* INSN is either a function call or a millicode call. It may have an
10302 unconditional jump in its delay slot.
10304 CALL_DEST is the routine we are calling. */
10307 output_call (insn, call_dest, operand_number)
10310 int operand_number;
10312 static char buf[256];
10313 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
10316 tree funname = get_identifier (XSTR (call_dest, 0));
10318 if (no_previous_def (funname))
10321 rtx label_rtx = gen_label_rtx ();
10322 char *label_buf, temp_buf[256];
10323 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
10324 CODE_LABEL_NUMBER (label_rtx));
10325 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
10326 labelname = get_identifier (label_buf);
10327 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
10329 line_number = NOTE_LINE_NUMBER (insn);
10330 add_compiler_stub (labelname, funname, line_number);
10333 labelname = get_prev_label (funname);
10335 sprintf (buf, "jbsr %%z%d,%.246s",
10336 operand_number, IDENTIFIER_POINTER (labelname));
10341 sprintf (buf, "bl %%z%d", operand_number);
10346 #endif /* RS6000_LONG_BRANCH */
10348 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
10350 const char *const symbol_ = (SYMBOL); \
10351 char *buffer_ = (BUF); \
10352 if (symbol_[0] == '"') \
10354 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
10356 else if (name_needs_quotes(symbol_)) \
10358 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
10362 sprintf(buffer_, "L%d$%s", (N), symbol_); \
10367 /* Generate PIC and indirect symbol stubs. */
10370 machopic_output_stub (file, symb, stub)
10372 const char *symb, *stub;
10374 unsigned int length;
10375 char *symbol_name, *lazy_ptr_name;
10376 char *local_label_0;
10377 static int label = 0;
10379 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
10380 STRIP_NAME_ENCODING (symb, symb);
10384 length = strlen (symb);
10385 symbol_name = alloca (length + 32);
10386 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
10388 lazy_ptr_name = alloca (length + 32);
10389 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
10391 local_label_0 = alloca (length + 32);
10392 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
10395 machopic_picsymbol_stub_section ();
10397 machopic_symbol_stub_section ();
10399 fprintf (file, "%s:\n", stub);
10400 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10404 fprintf (file, "\tmflr r0\n");
10405 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
10406 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
10407 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
10408 lazy_ptr_name, local_label_0);
10409 fprintf (file, "\tmtlr r0\n");
10410 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
10411 lazy_ptr_name, local_label_0);
10412 fprintf (file, "\tmtctr r12\n");
10413 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
10414 lazy_ptr_name, local_label_0);
10415 fprintf (file, "\tbctr\n");
10418 fprintf (file, "non-pure not supported\n");
10420 machopic_lazy_symbol_ptr_section ();
10421 fprintf (file, "%s:\n", lazy_ptr_name);
10422 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
10423 fprintf (file, "\t.long dyld_stub_binding_helper\n");
10426 /* Legitimize PIC addresses. If the address is already
10427 position-independent, we return ORIG. Newly generated
10428 position-independent addresses go into a reg. This is REG if non
10429 zero, otherwise we allocate register(s) as necessary. */
10431 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
10434 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
10436 enum machine_mode mode;
10441 if (reg == NULL && ! reload_in_progress && ! reload_completed)
10442 reg = gen_reg_rtx (Pmode);
10444 if (GET_CODE (orig) == CONST)
10446 if (GET_CODE (XEXP (orig, 0)) == PLUS
10447 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
10450 if (GET_CODE (XEXP (orig, 0)) == PLUS)
10453 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
10456 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
10462 if (GET_CODE (offset) == CONST_INT)
10464 if (SMALL_INT (offset))
10465 return plus_constant (base, INTVAL (offset));
10466 else if (! reload_in_progress && ! reload_completed)
10467 offset = force_reg (Pmode, offset);
10470 rtx mem = force_const_mem (Pmode, orig);
10471 return machopic_legitimize_pic_address (mem, Pmode, reg);
10474 return gen_rtx (PLUS, Pmode, base, offset);
10477 /* Fall back on generic machopic code. */
10478 return machopic_legitimize_pic_address (orig, mode, reg);
10481 /* This is just a placeholder to make linking work without having to
10482 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
10483 ever needed for Darwin (not too likely!) this would have to get a
10484 real definition. */
10491 #endif /* TARGET_MACHO */
10494 static unsigned int
10495 rs6000_elf_section_type_flags (decl, name, reloc)
10500 unsigned int flags = default_section_type_flags (decl, name, reloc);
10502 if (TARGET_RELOCATABLE)
10503 flags |= SECTION_WRITE;
10508 /* Record an element in the table of global constructors. SYMBOL is
10509 a SYMBOL_REF of the function to be called; PRIORITY is a number
10510 between 0 and MAX_INIT_PRIORITY.
10512 This differs from default_named_section_asm_out_constructor in
10513 that we have special handling for -mrelocatable. */
10516 rs6000_elf_asm_out_constructor (symbol, priority)
10520 const char *section = ".ctors";
10523 if (priority != DEFAULT_INIT_PRIORITY)
10525 sprintf (buf, ".ctors.%.5u",
10526 /* Invert the numbering so the linker puts us in the proper
10527 order; constructors are run from right to left, and the
10528 linker sorts in increasing order. */
10529 MAX_INIT_PRIORITY - priority);
10533 named_section_flags (section, SECTION_WRITE);
10534 assemble_align (POINTER_SIZE);
10536 if (TARGET_RELOCATABLE)
10538 fputs ("\t.long (", asm_out_file);
10539 output_addr_const (asm_out_file, symbol);
10540 fputs (")@fixup\n", asm_out_file);
10543 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10547 rs6000_elf_asm_out_destructor (symbol, priority)
10551 const char *section = ".dtors";
10554 if (priority != DEFAULT_INIT_PRIORITY)
10556 sprintf (buf, ".dtors.%.5u",
10557 /* Invert the numbering so the linker puts us in the proper
10558 order; constructors are run from right to left, and the
10559 linker sorts in increasing order. */
10560 MAX_INIT_PRIORITY - priority);
10564 named_section_flags (section, SECTION_WRITE);
10565 assemble_align (POINTER_SIZE);
10567 if (TARGET_RELOCATABLE)
10569 fputs ("\t.long (", asm_out_file);
10570 output_addr_const (asm_out_file, symbol);
10571 fputs (")@fixup\n", asm_out_file);
10574 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
10578 #ifdef OBJECT_FORMAT_COFF
10580 xcoff_asm_named_section (name, flags)
10582 unsigned int flags ATTRIBUTE_UNUSED;
10584 fprintf (asm_out_file, "\t.csect %s\n", name);