1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
/* Some subtargets define TARGET_NO_PROTOTYPE themselves; provide the
   default of 0 (prototypes assumed present) otherwise.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* NOTE: these evaluate their arguments more than once; only use them
   with side-effect-free operands.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
/* Default register names.  Order must match the hard register numbering:
   32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CR fields, xer, 32 AltiVec regs,
   vrsave.  */
char rs6000_reg_names[][8] =
{
  /* GPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* Special registers.  */
     "mq", "lr", "ctr", "ap",
  /* Condition-register fields.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
    "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  "vrsave"
};
#ifdef TARGET_REGNAMES
/* Alternate register names (%rN/%fN/%crN/%vN style), copied over
   rs6000_reg_names when the user asks for them.  Layout must match
   rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",  "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",  "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",  "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",  "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",   "lr",  "ctr",   "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave"
};
#endif
223 #ifndef MASK_STRICT_ALIGN
224 #define MASK_STRICT_ALIGN 0
227 /* Initialize the GCC target structure. */
228 #undef TARGET_ATTRIBUTE_TABLE
229 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
231 #undef TARGET_ASM_ALIGNED_DI_OP
232 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
234 /* Default unaligned ops are only provided for ELF. Find the ops needed
235 for non-ELF systems. */
236 #ifndef OBJECT_FORMAT_ELF
237 #ifdef OBJECT_FORMAT_COFF
238 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
240 #undef TARGET_ASM_UNALIGNED_HI_OP
241 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
242 #undef TARGET_ASM_UNALIGNED_SI_OP
243 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
244 #undef TARGET_ASM_UNALIGNED_DI_OP
245 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
248 #undef TARGET_ASM_UNALIGNED_HI_OP
249 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
250 #undef TARGET_ASM_UNALIGNED_SI_OP
251 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
255 /* This hook deals with fixups for relocatable code and DI-mode objects
257 #undef TARGET_ASM_INTEGER
258 #define TARGET_ASM_INTEGER rs6000_assemble_integer
260 #undef TARGET_ASM_FUNCTION_PROLOGUE
261 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
262 #undef TARGET_ASM_FUNCTION_EPILOGUE
263 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
266 #undef TARGET_SECTION_TYPE_FLAGS
267 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
270 #undef TARGET_SCHED_ISSUE_RATE
271 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
272 #undef TARGET_SCHED_ADJUST_COST
273 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
274 #undef TARGET_SCHED_ADJUST_PRIORITY
275 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
277 #undef TARGET_INIT_BUILTINS
278 #define TARGET_INIT_BUILTINS rs6000_init_builtins
280 #undef TARGET_EXPAND_BUILTIN
281 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
283 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
284 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
286 struct gcc_target targetm = TARGET_INITIALIZER;
288 /* Override command line options. Mostly we process the processor
289 type and sometimes adjust other TARGET_ options. */
292 rs6000_override_options (default_cpu)
293 const char *default_cpu;
296 struct rs6000_cpu_select *ptr;
298 /* Simplify the entries below by making a mask for any POWER
299 variant and any PowerPC variant. */
301 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
302 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
303 | MASK_PPC_GFXOPT | MASK_POWERPC64)
304 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
308 const char *const name; /* Canonical processor name. */
309 const enum processor_type processor; /* Processor type enum value. */
310 const int target_enable; /* Target flags to enable. */
311 const int target_disable; /* Target flags to disable. */
312 } const processor_target_table[]
313 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
314 POWER_MASKS | POWERPC_MASKS},
315 {"power", PROCESSOR_POWER,
316 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
317 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
318 {"power2", PROCESSOR_POWER,
319 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
320 POWERPC_MASKS | MASK_NEW_MNEMONICS},
321 {"power3", PROCESSOR_PPC630,
322 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
323 POWER_MASKS | MASK_PPC_GPOPT},
324 {"powerpc", PROCESSOR_POWERPC,
325 MASK_POWERPC | MASK_NEW_MNEMONICS,
326 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
327 {"powerpc64", PROCESSOR_POWERPC64,
328 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
329 POWER_MASKS | POWERPC_OPT_MASKS},
330 {"rios", PROCESSOR_RIOS1,
331 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
332 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
333 {"rios1", PROCESSOR_RIOS1,
334 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
335 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
336 {"rsc", PROCESSOR_PPC601,
337 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
338 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
339 {"rsc1", PROCESSOR_PPC601,
340 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
341 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
342 {"rios2", PROCESSOR_RIOS2,
343 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
344 POWERPC_MASKS | MASK_NEW_MNEMONICS},
345 {"rs64a", PROCESSOR_RS64A,
346 MASK_POWERPC | MASK_NEW_MNEMONICS,
347 POWER_MASKS | POWERPC_OPT_MASKS},
348 {"401", PROCESSOR_PPC403,
349 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
350 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
351 {"403", PROCESSOR_PPC403,
352 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
353 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
354 {"405", PROCESSOR_PPC405,
355 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
356 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
357 {"505", PROCESSOR_MPCCORE,
358 MASK_POWERPC | MASK_NEW_MNEMONICS,
359 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
360 {"601", PROCESSOR_PPC601,
361 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
362 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
363 {"602", PROCESSOR_PPC603,
364 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
365 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
366 {"603", PROCESSOR_PPC603,
367 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
368 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
369 {"603e", PROCESSOR_PPC603,
370 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
371 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
372 {"ec603e", PROCESSOR_PPC603,
373 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
374 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
375 {"604", PROCESSOR_PPC604,
376 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
377 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
378 {"604e", PROCESSOR_PPC604e,
379 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
380 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
381 {"620", PROCESSOR_PPC620,
382 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
383 POWER_MASKS | MASK_PPC_GPOPT},
384 {"630", PROCESSOR_PPC630,
385 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
386 POWER_MASKS | MASK_PPC_GPOPT},
387 {"740", PROCESSOR_PPC750,
388 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
389 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
390 {"750", PROCESSOR_PPC750,
391 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
392 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
393 {"7400", PROCESSOR_PPC7400,
394 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
395 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
396 {"7450", PROCESSOR_PPC7450,
397 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
398 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
399 {"801", PROCESSOR_MPCCORE,
400 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
401 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
402 {"821", PROCESSOR_MPCCORE,
403 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
404 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
405 {"823", PROCESSOR_MPCCORE,
406 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
407 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
408 {"860", PROCESSOR_MPCCORE,
409 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
410 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
412 size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
414 /* Save current -mmultiple/-mno-multiple status. */
415 int multiple = TARGET_MULTIPLE;
416 /* Save current -mstring/-mno-string status. */
417 int string = TARGET_STRING;
419 /* Identify the processor type. */
420 rs6000_select[0].string = default_cpu;
421 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
423 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
425 ptr = &rs6000_select[i];
426 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
428 for (j = 0; j < ptt_size; j++)
429 if (! strcmp (ptr->string, processor_target_table[j].name))
432 rs6000_cpu = processor_target_table[j].processor;
436 target_flags |= processor_target_table[j].target_enable;
437 target_flags &= ~processor_target_table[j].target_disable;
443 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
447 /* If we are optimizing big endian systems for space, use the store
448 multiple instructions. */
449 if (BYTES_BIG_ENDIAN && optimize_size)
450 target_flags |= MASK_MULTIPLE;
452 /* If -mmultiple or -mno-multiple was explicitly used, don't
453 override with the processor default */
454 if (TARGET_MULTIPLE_SET)
455 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
457 /* If -mstring or -mno-string was explicitly used, don't override
458 with the processor default. */
459 if (TARGET_STRING_SET)
460 target_flags = (target_flags & ~MASK_STRING) | string;
462 /* Don't allow -mmultiple or -mstring on little endian systems
463 unless the cpu is a 750, because the hardware doesn't support the
464 instructions used in little endian mode, and causes an alignment
465 trap. The 750 does not cause an alignment trap (except when the
466 target is unaligned). */
468 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
472 target_flags &= ~MASK_MULTIPLE;
473 if (TARGET_MULTIPLE_SET)
474 warning ("-mmultiple is not supported on little endian systems");
479 target_flags &= ~MASK_STRING;
480 if (TARGET_STRING_SET)
481 warning ("-mstring is not supported on little endian systems");
485 if (flag_pic && DEFAULT_ABI == ABI_AIX)
487 warning ("-f%s ignored (all code is position independent)",
488 (flag_pic > 1) ? "PIC" : "pic");
492 #ifdef XCOFF_DEBUGGING_INFO
493 if (flag_function_sections && (write_symbols != NO_DEBUG)
494 && DEFAULT_ABI == ABI_AIX)
496 warning ("-ffunction-sections disabled on AIX when debugging");
497 flag_function_sections = 0;
500 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
502 warning ("-fdata-sections not supported on AIX");
503 flag_data_sections = 0;
507 /* Set debug flags */
508 if (rs6000_debug_name)
510 if (! strcmp (rs6000_debug_name, "all"))
511 rs6000_debug_stack = rs6000_debug_arg = 1;
512 else if (! strcmp (rs6000_debug_name, "stack"))
513 rs6000_debug_stack = 1;
514 else if (! strcmp (rs6000_debug_name, "arg"))
515 rs6000_debug_arg = 1;
517 error ("unknown -mdebug-%s switch", rs6000_debug_name);
520 /* Set size of long double */
521 rs6000_long_double_type_size = 64;
522 if (rs6000_long_double_size_string)
525 int size = strtol (rs6000_long_double_size_string, &tail, 10);
526 if (*tail != '\0' || (size != 64 && size != 128))
527 error ("Unknown switch -mlong-double-%s",
528 rs6000_long_double_size_string);
530 rs6000_long_double_type_size = size;
533 /* Handle -mabi= options. */
534 rs6000_parse_abi_options ();
536 #ifdef TARGET_REGNAMES
537 /* If the user desires alternate register names, copy in the
538 alternate names now. */
540 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
543 #ifdef SUBTARGET_OVERRIDE_OPTIONS
544 SUBTARGET_OVERRIDE_OPTIONS;
546 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
547 SUBSUBTARGET_OVERRIDE_OPTIONS;
550 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
551 If -maix-struct-return or -msvr4-struct-return was explicitly
552 used, don't override with the ABI default. */
553 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
555 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
556 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
558 target_flags |= MASK_AIX_STRUCT_RET;
561 /* Register global variables with the garbage collector. */
562 rs6000_add_gc_roots ();
564 /* Allocate an alias set for register saves & restores from stack. */
565 rs6000_sr_alias_set = new_alias_set ();
568 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
570 /* We can only guarantee the availability of DI pseudo-ops when
571 assembling for 64-bit targets. */
574 targetm.asm_out.aligned_op.di = NULL;
575 targetm.asm_out.unaligned_op.di = NULL;
578 /* Arrange to save and restore machine status around nested functions. */
579 init_machine_status = rs6000_init_machine_status;
580 free_machine_status = rs6000_free_machine_status;
583 /* Handle -mabi= options. */
585 rs6000_parse_abi_options ()
587 if (rs6000_abi_string == 0)
589 else if (! strcmp (rs6000_abi_string, "altivec"))
590 rs6000_altivec_abi = 1;
592 error ("unknown ABI specified: '%s'", rs6000_abi_string);
596 optimization_options (level, size)
597 int level ATTRIBUTE_UNUSED;
598 int size ATTRIBUTE_UNUSED;
602 /* Do anything needed at the start of the asm file. */
605 rs6000_file_start (file, default_cpu)
607 const char *default_cpu;
611 const char *start = buffer;
612 struct rs6000_cpu_select *ptr;
614 if (flag_verbose_asm)
616 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
617 rs6000_select[0].string = default_cpu;
619 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
621 ptr = &rs6000_select[i];
622 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
624 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
630 switch (rs6000_sdata)
632 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
633 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
634 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
635 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
638 if (rs6000_sdata && g_switch_value)
640 fprintf (file, "%s -G %d", start, g_switch_value);
651 /* Create a CONST_DOUBLE from a string. */
654 rs6000_float_const (string, mode)
656 enum machine_mode mode;
658 REAL_VALUE_TYPE value;
659 value = REAL_VALUE_ATOF (string, mode);
660 return immed_real_const_1 (value, mode);
663 /* Return non-zero if this function is known to have a null epilogue. */
668 if (reload_completed)
670 rs6000_stack_t *info = rs6000_stack_info ();
672 if (info->first_gp_reg_save == 32
673 && info->first_fp_reg_save == 64
674 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
677 && info->vrsave_mask == 0
685 /* Returns 1 always. */
688 any_operand (op, mode)
689 rtx op ATTRIBUTE_UNUSED;
690 enum machine_mode mode ATTRIBUTE_UNUSED;
695 /* Returns 1 if op is the count register. */
697 count_register_operand (op, mode)
699 enum machine_mode mode ATTRIBUTE_UNUSED;
701 if (GET_CODE (op) != REG)
704 if (REGNO (op) == COUNT_REGISTER_REGNUM)
707 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
714 xer_operand (op, mode)
716 enum machine_mode mode ATTRIBUTE_UNUSED;
718 if (GET_CODE (op) != REG)
721 if (XER_REGNO_P (REGNO (op)))
727 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
728 by such constants completes more quickly. */
731 s8bit_cint_operand (op, mode)
733 enum machine_mode mode ATTRIBUTE_UNUSED;
735 return ( GET_CODE (op) == CONST_INT
736 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
739 /* Return 1 if OP is a constant that can fit in a D field. */
742 short_cint_operand (op, mode)
744 enum machine_mode mode ATTRIBUTE_UNUSED;
746 return (GET_CODE (op) == CONST_INT
747 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
750 /* Similar for an unsigned D field. */
753 u_short_cint_operand (op, mode)
755 enum machine_mode mode ATTRIBUTE_UNUSED;
757 return (GET_CODE (op) == CONST_INT
758 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'));
761 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
764 non_short_cint_operand (op, mode)
766 enum machine_mode mode ATTRIBUTE_UNUSED;
768 return (GET_CODE (op) == CONST_INT
769 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
772 /* Returns 1 if OP is a CONST_INT that is a positive value
773 and an exact power of 2. */
776 exact_log2_cint_operand (op, mode)
778 enum machine_mode mode ATTRIBUTE_UNUSED;
780 return (GET_CODE (op) == CONST_INT
782 && exact_log2 (INTVAL (op)) >= 0);
785 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
789 gpc_reg_operand (op, mode)
791 enum machine_mode mode;
793 return (register_operand (op, mode)
794 && (GET_CODE (op) != REG
795 || (REGNO (op) >= ARG_POINTER_REGNUM
796 && !XER_REGNO_P (REGNO (op)))
797 || REGNO (op) < MQ_REGNO));
800 /* Returns 1 if OP is either a pseudo-register or a register denoting a
804 cc_reg_operand (op, mode)
806 enum machine_mode mode;
808 return (register_operand (op, mode)
809 && (GET_CODE (op) != REG
810 || REGNO (op) >= FIRST_PSEUDO_REGISTER
811 || CR_REGNO_P (REGNO (op))));
814 /* Returns 1 if OP is either a pseudo-register or a register denoting a
815 CR field that isn't CR0. */
818 cc_reg_not_cr0_operand (op, mode)
820 enum machine_mode mode;
822 return (register_operand (op, mode)
823 && (GET_CODE (op) != REG
824 || REGNO (op) >= FIRST_PSEUDO_REGISTER
825 || CR_REGNO_NOT_CR0_P (REGNO (op))));
828 /* Returns 1 if OP is either a constant integer valid for a D-field or
829 a non-special register. If a register, it must be in the proper
830 mode unless MODE is VOIDmode. */
833 reg_or_short_operand (op, mode)
835 enum machine_mode mode;
837 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
840 /* Similar, except check if the negation of the constant would be
841 valid for a D-field. */
844 reg_or_neg_short_operand (op, mode)
846 enum machine_mode mode;
848 if (GET_CODE (op) == CONST_INT)
849 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
851 return gpc_reg_operand (op, mode);
854 /* Returns 1 if OP is either a constant integer valid for a DS-field or
855 a non-special register. If a register, it must be in the proper
856 mode unless MODE is VOIDmode. */
859 reg_or_aligned_short_operand (op, mode)
861 enum machine_mode mode;
863 if (gpc_reg_operand (op, mode))
865 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
872 /* Return 1 if the operand is either a register or an integer whose
873 high-order 16 bits are zero. */
876 reg_or_u_short_operand (op, mode)
878 enum machine_mode mode;
880 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
883 /* Return 1 is the operand is either a non-special register or ANY
887 reg_or_cint_operand (op, mode)
889 enum machine_mode mode;
891 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
894 /* Return 1 is the operand is either a non-special register or ANY
895 32-bit signed constant integer. */
898 reg_or_arith_cint_operand (op, mode)
900 enum machine_mode mode;
902 return (gpc_reg_operand (op, mode)
903 || (GET_CODE (op) == CONST_INT
904 #if HOST_BITS_PER_WIDE_INT != 32
905 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
906 < (unsigned HOST_WIDE_INT) 0x100000000ll)
911 /* Return 1 is the operand is either a non-special register or a 32-bit
912 signed constant integer valid for 64-bit addition. */
915 reg_or_add_cint64_operand (op, mode)
917 enum machine_mode mode;
919 return (gpc_reg_operand (op, mode)
920 || (GET_CODE (op) == CONST_INT
921 && INTVAL (op) < 0x7fff8000
922 #if HOST_BITS_PER_WIDE_INT != 32
923 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
929 /* Return 1 is the operand is either a non-special register or a 32-bit
930 signed constant integer valid for 64-bit subtraction. */
933 reg_or_sub_cint64_operand (op, mode)
935 enum machine_mode mode;
937 return (gpc_reg_operand (op, mode)
938 || (GET_CODE (op) == CONST_INT
939 && (- INTVAL (op)) < 0x7fff8000
940 #if HOST_BITS_PER_WIDE_INT != 32
941 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
947 /* Return 1 is the operand is either a non-special register or ANY
948 32-bit unsigned constant integer. */
951 reg_or_logical_cint_operand (op, mode)
953 enum machine_mode mode;
955 if (GET_CODE (op) == CONST_INT)
957 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
959 if (GET_MODE_BITSIZE (mode) <= 32)
966 return ((INTVAL (op) & GET_MODE_MASK (mode)
967 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
969 else if (GET_CODE (op) == CONST_DOUBLE)
971 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
975 return CONST_DOUBLE_HIGH (op) == 0;
978 return gpc_reg_operand (op, mode);
981 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
984 got_operand (op, mode)
986 enum machine_mode mode ATTRIBUTE_UNUSED;
988 return (GET_CODE (op) == SYMBOL_REF
989 || GET_CODE (op) == CONST
990 || GET_CODE (op) == LABEL_REF);
993 /* Return 1 if the operand is a simple references that can be loaded via
994 the GOT (labels involving addition aren't allowed). */
997 got_no_const_operand (op, mode)
999 enum machine_mode mode ATTRIBUTE_UNUSED;
1001 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1004 /* Return the number of instructions it takes to form a constant in an
1005 integer register. */
1008 num_insns_constant_wide (value)
1009 HOST_WIDE_INT value;
1011 /* signed constant loadable with {cal|addi} */
1012 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1015 /* constant loadable with {cau|addis} */
1016 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1019 #if HOST_BITS_PER_WIDE_INT == 64
1020 else if (TARGET_POWERPC64)
1022 HOST_WIDE_INT low = value & 0xffffffff;
1023 HOST_WIDE_INT high = value >> 32;
1025 low = (low ^ 0x80000000) - 0x80000000; /* sign extend */
1027 if (high == 0 && (low & 0x80000000) == 0)
1030 else if (high == -1 && (low & 0x80000000) != 0)
1034 return num_insns_constant_wide (high) + 1;
1037 return (num_insns_constant_wide (high)
1038 + num_insns_constant_wide (low) + 1);
1047 num_insns_constant (op, mode)
1049 enum machine_mode mode;
1051 if (GET_CODE (op) == CONST_INT)
1053 #if HOST_BITS_PER_WIDE_INT == 64
1054 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1055 && mask64_operand (op, mode))
1059 return num_insns_constant_wide (INTVAL (op));
1062 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1067 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1068 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1069 return num_insns_constant_wide ((HOST_WIDE_INT)l);
1072 else if (GET_CODE (op) == CONST_DOUBLE)
1078 int endian = (WORDS_BIG_ENDIAN == 0);
1080 if (mode == VOIDmode || mode == DImode)
1082 high = CONST_DOUBLE_HIGH (op);
1083 low = CONST_DOUBLE_LOW (op);
1087 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1088 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1090 low = l[1 - endian];
1094 return (num_insns_constant_wide (low)
1095 + num_insns_constant_wide (high));
1099 if (high == 0 && (low & 0x80000000) == 0)
1100 return num_insns_constant_wide (low);
1102 else if (high == -1 && (low & 0x80000000) != 0)
1103 return num_insns_constant_wide (low);
1105 else if (mask64_operand (op, mode))
1109 return num_insns_constant_wide (high) + 1;
1112 return (num_insns_constant_wide (high)
1113 + num_insns_constant_wide (low) + 1);
/* NOTE(review): this extract is missing lines (return type, parameter
   declarations, braces and several return statements); the surviving
   lines are kept byte-identical below.  Predicate: OP is a CONST_DOUBLE
   loadable with one instruction per word.  */
1121 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1122 register with one instruction per word. We only do this if we can
1123 safely read CONST_DOUBLE_{LOW,HIGH}. */
1126 easy_fp_constant (op, mode)
1128 enum machine_mode mode;
1130 if (GET_CODE (op) != CONST_DOUBLE
1131 || GET_MODE (op) != mode
1132 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1135 /* Consider all constants with -msoft-float to be easy. */
1136 if (TARGET_SOFT_FLOAT && mode != DImode)
1139 /* If we are using V.4 style PIC, consider all constants to be hard. */
1140 if (flag_pic && DEFAULT_ABI == ABI_V4)
1143 #ifdef TARGET_RELOCATABLE
1144 /* Similarly if we are using -mrelocatable, consider all constants
1146 if (TARGET_RELOCATABLE)
/* DFmode case: split the value into two target words and check each
   half costs a single instruction.  */
1155 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1156 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1158 return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
1159 && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
1162 else if (mode == SFmode)
1167 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1168 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1170 return num_insns_constant_wide (l) == 1;
/* DImode constants are "easy" on 64-bit targets when the low word is
   zero, or whenever they fit in at most two instructions.  */
1173 else if (mode == DImode)
1174 return ((TARGET_POWERPC64
1175 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1176 || (num_insns_constant (op, DImode) <= 2));
1178 else if (mode == SImode)
1184 /* Return 1 if the operand is 0.0. */
1186 zero_fp_constant (op, mode)
1188 enum machine_mode mode;
1190 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1193 /* Return 1 if the operand is in volatile memory. Note that during
1194 the RTL generation phase, memory_operand does not return TRUE for
1195 volatile memory references. So this function allows us to
1196 recognize volatile references where its safe. */
1199 volatile_mem_operand (op, mode)
1201 enum machine_mode mode;
1203 if (GET_CODE (op) != MEM)
1206 if (!MEM_VOLATILE_P (op))
1209 if (mode != GET_MODE (op))
1212 if (reload_completed)
1213 return memory_operand (op, mode);
1215 if (reload_in_progress)
1216 return strict_memory_address_p (mode, XEXP (op, 0));
1218 return memory_address_p (mode, XEXP (op, 0));
1221 /* Return 1 if the operand is an offsettable memory operand. */
1224 offsettable_mem_operand (op, mode)
1226 enum machine_mode mode;
1228 return ((GET_CODE (op) == MEM)
1229 && offsettable_address_p (reload_completed || reload_in_progress,
1230 mode, XEXP (op, 0)));
1233 /* Return 1 if the operand is either an easy FP constant (see above) or
1237 mem_or_easy_const_operand (op, mode)
1239 enum machine_mode mode;
1241 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1244 /* Return 1 if the operand is either a non-special register or an item
1245 that can be used as the operand of a `mode' add insn. */
1248 add_operand (op, mode)
1250 enum machine_mode mode;
1252 if (GET_CODE (op) == CONST_INT)
1253 return (CONST_OK_FOR_LETTER_P (INTVAL(op), 'I')
1254 || CONST_OK_FOR_LETTER_P (INTVAL(op), 'L'));
1256 return gpc_reg_operand (op, mode);
1259 /* Return 1 if OP is a constant but not a valid add_operand. */
1262 non_add_cint_operand (op, mode)
1264 enum machine_mode mode ATTRIBUTE_UNUSED;
1266 return (GET_CODE (op) == CONST_INT
1267 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000
1268 && ! CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
/* NOTE(review): this extract is missing lines (return type, OP
   declaration, braces, several returns and the 64-bit splitting code);
   the surviving lines are kept byte-identical below.  Accepts a
   register or a constant usable as an ori/oris/xori/xoris immediate.  */
1271 /* Return 1 if the operand is a non-special register or a constant that
1272 can be used as the operand of an OR or XOR insn on the RS/6000. */
1275 logical_operand (op, mode)
1277 enum machine_mode mode;
1279 HOST_WIDE_INT opl, oph;
1281 if (gpc_reg_operand (op, mode))
1284 if (GET_CODE (op) == CONST_INT)
1286 opl = INTVAL (op) & GET_MODE_MASK (mode);
1288 #if HOST_BITS_PER_WIDE_INT <= 32
1289 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1293 else if (GET_CODE (op) == CONST_DOUBLE)
1295 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1298 opl = CONST_DOUBLE_LOW (op);
1299 oph = CONST_DOUBLE_HIGH (op);
/* Accept if the value fits entirely in the low 16 bits (one ori/xori)
   or entirely in bits 16-31 (one oris/xoris).  */
1306 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1307 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1310 /* Return 1 if C is a constant that is not a logical operand (as
1311 above), but could be split into one. */
1314 non_logical_cint_operand (op, mode)
1316 enum machine_mode mode;
1318 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1319 && ! logical_operand (op, mode)
1320 && reg_or_logical_cint_operand (op, mode));
/* NOTE(review): this extract has lost almost all of the executable
   statements of this function (only the declarations, the initial
   check and the step comments survive); the surviving lines are kept
   byte-identical below.  */
1323 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1324 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1325 Reject all ones and all zeros, since these should have been optimized
1326 away and confuse the making of MB and ME. */
1329 mask_operand (op, mode)
1331 enum machine_mode mode ATTRIBUTE_UNUSED;
1333 HOST_WIDE_INT c, lsb;
1335 if (GET_CODE (op) != CONST_INT)
1340 /* We don't change the number of transitions by inverting,
1341 so make sure we start with the LS bit zero. */
1345 /* Reject all zeros or all ones. */
1349 /* Find the first transition. */
1352 /* Invert to look for a second transition. */
1355 /* Erase first transition. */
1358 /* Find the second transition (if any). */
1361 /* Match if all the bits above are 1's (or c is zero). */
/* NOTE(review): this extract is missing lines (return type, OP
   declaration, braces, the bit-twiddling statements between the step
   comments, and some returns); the surviving lines are kept
   byte-identical below.  */
1365 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1366 It is if there are no more than one 1->0 or 0->1 transitions.
1367 Reject all ones and all zeros, since these should have been optimized
1368 away and confuse the making of MB and ME. */
1371 mask64_operand (op, mode)
1373 enum machine_mode mode;
1375 if (GET_CODE (op) == CONST_INT)
1377 HOST_WIDE_INT c, lsb;
1379 /* We don't change the number of transitions by inverting,
1380 so make sure we start with the LS bit zero. */
1385 /* Reject all zeros or all ones. */
1389 /* Find the transition, and check that all bits above are 1's. */
/* CONST_DOUBLE branch: the 64-bit value is split across HIGH/LOW words
   on 32-bit hosts.  */
1393 else if (GET_CODE (op) == CONST_DOUBLE
1394 && (mode == VOIDmode || mode == DImode))
1396 HOST_WIDE_INT low, high, lsb;
1398 if (HOST_BITS_PER_WIDE_INT < 64)
1399 high = CONST_DOUBLE_HIGH (op);
1401 low = CONST_DOUBLE_LOW (op);
1404 if (HOST_BITS_PER_WIDE_INT < 64)
1411 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1415 return high == -lsb;
1419 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1425 /* Return 1 if the operand is either a non-special register or a constant
1426 that can be used as the operand of a PowerPC64 logical AND insn. */
1429 and64_operand (op, mode)
1431 enum machine_mode mode;
1433 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1434 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1436 return (logical_operand (op, mode) || mask64_operand (op, mode));
1439 /* Return 1 if the operand is either a non-special register or a
1440 constant that can be used as the operand of an RS/6000 logical AND insn. */
1443 and_operand (op, mode)
1445 enum machine_mode mode;
1447 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1448 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1450 return (logical_operand (op, mode) || mask_operand (op, mode));
1453 /* Return 1 if the operand is a general register or memory operand. */
1456 reg_or_mem_operand (op, mode)
1458 enum machine_mode mode;
1460 return (gpc_reg_operand (op, mode)
1461 || memory_operand (op, mode)
1462 || volatile_mem_operand (op, mode));
/* NOTE(review): this extract is missing lines (return type, the OP
   declaration, the local `inner' initialization -- presumably
   `rtx inner = op;' -- and braces); the surviving lines are kept
   byte-identical below.  lwa requires a displacement that is a
   multiple of 4, hence the % 4 check.  */
1465 /* Return 1 if the operand is a general register or memory operand without
1466 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1470 lwa_operand (op, mode)
1472 enum machine_mode mode;
1476 if (reload_completed && GET_CODE (inner) == SUBREG)
1477 inner = SUBREG_REG (inner);
1479 return gpc_reg_operand (inner, mode)
1480 || (memory_operand (inner, mode)
1481 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1482 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1483 && (GET_CODE (XEXP (inner, 0)) != PLUS
1484 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1485 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1488 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1489 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1493 call_operand (op, mode)
1495 enum machine_mode mode;
1497 if (mode != VOIDmode && GET_MODE (op) != mode)
1500 return (GET_CODE (op) == SYMBOL_REF
1501 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1504 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1505 this file and the function is not weakly defined. */
1508 current_file_function_operand (op, mode)
1510 enum machine_mode mode ATTRIBUTE_UNUSED;
1512 return (GET_CODE (op) == SYMBOL_REF
1513 && (SYMBOL_REF_FLAG (op)
1514 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1515 && ! DECL_WEAK (current_function_decl))));
/* NOTE(review): this extract is missing lines (return type, OP
   declaration, braces, and the `return 1;'/`return 0;' statements
   after each test); the surviving lines are kept byte-identical
   below.  */
1518 /* Return 1 if this operand is a valid input for a move insn. */
1521 input_operand (op, mode)
1523 enum machine_mode mode;
1525 /* Memory is always valid. */
1526 if (memory_operand (op, mode))
1529 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1530 if (GET_CODE (op) == CONSTANT_P_RTX)
1533 /* For floating-point, easy constants are valid. */
1534 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1536 && easy_fp_constant (op, mode))
1539 /* Allow any integer constant. */
1540 if (GET_MODE_CLASS (mode) == MODE_INT
1541 && (GET_CODE (op) == CONST_INT
1542 || GET_CODE (op) == CONST_DOUBLE))
1545 /* For floating-point or multi-word mode, the only remaining valid type
1547 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1548 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1549 return register_operand (op, mode);
1551 /* The only cases left are integral modes one word or smaller (we
1552 do not get called for MODE_CC values). These can be in any
1554 if (register_operand (op, mode))
1557 /* A SYMBOL_REF referring to the TOC is valid. */
1558 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1561 /* A constant pool expression (relative to the TOC) is valid */
1562 if (TOC_RELATIVE_EXPR_P (op))
1565 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1567 if (DEFAULT_ABI == ABI_V4
1568 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1569 && small_data_operand (op, Pmode))
/* NOTE(review): this extract is missing lines (return type, braces,
   the #if TARGET_ELF guards and several returns); the surviving lines
   are kept byte-identical below.  */
1575 /* Return 1 for an operand in small memory on V.4/eabi. */
1578 small_data_operand (op, mode)
1579 rtx op ATTRIBUTE_UNUSED;
1580 enum machine_mode mode ATTRIBUTE_UNUSED;
1585 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1588 if (DEFAULT_ABI != ABI_V4)
1591 if (GET_CODE (op) == SYMBOL_REF)
1594 else if (GET_CODE (op) != CONST
1595 || GET_CODE (XEXP (op, 0)) != PLUS
1596 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1597 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1602 rtx sum = XEXP (op, 0);
1603 HOST_WIDE_INT summand;
1605 /* We have to be careful here, because it is the referenced address
1606 that must be 32k from _SDA_BASE_, not just the symbol. */
1607 summand = INTVAL (XEXP (sum, 1));
1608 if (summand < 0 || summand > g_switch_value)
1611 sym_ref = XEXP (sum, 0);
/* Small-data symbols are tagged with a leading '@' by ENCODE_SECTION_INFO;
   anything else is not in the small data area.  */
1614 if (*XSTR (sym_ref, 0) != '@')
/* NOTE(review): recursive walk over an address expression; it sets
   *HAVE_SYM when a constant-pool SYMBOL_REF is found and *HAVE_TOC
   when the TOC label is found.  This extract is missing the return
   type, parameter declarations, the case labels of the switch, and
   braces; the surviving lines are kept byte-identical below.  */
1625 constant_pool_expr_1 (op, have_sym, have_toc)
1630 switch (GET_CODE(op))
1633 if (CONSTANT_POOL_ADDRESS_P (op))
1635 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1643 else if (! strcmp (XSTR (op, 0), toc_label_name))
/* PLUS/MINUS: both operands must themselves qualify.  */
1652 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc) &&
1653 constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc);
1655 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1664 constant_pool_expr_p (op)
1669 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1673 toc_relative_expr_p (op)
1678 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
/* NOTE(review): this extract is missing lines (return type, the X
   declaration, braces, local declarations such as `rtx sum;'/`rtx reg;',
   some condition lines and the fall-through return); the surviving
   lines are kept byte-identical below.  */
1681 /* Try machine-dependent ways of modifying an illegitimate address
1682 to be legitimate. If we find one, return the new, valid address.
1683 This is used from only one place: `memory_address' in explow.c.
1685 OLDX is the address as it was before break_out_memory_refs was
1686 called. In some cases it is useful to look at this to decide what
1689 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1691 It is always safe for this function to do nothing. It exists to
1692 recognize opportunities to optimize the output.
1694 On RS/6000, first check for the sum of a register with a constant
1695 integer that is out of range. If so, generate code to add the
1696 constant with the low-order 16 bits masked to the register and force
1697 this result into another register (this can be done with `cau').
1698 Then generate an address of REG+(CONST&0xffff), allowing for the
1699 possibility of bit 16 being a one.
1701 Then check for the sum of a register and something not constant, try to
1702 load the other things into a register and return the sum. */
1704 rs6000_legitimize_address (x, oldx, mode)
1706 rtx oldx ATTRIBUTE_UNUSED;
1707 enum machine_mode mode;
/* Case 1: reg + out-of-range constant -- split into a high part added
   via a separate insn and a 16-bit low displacement.  */
1709 if (GET_CODE (x) == PLUS
1710 && GET_CODE (XEXP (x, 0)) == REG
1711 && GET_CODE (XEXP (x, 1)) == CONST_INT
1712 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1714 HOST_WIDE_INT high_int, low_int;
1716 high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
1717 low_int = INTVAL (XEXP (x, 1)) & 0xffff;
1718 if (low_int & 0x8000)
1719 high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
1720 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1721 GEN_INT (high_int)), 0);
1722 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
/* Case 2: reg + non-constant -- force the addend into a register so
   indexed addressing can be used.  */
1724 else if (GET_CODE (x) == PLUS
1725 && GET_CODE (XEXP (x, 0)) == REG
1726 && GET_CODE (XEXP (x, 1)) != CONST_INT
1727 && GET_MODE_NUNITS (mode) == 1
1728 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1729 && (TARGET_POWERPC64 || mode != DImode)
1732 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1733 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1735 else if (ALTIVEC_VECTOR_MODE (mode))
1739 /* Make sure both operands are registers. */
1740 if (GET_CODE (x) == PLUS)
1741 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1742 force_reg (Pmode, XEXP (x, 1)));
1744 reg = force_reg (Pmode, x);
/* Case 4: ELF without a TOC -- materialize the address with
   elf_high/LO_SUM.  */
1747 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1748 && GET_CODE (x) != CONST_INT
1749 && GET_CODE (x) != CONST_DOUBLE
1751 && GET_MODE_NUNITS (mode) == 1
1752 && (GET_MODE_BITSIZE (mode) <= 32
1753 || (TARGET_HARD_FLOAT && mode == DFmode)))
1755 rtx reg = gen_reg_rtx (Pmode);
1756 emit_insn (gen_elf_high (reg, (x)));
1757 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 5: the Darwin analogue using macho_high/LO_SUM.  */
1759 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1761 && GET_CODE (x) != CONST_INT
1762 && GET_CODE (x) != CONST_DOUBLE
1764 && (TARGET_HARD_FLOAT || mode != DFmode)
1768 rtx reg = gen_reg_rtx (Pmode);
1769 emit_insn (gen_macho_high (reg, (x)));
1770 return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Case 6: a constant-pool expression that lives in the TOC.  */
1773 && CONSTANT_POOL_EXPR_P (x)
1774 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1776 return create_TOC_reference (x);
1782 /* The convention appears to be to define this wherever it is used.
1783 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1784 is now used here. */
1785 #ifndef REG_MODE_OK_FOR_BASE_P
1786 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
/* NOTE(review): this extract is missing lines (return type, several
   parameter declarations, braces, the `*win = 1;'/`return x;'
   statements after each push_reload, and #if TARGET_MACHO guards);
   the surviving lines are kept byte-identical below.  */
1789 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1790 replace the input X, or the original X if no replacement is called for.
1791 The output parameter *WIN is 1 if the calling macro should goto WIN,
1794 For RS/6000, we wish to handle large displacements off a base
1795 register by splitting the addend across an addiu/addis and the mem insn.
1796 This cuts number of extra insns needed from 3 to 1.
1798 On Darwin, we use this to generate code for floating point constants.
1799 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1800 The Darwin code is inside #if TARGET_MACHO because only then is
1801 machopic_function_base_name() defined. */
1803 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1805 enum machine_mode mode;
1808 int ind_levels ATTRIBUTE_UNUSED;
1811 /* We must recognize output that we have already generated ourselves. */
1812 if (GET_CODE (x) == PLUS
1813 && GET_CODE (XEXP (x, 0)) == PLUS
1814 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1815 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1816 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1818 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1819 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1820 opnum, (enum reload_type)type);
/* Recognize the LO_SUM form this function itself produced earlier for
   Darwin PIC floating-point constants.  */
1826 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1827 && GET_CODE (x) == LO_SUM
1828 && GET_CODE (XEXP (x, 0)) == PLUS
1829 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1830 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1831 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1832 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1833 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1834 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1835 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1837 /* Result of previous invocation of this function on Darwin
1838 floating point constant. */
1839 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1840 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1841 opnum, (enum reload_type)type);
/* Split a large displacement: reload the high part into a base
   register and keep a sign-extended 16-bit low part in the mem.  */
1846 if (GET_CODE (x) == PLUS
1847 && GET_CODE (XEXP (x, 0)) == REG
1848 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1849 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1850 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1852 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1853 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1855 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1857 /* Check for 32-bit overflow. */
1858 if (high + low != val)
1864 /* Reload the high part into a base reg; leave the low part
1865 in the mem directly. */
1867 x = gen_rtx_PLUS (GET_MODE (x),
1868 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1872 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1873 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1874 opnum, (enum reload_type)type);
1879 if (GET_CODE (x) == SYMBOL_REF
1880 && DEFAULT_ABI == ABI_DARWIN
1883 /* Darwin load of floating point constant. */
1884 rtx offset = gen_rtx (CONST, Pmode,
1885 gen_rtx (MINUS, Pmode, x,
1886 gen_rtx (SYMBOL_REF, Pmode,
1887 machopic_function_base_name ())));
1888 x = gen_rtx (LO_SUM, GET_MODE (x),
1889 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1890 gen_rtx (HIGH, Pmode, offset)), offset);
1891 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1892 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1893 opnum, (enum reload_type)type);
/* Finally, a TOC-resident constant-pool address is rewritten as a
   TOC reference.  */
1899 && CONSTANT_POOL_EXPR_P (x)
1900 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1902 (x) = create_TOC_reference (x);
/* NOTE(review): this extract is missing lines (return type, the X and
   REG_OK_STRICT declarations, braces, and every `return 1;'/`return 0;');
   each surviving `if' returns nonzero when its address form matches.
   The surviving lines are kept byte-identical below.  */
1910 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1911 that is a valid memory address for an instruction.
1912 The MODE argument is the machine mode for the MEM expression
1913 that wants to use this address.
1915 On the RS/6000, there are four valid address: a SYMBOL_REF that
1916 refers to a constant pool entry of an address (or the sum of it
1917 plus a constant), a short (16-bit signed) constant plus a register,
1918 the sum of two registers, or a register indirect, possibly with an
1919 auto-increment. For DFmode and DImode with an constant plus register,
1920 we must ensure that both words are addressable or PowerPC64 with offset
1923 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1924 32-bit DImode, TImode), indexed addressing cannot be used because
1925 adjacent memory cells are accessed by adding word-sized offsets
1926 during assembly output. */
1928 rs6000_legitimate_address (mode, x, reg_ok_strict)
1929 enum machine_mode mode;
1933 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
1935 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
1937 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
1939 if (LEGITIMATE_SMALL_DATA_P (mode, x))
1941 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
1943 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
1945 && GET_CODE (x) == PLUS
1946 && GET_CODE (XEXP (x, 0)) == REG
1947 && XEXP (x, 0) == virtual_stack_vars_rtx
1948 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1950 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
1953 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1954 && (TARGET_POWERPC64 || mode != DImode)
1955 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
1957 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
/* NOTE(review): this extract is missing lines (return type, the
   DEST/SOURCE declarations, braces, the `return dest;' of the
   small-mode path, the copy_to_mode_reg calls, and the #else arm of
   the #if); the surviving lines are kept byte-identical below.  */
1962 /* Try to output insns to set TARGET equal to the constant C if it can
1963 be done in less than N insns. Do all computations in MODE.
1964 Returns the place where the output has been placed if it can be
1965 done and the insns have been emitted. If it would take more than N
1966 insns, zero is returned and no insns and emitted. */
1969 rs6000_emit_set_const (dest, mode, source, n)
1971 enum machine_mode mode;
1972 int n ATTRIBUTE_UNUSED;
1974 HOST_WIDE_INT c0, c1;
/* Narrow modes can always be set with a single SET.  */
1976 if (mode == QImode || mode == HImode || mode == SImode)
1979 dest = gen_reg_rtx (mode);
1980 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
1984 if (GET_CODE (source) == CONST_INT)
1986 c0 = INTVAL (source);
1989 else if (GET_CODE (source) == CONST_DOUBLE)
1991 #if HOST_BITS_PER_WIDE_INT >= 64
1992 c0 = CONST_DOUBLE_LOW (source);
1995 c0 = CONST_DOUBLE_LOW (source);
1996 c1 = CONST_DOUBLE_HIGH (source);
2002 return rs6000_emit_set_long_const (dest, c0, c1);
/* NOTE(review): this extract is missing lines (return type, the DEST
   declaration, braces, the ud1/ud3 assignments, #else arms, and the
   final `return dest;'); the surviving lines are kept byte-identical
   below.  The 64-bit path builds the constant 16 bits (one "ud" word)
   at a time with lis/ori/oris-style sequences and shifts.  */
2005 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2006 fall back to a straight forward decomposition. We do this to avoid
2007 exponential run times encountered when looking for longer sequences
2008 with rs6000_emit_set_const. */
2010 rs6000_emit_set_long_const (dest, c1, c2)
2012 HOST_WIDE_INT c1, c2;
/* 32-bit target: just move each 32-bit half into its subword.  */
2014 if (!TARGET_POWERPC64)
2016 rtx operand1, operand2;
2018 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2020 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2022 emit_move_insn (operand1, GEN_INT (c1));
2023 emit_move_insn (operand2, GEN_INT (c2));
2027 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2030 ud2 = (c1 & 0xffff0000) >> 16;
2031 #if HOST_BITS_PER_WIDE_INT >= 64
2035 ud4 = (c2 & 0xffff0000) >> 16;
/* Value fits in a sign-extended 16-bit immediate.  */
2037 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2038 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2041 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2043 emit_move_insn (dest, GEN_INT (ud1));
/* Value fits in a sign-extended 32-bit immediate: lis + optional ori.  */
2046 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2047 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2050 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2053 emit_move_insn (dest, GEN_INT (ud2 << 16));
2055 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* 48-bit significant value: build high part, shift left 16, or in ud1.  */
2057 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2058 || (ud4 == 0 && ! (ud3 & 0x8000)))
2061 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2064 emit_move_insn (dest, GEN_INT (ud3 << 16));
2067 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2068 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2070 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* Full 64-bit value: build the top 32 bits, shift left 32, then or in
   the lower two 16-bit words.  */
2075 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2078 emit_move_insn (dest, GEN_INT (ud4 << 16));
2081 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2083 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2085 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2086 GEN_INT (ud2 << 16)));
2088 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
/* NOTE(review): main expander for move patterns.  This extract is
   missing many lines (return type, parameter declarations, the
   operands[] declaration, braces, the switch statement heads and
   `case'/`break' labels, and several returns); the surviving lines are
   kept byte-identical below.  The overall flow: sanitize CONST_DOUBLE
   sources, special-case POWER SFmode stores, then per-mode handling
   (force hard constants to the constant pool / TOC), and finally emit
   the SET.  */
2094 /* Emit a move from SOURCE to DEST in mode MODE. */
2096 rs6000_emit_move (dest, source, mode)
2099 enum machine_mode mode;
2103 operands[1] = source;
2105 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2106 if (GET_CODE (operands[1]) == CONST_DOUBLE
2107 && ! FLOAT_MODE_P (mode)
2108 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2110 /* FIXME. This should never happen. */
2111 /* Since it seems that it does, do the safe thing and convert
2114 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
2116 if (GET_CODE (operands[1]) == CONST_DOUBLE
2117 && ! FLOAT_MODE_P (mode)
2118 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2119 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2120 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2121 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2124 /* Check if GCC is setting up a block move that will end up using FP
2125 registers as temporaries. We must make sure this is acceptable. */
2126 if (GET_CODE (operands[0]) == MEM
2127 && GET_CODE (operands[1]) == MEM
2129 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2130 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2131 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2132 ? 32 : MEM_ALIGN (operands[0])))
2133 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2135 : MEM_ALIGN (operands[1]))))
2136 && ! MEM_VOLATILE_P (operands [0])
2137 && ! MEM_VOLATILE_P (operands [1]))
/* Split the DImode mem-to-mem move into two SImode word moves.  */
2139 emit_move_insn (adjust_address (operands[0], SImode, 0),
2140 adjust_address (operands[1], SImode, 0));
2141 emit_move_insn (adjust_address (operands[0], SImode, 4),
2142 adjust_address (operands[1], SImode, 4));
2146 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2147 operands[1] = force_reg (mode, operands[1]);
/* On POWER (not PowerPC) an FP register may hold a double even in
   SFmode, so stores must go through an explicit truncation.  */
2149 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2150 && GET_CODE (operands[0]) == MEM)
2154 if (reload_in_progress || reload_completed)
2155 regnum = true_regnum (operands[1]);
2156 else if (GET_CODE (operands[1]) == REG)
2157 regnum = REGNO (operands[1]);
2161 /* If operands[1] is a register, on POWER it may have
2162 double-precision data in it, so truncate it to single
2164 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2167 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2168 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2169 operands[1] = newreg;
2173 /* Handle the case where reload calls us with an invalid address;
2174 and the case of CONSTANT_P_RTX. */
2175 if (! general_operand (operands[1], mode)
2176 || ! nonimmediate_operand (operands[0], mode)
2177 || GET_CODE (operands[1]) == CONSTANT_P_RTX)
2179 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2183 /* FIXME: In the long term, this switch statement should go away
2184 and be replaced by a sequence of tests based on things like
/* Per-mode cases follow (switch heads lost in extraction).  */
2190 if (CONSTANT_P (operands[1])
2191 && GET_CODE (operands[1]) != CONST_INT)
2192 operands[1] = force_const_mem (mode, operands[1]);
2198 if (CONSTANT_P (operands[1])
2199 && ! easy_fp_constant (operands[1], mode))
2200 operands[1] = force_const_mem (mode, operands[1]);
2207 /* fixme: aldyh -- allow vector constants when they are implemented. */
2208 if (CONSTANT_P (operands[1]))
2209 operands[1] = force_const_mem (mode, operands[1]);
2214 /* Use default pattern for address of ELF small data */
2217 && DEFAULT_ABI == ABI_V4
2218 && (GET_CODE (operands[1]) == SYMBOL_REF
2219 || GET_CODE (operands[1]) == CONST)
2220 && small_data_operand (operands[1], mode))
2222 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2226 if (DEFAULT_ABI == ABI_V4
2227 && mode == Pmode && mode == SImode
2228 && flag_pic == 1 && got_operand (operands[1], mode))
2230 emit_insn (gen_movsi_got (operands[0], operands[1]));
2234 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2235 && TARGET_NO_TOC && ! flag_pic
2237 && CONSTANT_P (operands[1])
2238 && GET_CODE (operands[1]) != HIGH
2239 && GET_CODE (operands[1]) != CONST_INT)
2241 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2243 /* If this is a function address on -mcall-aixdesc,
2244 convert it to the address of the descriptor. */
2245 if (DEFAULT_ABI == ABI_AIX
2246 && GET_CODE (operands[1]) == SYMBOL_REF
2247 && XSTR (operands[1], 0)[0] == '.')
2249 const char *name = XSTR (operands[1], 0);
2251 while (*name == '.')
2253 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2254 CONSTANT_POOL_ADDRESS_P (new_ref)
2255 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2256 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2257 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2258 operands[1] = new_ref;
2261 if (DEFAULT_ABI == ABI_DARWIN)
2263 emit_insn (gen_macho_high (target, operands[1]));
2264 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2268 emit_insn (gen_elf_high (target, operands[1]));
2269 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2273 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2274 and we have put it in the TOC, we just need to make a TOC-relative
2277 && GET_CODE (operands[1]) == SYMBOL_REF
2278 && CONSTANT_POOL_EXPR_P (operands[1])
2279 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2280 get_pool_mode (operands[1])))
2282 operands[1] = create_TOC_reference (operands[1]);
2284 else if (mode == Pmode
2285 && CONSTANT_P (operands[1])
2286 && ((GET_CODE (operands[1]) != CONST_INT
2287 && ! easy_fp_constant (operands[1], mode))
2288 || (GET_CODE (operands[1]) == CONST_INT
2289 && num_insns_constant (operands[1], mode) > 2)
2290 || (GET_CODE (operands[0]) == REG
2291 && FP_REGNO_P (REGNO (operands[0]))))
2292 && GET_CODE (operands[1]) != HIGH
2293 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2294 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2296 /* Emit a USE operation so that the constant isn't deleted if
2297 expensive optimizations are turned on because nobody
2298 references it. This should only be done for operands that
2299 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2300 This should not be done for operands that contain LABEL_REFs.
2301 For now, we just handle the obvious case. */
2302 if (GET_CODE (operands[1]) != LABEL_REF)
2303 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2306 /* Darwin uses a special PIC legitimizer. */
2307 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2310 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2312 if (operands[0] != operands[1])
2313 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2318 /* If we are to limit the number of things we put in the TOC and
2319 this is a symbol plus a constant we can add in one insn,
2320 just put the symbol in the TOC and add the constant. Don't do
2321 this if reload is in progress. */
2322 if (GET_CODE (operands[1]) == CONST
2323 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2324 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2325 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2326 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2327 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2328 && ! side_effects_p (operands[0]))
2331 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2332 rtx other = XEXP (XEXP (operands[1], 0), 1);
2334 sym = force_reg (mode, sym);
2336 emit_insn (gen_addsi3 (operands[0], sym, other));
2338 emit_insn (gen_adddi3 (operands[0], sym, other));
2342 operands[1] = force_const_mem (mode, operands[1]);
2345 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2346 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2347 get_pool_constant (XEXP (operands[1], 0)),
2348 get_pool_mode (XEXP (operands[1], 0))))
/* Constant went to the TOC: access it through a TOC-relative MEM that
   is known never to change.  */
2351 = gen_rtx_MEM (mode,
2352 create_TOC_reference (XEXP (operands[1], 0)));
2353 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2354 RTX_UNCHANGING_P (operands[1]) = 1;
2360 if (GET_CODE (operands[0]) == MEM
2361 && GET_CODE (XEXP (operands[0], 0)) != REG
2362 && ! reload_in_progress)
2364 = replace_equiv_address (operands[0],
2365 copy_addr_to_reg (XEXP (operands[0], 0)));
2367 if (GET_CODE (operands[1]) == MEM
2368 && GET_CODE (XEXP (operands[1], 0)) != REG
2369 && ! reload_in_progress)
2371 = replace_equiv_address (operands[1],
2372 copy_addr_to_reg (XEXP (operands[1], 0)));
2379 /* Above, we may have called force_const_mem which may have returned
2380 an invalid address. If we can, fix this up; otherwise, reload will
2381 have to deal with it. */
2382 if (GET_CODE (operands[1]) == MEM
2383 && ! memory_address_p (mode, XEXP (operands[1], 0))
2384 && ! reload_in_progress)
2385 operands[1] = adjust_address (operands[1], mode, 0);
2387 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
/* NOTE(review): this extract is missing lines (the void return type,
   the FNTYPE/INCOMING declarations, braces, the `if (incoming)' head
   above the 1000 assignment, and parts of the debug-print block); the
   surviving lines are kept byte-identical below.  */
2391 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2392 for a call to a function whose data type is FNTYPE.
2393 For a library call, FNTYPE is 0.
2395 For incoming args we set the number of arguments in the prototype large
2396 so we never return a PARALLEL. */
2399 init_cumulative_args (cum, fntype, libname, incoming)
2400 CUMULATIVE_ARGS *cum;
2402 rtx libname ATTRIBUTE_UNUSED;
2405 static CUMULATIVE_ARGS zero_cumulative;
2407 *cum = zero_cumulative;
2409 cum->fregno = FP_ARG_MIN_REG;
2410 cum->vregno = ALTIVEC_ARG_MIN_REG;
2411 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2412 cum->call_cookie = CALL_NORMAL;
2413 cum->sysv_gregno = GP_ARG_MIN_REG;
2416 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2418 else if (cum->prototype)
/* The count includes an extra slot when the value is returned in
   memory (hidden return-pointer argument).  */
2419 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2420 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2421 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2424 cum->nargs_prototype = 0;
2426 cum->orig_nargs = cum->nargs_prototype;
2428 /* Check for longcall's */
2429 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2430 cum->call_cookie = CALL_LONG;
2432 if (TARGET_DEBUG_ARG)
2434 fprintf (stderr, "\ninit_cumulative_args:");
2437 tree ret_type = TREE_TYPE (fntype);
2438 fprintf (stderr, " ret code = %s,",
2439 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2442 if (cum->call_cookie & CALL_LONG)
2443 fprintf (stderr, " longcall,");
2445 fprintf (stderr, " proto = %d, nargs = %d\n",
2446 cum->prototype, cum->nargs_prototype);
2450 /* If defined, a C expression which determines whether, and in which
2451 direction, to pad out an argument with extra space. The value
2452 should be of type `enum direction': either `upward' to pad above
2453 the argument, `downward' to pad below, or `none' to inhibit
2456 For the AIX ABI structs are always stored left shifted in their
/* FUNCTION_ARG_PADDING: choose `upward' or `downward' padding for an
   argument of mode MODE and type TYPE.  Aggregates are special-cased
   first (AIX keeps structs left-justified, per the comment above);
   otherwise the generic small-argument rule applies.
   NOTE(review): the aggregate branch's return (original lines
   2465-2466) was dropped by extraction.  */
2460 function_arg_padding (mode, type)
2461 enum machine_mode mode;
2464 if (type != 0 && AGGREGATE_TYPE_P (type))
2467 /* This is the default definition. */
2468 return (! BYTES_BIG_ENDIAN
2471 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2472 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2473 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2474 ? downward : upward));
2477 /* If defined, a C expression that gives the alignment boundary, in bits,
2478 of an argument with the specified mode and type. If it is not defined,
2479 PARM_BOUNDARY is used for all arguments.
2481 V.4 wants long longs to be double word aligned. */
/* FUNCTION_ARG_BOUNDARY: alignment boundary, in bits, for an argument
   of mode MODE.  V.4 doubleword-aligns DImode/DFmode args; the
   AltiVec ABI aligns vector modes (the two return statements sit on
   lines elided by extraction); everything else gets PARM_BOUNDARY.  */
2484 function_arg_boundary (mode, type)
2485 enum machine_mode mode;
2486 tree type ATTRIBUTE_UNUSED;
2488 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2490 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2493 return PARM_BOUNDARY;
2496 /* Update the data in CUM to advance over an argument
2497 of mode MODE and data type TYPE.
2498 (TYPE is null for libcalls where that information may not be available.) */
/* FUNCTION_ARG_ADVANCE: step *CUM past an argument of mode MODE and
   type TYPE (TYPE may be null for libcalls), mirroring the register
   choices made by function_arg.
   NOTE(review): extraction has dropped interior lines; several
   branches below are visibly incomplete.  */
2501 function_arg_advance (cum, mode, type, named)
2502 CUMULATIVE_ARGS *cum;
2503 enum machine_mode mode;
/* One fewer prototyped argument remains to be seen.  */
2507 cum->nargs_prototype--;
/* AltiVec vector args consume a vector register while any remain.  */
2509 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2511 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2514 cum->words += RS6000_ARG_SIZE (mode, type);
2516 else if (DEFAULT_ABI == ABI_V4)
/* V.4: SFmode/DFmode use FP registers when hard float is on.  */
2518 if (TARGET_HARD_FLOAT
2519 && (mode == SFmode || mode == DFmode))
2521 if (cum->fregno <= FP_ARG_V4_MAX_REG)
/* FP arg spilled to stack: first pad to an even (doubleword) word.  */
2526 cum->words += cum->words & 1;
2527 cum->words += RS6000_ARG_SIZE (mode, type);
2533 int gregno = cum->sysv_gregno;
2535 /* Aggregates and IEEE quad get passed by reference. */
2536 if ((type && AGGREGATE_TYPE_P (type))
2540 n_words = RS6000_ARG_SIZE (mode, type);
2542 /* Long long is put in odd registers. */
2543 if (n_words == 2 && (gregno & 1) == 0)
2546 /* Long long is not split between registers and stack. */
2547 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2549 /* Long long is aligned on the stack. */
2551 cum->words += cum->words & 1;
2552 cum->words += n_words;
2555 /* Note: continuing to accumulate gregno past when we've started
2556 spilling to the stack indicates the fact that we've started
2557 spilling to the stack to expand_builtin_saveregs. */
2558 cum->sysv_gregno = gregno + n_words;
2561 if (TARGET_DEBUG_ARG)
2563 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2564 cum->words, cum->fregno);
2565 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2566 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2567 fprintf (stderr, "mode = %4s, named = %d\n",
2568 GET_MODE_NAME (mode), named);
/* Default (non-V.4) case: advance the word counter, inserting one
   pad word when a 64-bit boundary is required on 32-bit targets.  */
2573 int align = (TARGET_32BIT && (cum->words & 1) != 0
2574 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2576 cum->words += align + RS6000_ARG_SIZE (mode, type);
2578 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2581 if (TARGET_DEBUG_ARG)
2583 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2584 cum->words, cum->fregno);
2585 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2586 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2587 fprintf (stderr, "named = %d, align = %d\n", named, align);
2592 /* Determine where to put an argument to a function.
2593 Value is zero to push the argument on the stack,
2594 or a hard register in which to store the argument.
2596 MODE is the argument's machine mode.
2597 TYPE is the data type of the argument (as a tree).
2598 This is null for libcalls where that information may
2600 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2601 the preceding args and about the function being called.
2602 NAMED is nonzero if this argument is a named parameter
2603 (otherwise it is an extra parameter matching an ellipsis).
2605 On RS/6000 the first eight words of non-FP are normally in registers
2606 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2607 Under V.4, the first 8 FP args are in registers.
2609 If this is floating-point and no prototype is specified, we use
2610 both an FP and integer register (or possibly FP reg and stack). Library
2611 functions (when TYPE is zero) always have the proper types for args,
2612 so we can pass the FP value just in one register. emit_library_function
2613 doesn't support PARALLEL anyway. */
/* FUNCTION_ARG: return the rtx for where to pass an argument of mode
   MODE and type TYPE, 0 to push it on the stack, or a CONST_INT
   call-cookie marker for the VOIDmode end-of-arguments sentinel.
   See the block comment above for the full contract.
   NOTE(review): extraction has dropped interior lines; several
   conditions and branches below are visibly incomplete.  */
2616 function_arg (cum, mode, type, named)
2617 CUMULATIVE_ARGS *cum;
2618 enum machine_mode mode;
2622 enum rs6000_abi abi = DEFAULT_ABI;
2624 /* Return a marker to indicate whether CR1 needs to set or clear the
2625 bit that V.4 uses to say fp args were passed in registers.
2626 Assume that we don't need the marker for software floating point,
2627 or compiler generated library calls. */
2628 if (mode == VOIDmode)
2631 && TARGET_HARD_FLOAT
2632 && cum->nargs_prototype < 0
2633 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
/* Encode set/clear of the V.4 "FP args in registers" CR1 bit into
   the cookie.  */
2635 return GEN_INT (cum->call_cookie
2636 | ((cum->fregno == FP_ARG_MIN_REG)
2637 ? CALL_V4_SET_FP_ARGS
2638 : CALL_V4_CLEAR_FP_ARGS));
2641 return GEN_INT (cum->call_cookie);
/* Named AltiVec vector args go in vector registers.  */
2644 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2646 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2647 return gen_rtx_REG (mode, cum->vregno);
2651 else if (abi == ABI_V4)
2653 if (TARGET_HARD_FLOAT
2654 && (mode == SFmode || mode == DFmode))
2656 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2657 return gen_rtx_REG (mode, cum->fregno);
2664 int gregno = cum->sysv_gregno;
2666 /* Aggregates and IEEE quad get passed by reference. */
2667 if ((type && AGGREGATE_TYPE_P (type))
2671 n_words = RS6000_ARG_SIZE (mode, type);
2673 /* Long long is put in odd registers. */
2674 if (n_words == 2 && (gregno & 1) == 0)
2677 /* Long long is not split between registers and stack. */
2678 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2679 return gen_rtx_REG (mode, gregno);
/* Default (non-V.4) case: pad to a doubleword when the boundary
   requires it, then pick FP reg, GP reg(s), or the stack.  */
2686 int align = (TARGET_32BIT && (cum->words & 1) != 0
2687 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2688 int align_words = cum->words + align;
2690 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2693 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2696 || ((cum->nargs_prototype > 0)
2697 /* IBM AIX extended its linkage convention definition always
2698 to require FP args after register save area hole on the
2700 && (DEFAULT_ABI != ABI_AIX
2702 || (align_words < GP_ARG_NUM_REG))))
2703 return gen_rtx_REG (mode, cum->fregno);
/* Unprototyped FP arg: pass in both FP and GP regs via a PARALLEL.  */
2705 return gen_rtx_PARALLEL (mode,
2707 gen_rtx_EXPR_LIST (VOIDmode,
2708 ((align_words >= GP_ARG_NUM_REG)
2711 + RS6000_ARG_SIZE (mode, type)
2713 /* If this is partially on the stack, then
2714 we only include the portion actually
2715 in registers here. */
2716 ? gen_rtx_REG (SImode,
2717 GP_ARG_MIN_REG + align_words)
2718 : gen_rtx_REG (mode,
2719 GP_ARG_MIN_REG + align_words))),
2721 gen_rtx_EXPR_LIST (VOIDmode,
2722 gen_rtx_REG (mode, cum->fregno),
2725 else if (align_words < GP_ARG_NUM_REG)
2726 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2732 /* For an arg passed partly in registers and partly in memory,
2733 this is the number of registers used.
2734 For args passed entirely in registers or entirely in memory, zero. */
/* FUNCTION_ARG_PARTIAL_NREGS: number of registers used by an arg that
   is split between registers and memory; 0 when it is entirely in one
   or the other.  Only the default (non-V.4) GP-register straddle case
   produces a nonzero result in the visible code.
   NOTE(review): extraction dropped the early-return lines of the
   first two branches.  */
2737 function_arg_partial_nregs (cum, mode, type, named)
2738 CUMULATIVE_ARGS *cum;
2739 enum machine_mode mode;
2741 int named ATTRIBUTE_UNUSED;
2743 if (DEFAULT_ABI == ABI_V4)
2746 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2747 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2749 if (cum->nargs_prototype >= 0)
/* Arg starts in GP registers but overflows them: count the words
   that still fit in registers.  */
2753 if (cum->words < GP_ARG_NUM_REG
2754 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2756 int ret = GP_ARG_NUM_REG - cum->words;
2757 if (ret && TARGET_DEBUG_ARG)
2758 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2766 /* A C expression that indicates when an argument must be passed by
2767 reference. If nonzero for an argument, a copy of that argument is
2768 made in memory and a pointer to the argument is passed instead of
2769 the argument itself. The pointer is passed in whatever way is
2770 appropriate for passing a pointer to that type.
2772 Under V.4, structures and unions are passed by reference. */
/* FUNCTION_ARG_PASS_BY_REFERENCE: nonzero when the argument must be
   passed by reference.  Under the V.4 ABI that is true for aggregates
   (and, per the elided second condition, presumably IEEE quad — TODO
   confirm against the full source).  */
2775 function_arg_pass_by_reference (cum, mode, type, named)
2776 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2777 enum machine_mode mode ATTRIBUTE_UNUSED;
2779 int named ATTRIBUTE_UNUSED;
2781 if (DEFAULT_ABI == ABI_V4
2782 && ((type && AGGREGATE_TYPE_P (type))
2785 if (TARGET_DEBUG_ARG)
2786 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2794 /* Perform any needed actions needed for a function that is receiving a
2795 variable number of arguments.
2799 MODE and TYPE are the mode and type of the current parameter.
2801 PRETEND_SIZE is a variable that should be set to the amount of stack
2802 that must be pushed by the prolog to pretend that our caller pushed
2805 Normally, this macro will push all remaining incoming registers on the
2806 stack and set PRETEND_SIZE to the length of the registers pushed. */
/* SETUP_INCOMING_VARARGS: spill the remaining incoming argument
   registers to a save area so va_arg can find them, and set
   *PRETEND_SIZE accordingly.  V.4 uses a dedicated varargs save area
   below the frame; other ABIs reuse the incoming-args area.
   NOTE(review): extraction dropped interior lines (declarations of
   fntype/stdarg_p, some braces and the move_block call head).  */
2809 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2810 CUMULATIVE_ARGS *cum;
2811 enum machine_mode mode;
2817 CUMULATIVE_ARGS next_cum;
2818 int reg_size = TARGET_32BIT ? 4 : 8;
2819 rtx save_area = NULL_RTX, mem;
2820 int first_reg_offset, set;
2824 fntype = TREE_TYPE (current_function_decl);
/* stdarg functions end their prototype with "..." rather than a
   trailing void; detect that here.  */
2825 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2826 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2827 != void_type_node));
2829 /* For varargs, we do not want to skip the dummy va_dcl argument.
2830 For stdargs, we do want to skip the last named argument. */
2833 function_arg_advance (&next_cum, mode, type, 1);
2835 if (DEFAULT_ABI == ABI_V4)
2837 /* Indicate to allocate space on the stack for varargs save area. */
2838 /* ??? Does this really have to be located at a magic spot on the
2839 stack, or can we allocate this with assign_stack_local instead. */
2840 cfun->machine->sysv_varargs_p = 1;
2842 save_area = plus_constant (virtual_stack_vars_rtx,
2843 - RS6000_VARARGS_SIZE);
2845 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
/* Non-V.4: registers are dumped into the caller's arg area.  */
2849 first_reg_offset = next_cum.words;
2850 save_area = virtual_incoming_args_rtx;
2851 cfun->machine->sysv_varargs_p = 0;
2853 if (MUST_PASS_IN_STACK (mode, type))
2854 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
/* Store any GP registers not yet consumed by named args.  */
2857 set = get_varargs_alias_set ();
2858 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2860 mem = gen_rtx_MEM (BLKmode,
2861 plus_constant (save_area,
2862 first_reg_offset * reg_size)),
2863 set_mem_alias_set (mem, set);
2864 set_mem_align (mem, BITS_PER_WORD);
2867 (GP_ARG_MIN_REG + first_reg_offset, mem,
2868 GP_ARG_NUM_REG - first_reg_offset,
2869 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2871 /* ??? Does ABI_V4 need this at all? */
2872 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2875 /* Save FP registers if needed. */
2876 if (DEFAULT_ABI == ABI_V4
2877 && TARGET_HARD_FLOAT && ! no_rtl
2878 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2880 int fregno = next_cum.fregno;
2881 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2882 rtx lab = gen_label_rtx ();
2883 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Branch around the FP stores when CR1 says no FP args were passed
   in registers.  */
2885 emit_jump_insn (gen_rtx_SET (VOIDmode,
2887 gen_rtx_IF_THEN_ELSE (VOIDmode,
2888 gen_rtx_NE (VOIDmode, cr1,
2890 gen_rtx_LABEL_REF (VOIDmode, lab),
2893 while (fregno <= FP_ARG_V4_MAX_REG)
2895 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2896 set_mem_alias_set (mem, set);
2897 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2906 /* Create the va_list data type. */
/* Build the target-specific va_list type.  For non-V.4 ABIs va_list
   is plain `char *'; for V.4 it is a one-element array of a record
   { gpr, fpr, overflow_arg_area, reg_save_area }.  */
2909 rs6000_build_va_list ()
2911 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2913 /* For AIX, prefer 'char *' because that's what the system
2914 header files like. */
2915 if (DEFAULT_ABI != ABI_V4)
2916 return build_pointer_type (char_type_node);
2918 record = make_lang_type (RECORD_TYPE);
2919 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Register counters fit in a byte; the pointer fields' types sit on
   lines elided by extraction.  */
2921 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2922 unsigned_char_type_node);
2923 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2924 unsigned_char_type_node);
2925 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2927 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2930 DECL_FIELD_CONTEXT (f_gpr) = record;
2931 DECL_FIELD_CONTEXT (f_fpr) = record;
2932 DECL_FIELD_CONTEXT (f_ovf) = record;
2933 DECL_FIELD_CONTEXT (f_sav) = record;
/* Chain the fields into the record and lay it out.  */
2935 TREE_CHAIN (record) = type_decl;
2936 TYPE_NAME (record) = type_decl;
2937 TYPE_FIELDS (record) = f_gpr;
2938 TREE_CHAIN (f_gpr) = f_fpr;
2939 TREE_CHAIN (f_fpr) = f_ovf;
2940 TREE_CHAIN (f_ovf) = f_sav;
2942 layout_type (record);
2944 /* The correct type is an array type of one element. */
2945 return build_array_type (record, build_index_type (size_zero_node));
2948 /* Implement va_start. */
/* Expand __builtin_va_start.  Non-V.4 ABIs defer to the generic
   expander; V.4 fills in the four va_list fields: the gpr/fpr
   counters, the overflow area pointer, and the register save area
   pointer.  */
2951 rs6000_va_start (stdarg_p, valist, nextarg)
2956 HOST_WIDE_INT words, n_gpr, n_fpr;
2957 tree f_gpr, f_fpr, f_ovf, f_sav;
2958 tree gpr, fpr, ovf, sav, t;
2960 /* Only SVR4 needs something special. */
2961 if (DEFAULT_ABI != ABI_V4)
2963 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
/* Pick apart the va_list record built by rs6000_build_va_list.  */
2967 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
2968 f_fpr = TREE_CHAIN (f_gpr);
2969 f_ovf = TREE_CHAIN (f_fpr);
2970 f_sav = TREE_CHAIN (f_ovf);
2972 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
2973 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
2974 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
2975 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
2976 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
2978 /* Count number of gp and fp argument registers used. */
2979 words = current_function_args_info.words;
2980 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
2981 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
2983 if (TARGET_DEBUG_ARG)
2985 fputs ("va_start: words = ", stderr);
2986 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
2987 fputs (", n_gpr = ", stderr);
2988 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
2989 fputs (", n_fpr = ", stderr);
2990 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
2991 putc ('\n', stderr);
/* Store the register-use counters into the va_list.  */
2994 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
2995 TREE_SIDE_EFFECTS (t) = 1;
2996 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2998 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
2999 TREE_SIDE_EFFECTS (t) = 1;
3000 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3002 /* Find the overflow area. */
3003 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3005 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3006 build_int_2 (words * UNITS_PER_WORD, 0));
3007 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3008 TREE_SIDE_EFFECTS (t) = 1;
3009 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3011 /* Find the register save area. */
3012 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3013 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3014 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3015 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3016 TREE_SIDE_EFFECTS (t) = 1;
3017 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3020 /* Implement va_arg. */
/* Expand __builtin_va_arg for TYPE.  Non-V.4 ABIs handle the
   AIX left-alignment quirk and bump the `char *' ap; V.4 pulls the
   value from the register save area when registers remain, otherwise
   from the overflow area.  Returns an rtx addressing the value.
   NOTE(review): extraction has dropped many interior lines (branch
   bodies, sav_ofs/sav_scale assignments); comments describe only the
   visible code.  */
3023 rs6000_va_arg (valist, type)
3026 tree f_gpr, f_fpr, f_ovf, f_sav;
3027 tree gpr, fpr, ovf, sav, reg, t, u;
3028 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3029 rtx lab_false, lab_over, addr_rtx, r;
3031 /* For AIX, the rule is that structures are passed left-aligned in
3032 their stack slot. However, GCC does not presently do this:
3033 structures which are the same size as integer types are passed
3034 right-aligned, as if they were in fact integers. This only
3035 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3036 if (DEFAULT_ABI != ABI_V4)
3038 HOST_WIDE_INT align, rounded_size;
3039 enum machine_mode mode;
3042 /* Compute the rounded size of the type. */
3043 align = PARM_BOUNDARY / BITS_PER_UNIT;
3044 rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
/* Small non-BLKmode values sit right-justified in their slot, so
   the address is offset to their start.  */
3049 mode = TYPE_MODE (type);
3050 if (mode != BLKmode)
3053 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
3054 if (rounded_size > align)
3057 addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3058 build_int_2 (rounded_size - adj, 0));
3061 addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3062 addr_rtx = copy_to_reg (addr_rtx);
3064 /* Compute new value for AP. */
3065 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3066 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3067 build_int_2 (rounded_size, 0)));
3068 TREE_SIDE_EFFECTS (t) = 1;
3069 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* V.4 path: decompose the va_list record.  */
3074 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3075 f_fpr = TREE_CHAIN (f_gpr);
3076 f_ovf = TREE_CHAIN (f_fpr);
3077 f_sav = TREE_CHAIN (f_ovf);
3079 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3080 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3081 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3082 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3083 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3085 size = int_size_in_bytes (type);
3086 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3088 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3090 /* Aggregates and long doubles are passed by reference. */
3096 size = rsize = UNITS_PER_WORD;
3098 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3100 /* FP args go in FP registers, if present. */
3109 /* Otherwise into GP registers. */
3117 /* Pull the value out of the saved registers ... */
3119 lab_false = gen_label_rtx ();
3120 lab_over = gen_label_rtx ();
3121 addr_rtx = gen_reg_rtx (Pmode);
/* Jump to the overflow path when the register counter says the
   relevant registers are exhausted.  */
3123 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3124 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3127 /* Long long is aligned in the registers. */
3130 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3131 build_int_2 (n_reg - 1, 0));
3132 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3133 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3134 TREE_SIDE_EFFECTS (u) = 1;
3135 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* addr = sav + sav_ofs + (reg++ * sav_scale).  */
3139 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3143 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
3144 TREE_SIDE_EFFECTS (u) = 1;
3146 u = build1 (CONVERT_EXPR, integer_type_node, u);
3147 TREE_SIDE_EFFECTS (u) = 1;
3149 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3150 TREE_SIDE_EFFECTS (u) = 1;
3152 t = build (PLUS_EXPR, ptr_type_node, t, u);
3153 TREE_SIDE_EFFECTS (t) = 1;
3155 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3157 emit_move_insn (addr_rtx, r);
3159 emit_jump_insn (gen_jump (lab_over));
3161 emit_label (lab_false);
3163 /* ... otherwise out of the overflow area. */
3165 /* Make sure we don't find reg 7 for the next int arg. */
3168 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3169 TREE_SIDE_EFFECTS (t) = 1;
3170 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3173 /* Care for on-stack alignment if needed. */
/* Round the overflow pointer up to an 8-byte boundary.  */
3178 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
3179 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
3183 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3185 emit_move_insn (addr_rtx, r);
/* Advance ovf past the value just fetched.  */
3187 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3188 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3189 TREE_SIDE_EFFECTS (t) = 1;
3190 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3192 emit_label (lab_over);
/* By-reference values need one more dereference.  */
3196 r = gen_rtx_MEM (Pmode, addr_rtx);
3197 set_mem_alias_set (r, get_varargs_alias_set ());
3198 emit_move_insn (addr_rtx, r);
/* Register builtin NAME (declared with TYPE, dispatched on CODE) only
   when the target flags in MASK are enabled — e.g. AltiVec builtins
   exist only under MASK_ALTIVEC.  */
3206 #define def_builtin(MASK, NAME, TYPE, CODE) \
3208 if ((MASK) & target_flags) \
3209 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
/* One entry in the builtin tables below: the target-flag mask gating
   the builtin, the insn to expand it to, its user-visible name, and
   its rs6000_builtins enumerator.  */
3212 struct builtin_description
3214 const unsigned int mask;
3215 const enum insn_code icode;
3216 const char *const name;
3217 const enum rs6000_builtins code;
3220 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
/* Table of three-operand AltiVec builtins (multiply-add/sum, permute,
   select, shift-left-double-by-octet), each mapped to its insn.
   NOTE(review): the surrounding initializer braces sit on lines
   elided by extraction.  */
3222 static const struct builtin_description bdesc_3arg[] =
3224 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3225 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3226 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3227 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3228 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3229 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3230 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3231 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3232 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3233 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3234 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3235 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3236 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3237 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3238 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3239 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3240 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3241 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3242 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3243 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3244 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3245 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3246 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3249 /* DST operations: void foo (void *, const int, const char). */
/* Table of AltiVec data-stream-touch builtins (dst/dstt/dstst/dststt).
   NOTE(review): the surrounding initializer braces sit on lines
   elided by extraction.  */
3251 static const struct builtin_description bdesc_dst[] =
3253 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3254 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3255 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3256 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3259 /* Simple binary operations: VECc = foo (VECa, VECb). */
3261 static const struct builtin_description bdesc_2arg[] =
3263 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3264 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3265 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3266 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3267 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3268 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3269 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3273 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3274 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3280 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3281 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3282 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3283 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3284 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3285 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3286 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3287 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3288 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3289 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3296 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3297 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3298 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3299 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3300 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3301 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3302 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3303 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3304 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3305 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3306 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3307 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3308 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3309 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3310 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3311 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3312 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3313 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3314 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3315 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3316 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3317 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3318 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3328 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3359 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3360 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3361 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3362 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3375 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3378 /* AltiVec predicates. */
3380 struct builtin_description_predicates
3382 const unsigned int mask;
3383 const enum insn_code icode;
3385 const char *const name;
3386 const enum rs6000_builtins code;
3389 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3391 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3406 /* ABS* opreations. */
3408 static const struct builtin_description bdesc_abs[] =
3410 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3411 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3412 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3413 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3419 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3422 static const struct builtin_description bdesc_1arg[] =
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3429 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3430 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3431 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3432 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3444 altivec_expand_unop_builtin (icode, arglist, target)
3445 enum insn_code icode;
3450 tree arg0 = TREE_VALUE (arglist);
3451 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3452 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3453 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3455 /* If we got invalid arguments bail out before generating bad rtl. */
3456 if (arg0 == error_mark_node)
3460 || GET_MODE (target) != tmode
3461 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3462 target = gen_reg_rtx (tmode);
3464 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3465 op0 = copy_to_mode_reg (mode0, op0);
3467 pat = GEN_FCN (icode) (target, op0);
3476 altivec_expand_abs_builtin (icode, arglist, target)
3477 enum insn_code icode;
3481 rtx pat, scratch1, scratch2;
3482 tree arg0 = TREE_VALUE (arglist);
3483 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3484 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3485 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3487 /* If we have invalid arguments, bail out before generating bad rtl. */
3488 if (arg0 == error_mark_node)
3492 || GET_MODE (target) != tmode
3493 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3494 target = gen_reg_rtx (tmode);
3496 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3497 op0 = copy_to_mode_reg (mode0, op0);
3499 scratch1 = gen_reg_rtx (mode0);
3500 scratch2 = gen_reg_rtx (mode0);
3502 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3511 altivec_expand_binop_builtin (icode, arglist, target)
3512 enum insn_code icode;
3517 tree arg0 = TREE_VALUE (arglist);
3518 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3519 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3520 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3521 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3522 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3523 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3525 /* If we got invalid arguments bail out before generating bad rtl. */
3526 if (arg0 == error_mark_node || arg1 == error_mark_node)
3530 || GET_MODE (target) != tmode
3531 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3532 target = gen_reg_rtx (tmode);
3534 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3535 op0 = copy_to_mode_reg (mode0, op0);
3536 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3537 op1 = copy_to_mode_reg (mode1, op1);
3539 pat = GEN_FCN (icode) (target, op0, op1);
3548 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3549 enum insn_code icode;
3555 tree cr6_form = TREE_VALUE (arglist);
3556 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3557 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3558 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3559 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3560 enum machine_mode tmode = SImode;
3561 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3562 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3565 if (TREE_CODE (cr6_form) != INTEGER_CST)
3567 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3571 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3576 /* If we have invalid arguments, bail out before generating bad rtl. */
3577 if (arg0 == error_mark_node || arg1 == error_mark_node)
3581 || GET_MODE (target) != tmode
3582 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3583 target = gen_reg_rtx (tmode);
3585 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3586 op0 = copy_to_mode_reg (mode0, op0);
3587 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3588 op1 = copy_to_mode_reg (mode1, op1);
3590 scratch = gen_reg_rtx (mode0);
3592 pat = GEN_FCN (icode) (scratch, op0, op1,
3593 gen_rtx (SYMBOL_REF, Pmode, opcode));
3598 /* The vec_any* and vec_all* predicates use the same opcodes for two
3599 different operations, but the bits in CR6 will be different
3600 depending on what information we want. So we have to play tricks
3601 with CR6 to get the right bits out.
3603 If you think this is disgusting, look at the specs for the
3604 AltiVec predicates. */
3606 switch (cr6_form_int)
3609 emit_insn (gen_cr6_test_for_zero (target));
3612 emit_insn (gen_cr6_test_for_zero_reverse (target));
3615 emit_insn (gen_cr6_test_for_lt (target));
3618 emit_insn (gen_cr6_test_for_lt_reverse (target));
3621 error ("argument 1 of __builtin_altivec_predicate is out of range");
3629 altivec_expand_stv_builtin (icode, arglist)
3630 enum insn_code icode;
3633 tree arg0 = TREE_VALUE (arglist);
3634 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3635 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3636 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3637 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3638 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3640 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3641 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3642 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3644 /* Invalid arguments. Bail before doing anything stoopid! */
3645 if (arg0 == error_mark_node
3646 || arg1 == error_mark_node
3647 || arg2 == error_mark_node)
3650 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3651 op0 = copy_to_mode_reg (mode2, op0);
3652 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3653 op1 = copy_to_mode_reg (mode0, op1);
3654 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3655 op2 = copy_to_mode_reg (mode1, op2);
3657 pat = GEN_FCN (icode) (op1, op2, op0);
3664 altivec_expand_ternop_builtin (icode, arglist, target)
3665 enum insn_code icode;
3670 tree arg0 = TREE_VALUE (arglist);
3671 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3672 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3673 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3674 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3675 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3676 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3677 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3678 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3679 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3681 /* If we got invalid arguments bail out before generating bad rtl. */
3682 if (arg0 == error_mark_node
3683 || arg1 == error_mark_node
3684 || arg2 == error_mark_node)
3688 || GET_MODE (target) != tmode
3689 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3690 target = gen_reg_rtx (tmode);
3692 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3693 op0 = copy_to_mode_reg (mode0, op0);
3694 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3695 op1 = copy_to_mode_reg (mode1, op1);
3696 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3697 op2 = copy_to_mode_reg (mode2, op2);
3699 pat = GEN_FCN (icode) (target, op0, op1, op2);
3707 altivec_expand_builtin (exp, target)
3711 struct builtin_description *d;
3712 struct builtin_description_predicates *dp;
3714 enum insn_code icode;
3715 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3716 tree arglist = TREE_OPERAND (exp, 1);
3717 tree arg0, arg1, arg2;
3718 rtx op0, op1, op2, pat;
3719 enum machine_mode tmode, mode0, mode1, mode2;
3720 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3724 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3725 icode = CODE_FOR_altivec_lvx_16qi;
3726 arg0 = TREE_VALUE (arglist);
3727 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3728 tmode = insn_data[icode].operand[0].mode;
3729 mode0 = insn_data[icode].operand[1].mode;
3732 || GET_MODE (target) != tmode
3733 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3734 target = gen_reg_rtx (tmode);
3736 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3737 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3739 pat = GEN_FCN (icode) (target, op0);
3745 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3746 icode = CODE_FOR_altivec_lvx_8hi;
3747 arg0 = TREE_VALUE (arglist);
3748 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3749 tmode = insn_data[icode].operand[0].mode;
3750 mode0 = insn_data[icode].operand[1].mode;
3753 || GET_MODE (target) != tmode
3754 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3755 target = gen_reg_rtx (tmode);
3757 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3758 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3760 pat = GEN_FCN (icode) (target, op0);
3766 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3767 icode = CODE_FOR_altivec_lvx_4si;
3768 arg0 = TREE_VALUE (arglist);
3769 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3770 tmode = insn_data[icode].operand[0].mode;
3771 mode0 = insn_data[icode].operand[1].mode;
3774 || GET_MODE (target) != tmode
3775 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3776 target = gen_reg_rtx (tmode);
3778 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3779 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3781 pat = GEN_FCN (icode) (target, op0);
3787 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3788 icode = CODE_FOR_altivec_lvx_4sf;
3789 arg0 = TREE_VALUE (arglist);
3790 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3791 tmode = insn_data[icode].operand[0].mode;
3792 mode0 = insn_data[icode].operand[1].mode;
3795 || GET_MODE (target) != tmode
3796 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3797 target = gen_reg_rtx (tmode);
3799 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3800 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3802 pat = GEN_FCN (icode) (target, op0);
3808 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3809 icode = CODE_FOR_altivec_stvx_16qi;
3810 arg0 = TREE_VALUE (arglist);
3811 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3812 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3813 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3814 mode0 = insn_data[icode].operand[0].mode;
3815 mode1 = insn_data[icode].operand[1].mode;
3817 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3818 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3819 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3820 op1 = copy_to_mode_reg (mode1, op1);
3822 pat = GEN_FCN (icode) (op0, op1);
3827 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3828 icode = CODE_FOR_altivec_stvx_8hi;
3829 arg0 = TREE_VALUE (arglist);
3830 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3831 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3832 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3833 mode0 = insn_data[icode].operand[0].mode;
3834 mode1 = insn_data[icode].operand[1].mode;
3836 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3837 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3838 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3839 op1 = copy_to_mode_reg (mode1, op1);
3841 pat = GEN_FCN (icode) (op0, op1);
3846 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3847 icode = CODE_FOR_altivec_stvx_4si;
3848 arg0 = TREE_VALUE (arglist);
3849 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3850 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3851 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3852 mode0 = insn_data[icode].operand[0].mode;
3853 mode1 = insn_data[icode].operand[1].mode;
3855 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3856 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3857 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3858 op1 = copy_to_mode_reg (mode1, op1);
3860 pat = GEN_FCN (icode) (op0, op1);
3865 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3866 icode = CODE_FOR_altivec_stvx_4sf;
3867 arg0 = TREE_VALUE (arglist);
3868 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3869 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3870 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3871 mode0 = insn_data[icode].operand[0].mode;
3872 mode1 = insn_data[icode].operand[1].mode;
3874 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3875 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3876 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3877 op1 = copy_to_mode_reg (mode1, op1);
3879 pat = GEN_FCN (icode) (op0, op1);
3884 case ALTIVEC_BUILTIN_STVX:
3885 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3886 case ALTIVEC_BUILTIN_STVEBX:
3887 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3888 case ALTIVEC_BUILTIN_STVEHX:
3889 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3890 case ALTIVEC_BUILTIN_STVEWX:
3891 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3892 case ALTIVEC_BUILTIN_STVXL:
3893 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3895 case ALTIVEC_BUILTIN_MFVSCR:
3896 icode = CODE_FOR_altivec_mfvscr;
3897 tmode = insn_data[icode].operand[0].mode;
3900 || GET_MODE (target) != tmode
3901 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3902 target = gen_reg_rtx (tmode);
3904 pat = GEN_FCN (icode) (target);
3910 case ALTIVEC_BUILTIN_MTVSCR:
3911 icode = CODE_FOR_altivec_mtvscr;
3912 arg0 = TREE_VALUE (arglist);
3913 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3914 mode0 = insn_data[icode].operand[0].mode;
3916 /* If we got invalid arguments bail out before generating bad rtl. */
3917 if (arg0 == error_mark_node)
3920 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3921 op0 = copy_to_mode_reg (mode0, op0);
3923 pat = GEN_FCN (icode) (op0);
3928 case ALTIVEC_BUILTIN_DSSALL:
3929 emit_insn (gen_altivec_dssall ());
3932 case ALTIVEC_BUILTIN_DSS:
3933 icode = CODE_FOR_altivec_dss;
3934 arg0 = TREE_VALUE (arglist);
3935 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3936 mode0 = insn_data[icode].operand[0].mode;
3938 /* If we got invalid arguments bail out before generating bad rtl. */
3939 if (arg0 == error_mark_node)
3942 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3943 op0 = copy_to_mode_reg (mode0, op0);
3945 emit_insn (gen_altivec_dss (op0));
3949 /* Handle DST variants. */
3950 d = (struct builtin_description *) bdesc_dst;
3951 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
3952 if (d->code == fcode)
3954 arg0 = TREE_VALUE (arglist);
3955 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3956 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3957 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3958 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3959 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3960 mode0 = insn_data[d->icode].operand[0].mode;
3961 mode1 = insn_data[d->icode].operand[1].mode;
3962 mode2 = insn_data[d->icode].operand[2].mode;
3964 /* Invalid arguments, bail out before generating bad rtl. */
3965 if (arg0 == error_mark_node
3966 || arg1 == error_mark_node
3967 || arg2 == error_mark_node)
3970 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
3971 op0 = copy_to_mode_reg (mode0, op0);
3972 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
3973 op1 = copy_to_mode_reg (mode1, op1);
3975 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
3977 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
3981 pat = GEN_FCN (d->icode) (op0, op1, op2);
3988 /* Expand abs* operations. */
3989 d = (struct builtin_description *) bdesc_abs;
3990 for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
3991 if (d->code == fcode)
3992 return altivec_expand_abs_builtin (d->icode, arglist, target);
3994 /* Handle simple unary operations. */
3995 d = (struct builtin_description *) bdesc_1arg;
3996 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
3997 if (d->code == fcode)
3998 return altivec_expand_unop_builtin (d->icode, arglist, target);
4000 /* Handle simple binary operations. */
4001 d = (struct builtin_description *) bdesc_2arg;
4002 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4003 if (d->code == fcode)
4004 return altivec_expand_binop_builtin (d->icode, arglist, target);
4006 /* Expand the AltiVec predicates. */
4007 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4008 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4009 if (dp->code == fcode)
4010 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4012 /* LV* are funky. We initialized them differently. */
4015 case ALTIVEC_BUILTIN_LVSL:
4016 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4018 case ALTIVEC_BUILTIN_LVSR:
4019 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4021 case ALTIVEC_BUILTIN_LVEBX:
4022 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4024 case ALTIVEC_BUILTIN_LVEHX:
4025 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4027 case ALTIVEC_BUILTIN_LVEWX:
4028 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4030 case ALTIVEC_BUILTIN_LVXL:
4031 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4033 case ALTIVEC_BUILTIN_LVX:
4034 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4041 /* Handle simple ternary operations. */
4042 d = (struct builtin_description *) bdesc_3arg;
4043 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4044 if (d->code == fcode)
4045 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4051 /* Expand an expression EXP that calls a built-in function,
4052 with result going to TARGET if that's convenient
4053 (and in mode MODE if that's convenient).
4054 SUBTARGET may be used as the target for computing one of EXP's operands.
4055 IGNORE is nonzero if the value is to be ignored. */
4058 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4061 rtx subtarget ATTRIBUTE_UNUSED;
4062 enum machine_mode mode ATTRIBUTE_UNUSED;
4063 int ignore ATTRIBUTE_UNUSED;
4066 return altivec_expand_builtin (exp, target);
4072 rs6000_init_builtins ()
4075 altivec_init_builtins ();
4079 altivec_init_builtins (void)
4081 struct builtin_description *d;
4082 struct builtin_description_predicates *dp;
4085 tree endlink = void_list_node;
4087 tree pint_type_node = build_pointer_type (integer_type_node);
4088 tree pvoid_type_node = build_pointer_type (void_type_node);
4089 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4090 tree pchar_type_node = build_pointer_type (char_type_node);
4091 tree pfloat_type_node = build_pointer_type (float_type_node);
4093 tree v4sf_ftype_v4sf_v4sf_v16qi
4094 = build_function_type (V4SF_type_node,
4095 tree_cons (NULL_TREE, V4SF_type_node,
4096 tree_cons (NULL_TREE, V4SF_type_node,
4097 tree_cons (NULL_TREE,
4100 tree v4si_ftype_v4si_v4si_v16qi
4101 = build_function_type (V4SI_type_node,
4102 tree_cons (NULL_TREE, V4SI_type_node,
4103 tree_cons (NULL_TREE, V4SI_type_node,
4104 tree_cons (NULL_TREE,
4107 tree v8hi_ftype_v8hi_v8hi_v16qi
4108 = build_function_type (V8HI_type_node,
4109 tree_cons (NULL_TREE, V8HI_type_node,
4110 tree_cons (NULL_TREE, V8HI_type_node,
4111 tree_cons (NULL_TREE,
4114 tree v16qi_ftype_v16qi_v16qi_v16qi
4115 = build_function_type (V16QI_type_node,
4116 tree_cons (NULL_TREE, V16QI_type_node,
4117 tree_cons (NULL_TREE, V16QI_type_node,
4118 tree_cons (NULL_TREE,
4122 /* V4SI foo (char). */
4123 tree v4si_ftype_char
4124 = build_function_type (V4SI_type_node,
4125 tree_cons (NULL_TREE, char_type_node, endlink));
4127 /* V8HI foo (char). */
4128 tree v8hi_ftype_char
4129 = build_function_type (V8HI_type_node,
4130 tree_cons (NULL_TREE, char_type_node, endlink));
4132 /* V16QI foo (char). */
4133 tree v16qi_ftype_char
4134 = build_function_type (V16QI_type_node,
4135 tree_cons (NULL_TREE, char_type_node, endlink));
4136 /* V4SF foo (V4SF). */
4137 tree v4sf_ftype_v4sf
4138 = build_function_type (V4SF_type_node,
4139 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4141 /* V4SI foo (int *). */
4142 tree v4si_ftype_pint
4143 = build_function_type (V4SI_type_node,
4144 tree_cons (NULL_TREE, pint_type_node, endlink));
4145 /* V8HI foo (short *). */
4146 tree v8hi_ftype_pshort
4147 = build_function_type (V8HI_type_node,
4148 tree_cons (NULL_TREE, pshort_type_node, endlink));
4149 /* V16QI foo (char *). */
4150 tree v16qi_ftype_pchar
4151 = build_function_type (V16QI_type_node,
4152 tree_cons (NULL_TREE, pchar_type_node, endlink));
4153 /* V4SF foo (float *). */
4154 tree v4sf_ftype_pfloat
4155 = build_function_type (V4SF_type_node,
4156 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4158 /* V8HI foo (V16QI). */
4159 tree v8hi_ftype_v16qi
4160 = build_function_type (V8HI_type_node,
4161 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4163 /* void foo (void *, int, char/literal). */
4164 tree void_ftype_pvoid_int_char
4165 = build_function_type (void_type_node,
4166 tree_cons (NULL_TREE, pvoid_type_node,
4167 tree_cons (NULL_TREE, integer_type_node,
4168 tree_cons (NULL_TREE,
4172 /* void foo (int *, V4SI). */
4173 tree void_ftype_pint_v4si
4174 = build_function_type (void_type_node,
4175 tree_cons (NULL_TREE, pint_type_node,
4176 tree_cons (NULL_TREE, V4SI_type_node,
4178 /* void foo (short *, V8HI). */
4179 tree void_ftype_pshort_v8hi
4180 = build_function_type (void_type_node,
4181 tree_cons (NULL_TREE, pshort_type_node,
4182 tree_cons (NULL_TREE, V8HI_type_node,
4184 /* void foo (char *, V16QI). */
4185 tree void_ftype_pchar_v16qi
4186 = build_function_type (void_type_node,
4187 tree_cons (NULL_TREE, pchar_type_node,
4188 tree_cons (NULL_TREE, V16QI_type_node,
4190 /* void foo (float *, V4SF). */
4191 tree void_ftype_pfloat_v4sf
4192 = build_function_type (void_type_node,
4193 tree_cons (NULL_TREE, pfloat_type_node,
4194 tree_cons (NULL_TREE, V4SF_type_node,
4197 /* void foo (V4SI). */
4198 tree void_ftype_v4si
4199 = build_function_type (void_type_node,
4200 tree_cons (NULL_TREE, V4SI_type_node,
4203 /* void foo (vint, int, void *). */
4204 tree void_ftype_v4si_int_pvoid
4205 = build_function_type (void_type_node,
4206 tree_cons (NULL_TREE, V4SI_type_node,
4207 tree_cons (NULL_TREE, integer_type_node,
4208 tree_cons (NULL_TREE,
4212 /* void foo (vchar, int, void *). */
4213 tree void_ftype_v16qi_int_pvoid
4214 = build_function_type (void_type_node,
4215 tree_cons (NULL_TREE, V16QI_type_node,
4216 tree_cons (NULL_TREE, integer_type_node,
4217 tree_cons (NULL_TREE,
4221 /* void foo (vshort, int, void *). */
4222 tree void_ftype_v8hi_int_pvoid
4223 = build_function_type (void_type_node,
4224 tree_cons (NULL_TREE, V8HI_type_node,
4225 tree_cons (NULL_TREE, integer_type_node,
4226 tree_cons (NULL_TREE,
4230 /* void foo (char). */
4232 = build_function_type (void_type_node,
4233 tree_cons (NULL_TREE, char_type_node,
4236 /* void foo (void). */
4237 tree void_ftype_void
4238 = build_function_type (void_type_node, void_list_node);
4240 /* vshort foo (void). */
4241 tree v8hi_ftype_void
4242 = build_function_type (V8HI_type_node, void_list_node);
4244 tree v4si_ftype_v4si_v4si
4245 = build_function_type (V4SI_type_node,
4246 tree_cons (NULL_TREE, V4SI_type_node,
4247 tree_cons (NULL_TREE, V4SI_type_node,
4250 /* These are for the unsigned 5 bit literals. */
4252 tree v4sf_ftype_v4si_char
4253 = build_function_type (V4SF_type_node,
4254 tree_cons (NULL_TREE, V4SI_type_node,
4255 tree_cons (NULL_TREE, char_type_node,
4257 tree v4si_ftype_v4sf_char
4258 = build_function_type (V4SI_type_node,
4259 tree_cons (NULL_TREE, V4SF_type_node,
4260 tree_cons (NULL_TREE, char_type_node,
4262 tree v4si_ftype_v4si_char
4263 = build_function_type (V4SI_type_node,
4264 tree_cons (NULL_TREE, V4SI_type_node,
4265 tree_cons (NULL_TREE, char_type_node,
4267 tree v8hi_ftype_v8hi_char
4268 = build_function_type (V8HI_type_node,
4269 tree_cons (NULL_TREE, V8HI_type_node,
4270 tree_cons (NULL_TREE, char_type_node,
4272 tree v16qi_ftype_v16qi_char
4273 = build_function_type (V16QI_type_node,
4274 tree_cons (NULL_TREE, V16QI_type_node,
4275 tree_cons (NULL_TREE, char_type_node,
4278 /* These are for the unsigned 4 bit literals. */
4280 tree v16qi_ftype_v16qi_v16qi_char
4281 = build_function_type (V16QI_type_node,
4282 tree_cons (NULL_TREE, V16QI_type_node,
4283 tree_cons (NULL_TREE, V16QI_type_node,
4284 tree_cons (NULL_TREE,
4288 tree v8hi_ftype_v8hi_v8hi_char
4289 = build_function_type (V8HI_type_node,
4290 tree_cons (NULL_TREE, V8HI_type_node,
4291 tree_cons (NULL_TREE, V8HI_type_node,
4292 tree_cons (NULL_TREE,
4296 tree v4si_ftype_v4si_v4si_char
4297 = build_function_type (V4SI_type_node,
4298 tree_cons (NULL_TREE, V4SI_type_node,
4299 tree_cons (NULL_TREE, V4SI_type_node,
4300 tree_cons (NULL_TREE,
4304 tree v4sf_ftype_v4sf_v4sf_char
4305 = build_function_type (V4SF_type_node,
4306 tree_cons (NULL_TREE, V4SF_type_node,
4307 tree_cons (NULL_TREE, V4SF_type_node,
4308 tree_cons (NULL_TREE,
4312 /* End of 4 bit literals. */
4314 tree v4sf_ftype_v4sf_v4sf
4315 = build_function_type (V4SF_type_node,
4316 tree_cons (NULL_TREE, V4SF_type_node,
4317 tree_cons (NULL_TREE, V4SF_type_node,
4319 tree v4sf_ftype_v4sf_v4sf_v4si
4320 = build_function_type (V4SF_type_node,
4321 tree_cons (NULL_TREE, V4SF_type_node,
4322 tree_cons (NULL_TREE, V4SF_type_node,
4323 tree_cons (NULL_TREE,
4326 tree v4sf_ftype_v4sf_v4sf_v4sf
4327 = build_function_type (V4SF_type_node,
4328 tree_cons (NULL_TREE, V4SF_type_node,
4329 tree_cons (NULL_TREE, V4SF_type_node,
4330 tree_cons (NULL_TREE,
4333 tree v4si_ftype_v4si_v4si_v4si
4334 = build_function_type (V4SI_type_node,
4335 tree_cons (NULL_TREE, V4SI_type_node,
4336 tree_cons (NULL_TREE, V4SI_type_node,
4337 tree_cons (NULL_TREE,
4341 tree v8hi_ftype_v8hi_v8hi
4342 = build_function_type (V8HI_type_node,
4343 tree_cons (NULL_TREE, V8HI_type_node,
4344 tree_cons (NULL_TREE, V8HI_type_node,
4346 tree v8hi_ftype_v8hi_v8hi_v8hi
4347 = build_function_type (V8HI_type_node,
4348 tree_cons (NULL_TREE, V8HI_type_node,
4349 tree_cons (NULL_TREE, V8HI_type_node,
4350 tree_cons (NULL_TREE,
4353 tree v4si_ftype_v8hi_v8hi_v4si
4354 = build_function_type (V4SI_type_node,
4355 tree_cons (NULL_TREE, V8HI_type_node,
4356 tree_cons (NULL_TREE, V8HI_type_node,
4357 tree_cons (NULL_TREE,
4360 tree v4si_ftype_v16qi_v16qi_v4si
4361 = build_function_type (V4SI_type_node,
4362 tree_cons (NULL_TREE, V16QI_type_node,
4363 tree_cons (NULL_TREE, V16QI_type_node,
4364 tree_cons (NULL_TREE,
4368 tree v16qi_ftype_v16qi_v16qi
4369 = build_function_type (V16QI_type_node,
4370 tree_cons (NULL_TREE, V16QI_type_node,
4371 tree_cons (NULL_TREE, V16QI_type_node,
4374 tree v4si_ftype_v4sf_v4sf
4375 = build_function_type (V4SI_type_node,
4376 tree_cons (NULL_TREE, V4SF_type_node,
4377 tree_cons (NULL_TREE, V4SF_type_node,
4380 tree v4si_ftype_v4si
4381 = build_function_type (V4SI_type_node,
4382 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4384 tree v8hi_ftype_v8hi
4385 = build_function_type (V8HI_type_node,
4386 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4388 tree v16qi_ftype_v16qi
4389 = build_function_type (V16QI_type_node,
4390 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4392 tree v8hi_ftype_v16qi_v16qi
4393 = build_function_type (V8HI_type_node,
4394 tree_cons (NULL_TREE, V16QI_type_node,
4395 tree_cons (NULL_TREE, V16QI_type_node,
4398 tree v4si_ftype_v8hi_v8hi
4399 = build_function_type (V4SI_type_node,
4400 tree_cons (NULL_TREE, V8HI_type_node,
4401 tree_cons (NULL_TREE, V8HI_type_node,
4404 tree v8hi_ftype_v4si_v4si
4405 = build_function_type (V8HI_type_node,
4406 tree_cons (NULL_TREE, V4SI_type_node,
4407 tree_cons (NULL_TREE, V4SI_type_node,
4410 tree v16qi_ftype_v8hi_v8hi
4411 = build_function_type (V16QI_type_node,
4412 tree_cons (NULL_TREE, V8HI_type_node,
4413 tree_cons (NULL_TREE, V8HI_type_node,
4416 tree v4si_ftype_v16qi_v4si
4417 = build_function_type (V4SI_type_node,
4418 tree_cons (NULL_TREE, V16QI_type_node,
4419 tree_cons (NULL_TREE, V4SI_type_node,
4422 tree v4si_ftype_v16qi_v16qi
4423 = build_function_type (V4SI_type_node,
4424 tree_cons (NULL_TREE, V16QI_type_node,
4425 tree_cons (NULL_TREE, V16QI_type_node,
4428 tree v4si_ftype_v8hi_v4si
4429 = build_function_type (V4SI_type_node,
4430 tree_cons (NULL_TREE, V8HI_type_node,
4431 tree_cons (NULL_TREE, V4SI_type_node,
4434 tree v4si_ftype_v8hi
4435 = build_function_type (V4SI_type_node,
4436 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4438 tree int_ftype_v4si_v4si
4439 = build_function_type (integer_type_node,
4440 tree_cons (NULL_TREE, V4SI_type_node,
4441 tree_cons (NULL_TREE, V4SI_type_node,
4444 tree int_ftype_v4sf_v4sf
4445 = build_function_type (integer_type_node,
4446 tree_cons (NULL_TREE, V4SF_type_node,
4447 tree_cons (NULL_TREE, V4SF_type_node,
4450 tree int_ftype_v16qi_v16qi
4451 = build_function_type (integer_type_node,
4452 tree_cons (NULL_TREE, V16QI_type_node,
4453 tree_cons (NULL_TREE, V16QI_type_node,
4456 tree int_ftype_int_v4si_v4si
4457 = build_function_type
4459 tree_cons (NULL_TREE, integer_type_node,
4460 tree_cons (NULL_TREE, V4SI_type_node,
4461 tree_cons (NULL_TREE, V4SI_type_node,
4464 tree int_ftype_int_v4sf_v4sf
4465 = build_function_type
4467 tree_cons (NULL_TREE, integer_type_node,
4468 tree_cons (NULL_TREE, V4SF_type_node,
4469 tree_cons (NULL_TREE, V4SF_type_node,
4472 tree int_ftype_int_v8hi_v8hi
4473 = build_function_type
4475 tree_cons (NULL_TREE, integer_type_node,
4476 tree_cons (NULL_TREE, V8HI_type_node,
4477 tree_cons (NULL_TREE, V8HI_type_node,
4480 tree int_ftype_int_v16qi_v16qi
4481 = build_function_type
4483 tree_cons (NULL_TREE, integer_type_node,
4484 tree_cons (NULL_TREE, V16QI_type_node,
4485 tree_cons (NULL_TREE, V16QI_type_node,
4488 tree v16qi_ftype_int_pvoid
4489 = build_function_type (V16QI_type_node,
4490 tree_cons (NULL_TREE, integer_type_node,
4491 tree_cons (NULL_TREE, pvoid_type_node,
4494 tree v4si_ftype_int_pvoid
4495 = build_function_type (V4SI_type_node,
4496 tree_cons (NULL_TREE, integer_type_node,
4497 tree_cons (NULL_TREE, pvoid_type_node,
4500 tree v8hi_ftype_int_pvoid
4501 = build_function_type (V8HI_type_node,
4502 tree_cons (NULL_TREE, integer_type_node,
4503 tree_cons (NULL_TREE, pvoid_type_node,
4506 tree int_ftype_v8hi_v8hi
4507 = build_function_type (integer_type_node,
4508 tree_cons (NULL_TREE, V8HI_type_node,
4509 tree_cons (NULL_TREE, V8HI_type_node,
4512 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4513 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4514 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4515 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4516 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4517 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4518 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4519 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4520 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4521 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4522 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4523 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4524 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4525 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4526 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4527 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4528 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4529 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4530 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4531 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4532 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4533 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4534 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4535 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4537 /* Add the simple ternary operators. */
4538 d = (struct builtin_description *) bdesc_3arg;
4539 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4542 enum machine_mode mode0, mode1, mode2, mode3;
4548 mode0 = insn_data[d->icode].operand[0].mode;
4549 mode1 = insn_data[d->icode].operand[1].mode;
4550 mode2 = insn_data[d->icode].operand[2].mode;
4551 mode3 = insn_data[d->icode].operand[3].mode;
4553 /* When all four are of the same mode. */
4554 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4559 type = v4si_ftype_v4si_v4si_v4si;
4562 type = v4sf_ftype_v4sf_v4sf_v4sf;
4565 type = v8hi_ftype_v8hi_v8hi_v8hi;
4568 type = v16qi_ftype_v16qi_v16qi_v16qi;
4574 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4579 type = v4si_ftype_v4si_v4si_v16qi;
4582 type = v4sf_ftype_v4sf_v4sf_v16qi;
4585 type = v8hi_ftype_v8hi_v8hi_v16qi;
4588 type = v16qi_ftype_v16qi_v16qi_v16qi;
4594 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4595 && mode3 == V4SImode)
4596 type = v4si_ftype_v16qi_v16qi_v4si;
4597 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4598 && mode3 == V4SImode)
4599 type = v4si_ftype_v8hi_v8hi_v4si;
4600 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4601 && mode3 == V4SImode)
4602 type = v4sf_ftype_v4sf_v4sf_v4si;
4604 /* vchar, vchar, vchar, 4 bit literal. */
4605 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4607 type = v16qi_ftype_v16qi_v16qi_char;
4609 /* vshort, vshort, vshort, 4 bit literal. */
4610 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4612 type = v8hi_ftype_v8hi_v8hi_char;
4614 /* vint, vint, vint, 4 bit literal. */
4615 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4617 type = v4si_ftype_v4si_v4si_char;
4619 /* vfloat, vfloat, vfloat, 4 bit literal. */
4620 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4622 type = v4sf_ftype_v4sf_v4sf_char;
4627 def_builtin (d->mask, d->name, type, d->code);
4630 /* Add the DST variants. */
4631 d = (struct builtin_description *) bdesc_dst;
4632 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4633 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4635 /* Initialize the predicates. */
4636 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4637 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4639 enum machine_mode mode1;
4642 mode1 = insn_data[dp->icode].operand[1].mode;
4647 type = int_ftype_int_v4si_v4si;
4650 type = int_ftype_int_v8hi_v8hi;
4653 type = int_ftype_int_v16qi_v16qi;
4656 type = int_ftype_int_v4sf_v4sf;
4662 def_builtin (dp->mask, dp->name, type, dp->code);
4665 /* Add the simple binary operators. */
4666 d = (struct builtin_description *) bdesc_2arg;
4667 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4669 enum machine_mode mode0, mode1, mode2;
4675 mode0 = insn_data[d->icode].operand[0].mode;
4676 mode1 = insn_data[d->icode].operand[1].mode;
4677 mode2 = insn_data[d->icode].operand[2].mode;
4679 /* When all three operands are of the same mode. */
4680 if (mode0 == mode1 && mode1 == mode2)
4685 type = v4sf_ftype_v4sf_v4sf;
4688 type = v4si_ftype_v4si_v4si;
4691 type = v16qi_ftype_v16qi_v16qi;
4694 type = v8hi_ftype_v8hi_v8hi;
4701 /* A few other combos we really don't want to do manually. */
4703 /* vint, vfloat, vfloat. */
4704 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4705 type = v4si_ftype_v4sf_v4sf;
4707 /* vshort, vchar, vchar. */
4708 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4709 type = v8hi_ftype_v16qi_v16qi;
4711 /* vint, vshort, vshort. */
4712 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4713 type = v4si_ftype_v8hi_v8hi;
4715 /* vshort, vint, vint. */
4716 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4717 type = v8hi_ftype_v4si_v4si;
4719 /* vchar, vshort, vshort. */
4720 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4721 type = v16qi_ftype_v8hi_v8hi;
4723 /* vint, vchar, vint. */
4724 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4725 type = v4si_ftype_v16qi_v4si;
4727 /* vint, vchar, vchar. */
4728 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4729 type = v4si_ftype_v16qi_v16qi;
4731 /* vint, vshort, vint. */
4732 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4733 type = v4si_ftype_v8hi_v4si;
4735 /* vint, vint, 5 bit literal. */
4736 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4737 type = v4si_ftype_v4si_char;
4739 /* vshort, vshort, 5 bit literal. */
4740 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4741 type = v8hi_ftype_v8hi_char;
4743 /* vchar, vchar, 5 bit literal. */
4744 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4745 type = v16qi_ftype_v16qi_char;
4747 /* vfloat, vint, 5 bit literal. */
4748 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4749 type = v4sf_ftype_v4si_char;
4751 /* vint, vfloat, 5 bit literal. */
4752 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4753 type = v4si_ftype_v4sf_char;
4756 else if (mode0 == SImode)
4761 type = int_ftype_v4si_v4si;
4764 type = int_ftype_v4sf_v4sf;
4767 type = int_ftype_v16qi_v16qi;
4770 type = int_ftype_v8hi_v8hi;
4780 def_builtin (d->mask, d->name, type, d->code);
4783 /* Initialize the abs* operators. */
4784 d = (struct builtin_description *) bdesc_abs;
4785 for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4787 enum machine_mode mode0;
4790 mode0 = insn_data[d->icode].operand[0].mode;
4795 type = v4si_ftype_v4si;
4798 type = v8hi_ftype_v8hi;
4801 type = v16qi_ftype_v16qi;
4804 type = v4sf_ftype_v4sf;
4810 def_builtin (d->mask, d->name, type, d->code);
4813 /* Add the simple unary operators. */
4814 d = (struct builtin_description *) bdesc_1arg;
4815 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4817 enum machine_mode mode0, mode1;
4823 mode0 = insn_data[d->icode].operand[0].mode;
4824 mode1 = insn_data[d->icode].operand[1].mode;
4826 if (mode0 == V4SImode && mode1 == QImode)
4827 type = v4si_ftype_char;
4828 else if (mode0 == V8HImode && mode1 == QImode)
4829 type = v8hi_ftype_char;
4830 else if (mode0 == V16QImode && mode1 == QImode)
4831 type = v16qi_ftype_char;
4832 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4833 type = v4sf_ftype_v4sf;
4834 else if (mode0 == V8HImode && mode1 == V16QImode)
4835 type = v8hi_ftype_v16qi;
4836 else if (mode0 == V4SImode && mode1 == V8HImode)
4837 type = v4si_ftype_v8hi;
4841 def_builtin (d->mask, d->name, type, d->code);
4846 /* Generate a memory reference for expand_block_move, copying volatile,
4847 and other bits from an original memory reference. */
4850 expand_block_move_mem (mode, addr, orig_mem)
4851 enum machine_mode mode;
4855 rtx mem = gen_rtx_MEM (mode, addr);
4857 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4861 /* Expand a block move operation, and return 1 if successful. Return 0
4862 if we should let the compiler generate normal code.
4864 operands[0] is the destination
4865 operands[1] is the source
4866 operands[2] is the length
4867 operands[3] is the alignment */
/* NOTE(review): this listing is missing many lines (declarations, braces,
   and parts of several conditions); the comments added below describe only
   what the visible lines establish.  */
4869 #define MAX_MOVE_REG 4
4872 expand_block_move (operands)
4875 rtx orig_dest = operands[0];
4876 rtx orig_src = operands[1];
4877 rtx bytes_rtx = operands[2];
4878 rtx align_rtx = operands[3];
4879 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
/* Pending store insns, flushed in batches of MAX_MOVE_REG (see below).  */
4890 rtx stores[MAX_MOVE_REG];
4893 /* If this is not a fixed size move, just call memcpy */
4897 /* If this is not a fixed size alignment, abort */
4898 if (GET_CODE (align_rtx) != CONST_INT)
4900 align = INTVAL (align_rtx);
4902 /* Anything to move? */
4903 bytes = INTVAL (bytes_rtx);
4907 /* store_one_arg depends on expand_block_move to handle at least the size of
4908 reg_parm_stack_space. */
/* Give up on moves larger than 64 (64-bit) / 32 (32-bit) bytes.  */
4909 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4912 /* Move the address into scratch registers. */
4913 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4914 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
/* Strategy 1: POWER/PowerPC string instructions (lswi/stswi patterns),
   moving up to 32/24/16/8/4 bytes per emitted movstrsi_*reg insn.  */
4916 if (TARGET_STRING) /* string instructions are available */
4918 for ( ; bytes > 0; bytes -= move_bytes)
4920 if (bytes > 24 /* move up to 32 bytes at a time */
4928 && ! fixed_regs[12])
4930 move_bytes = (bytes > 32) ? 32 : bytes;
4931 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4934 expand_block_move_mem (BLKmode,
4937 GEN_INT ((move_bytes == 32)
4941 else if (bytes > 16 /* move up to 24 bytes at a time */
4947 && ! fixed_regs[10])
4949 move_bytes = (bytes > 24) ? 24 : bytes;
4950 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4953 expand_block_move_mem (BLKmode,
4956 GEN_INT (move_bytes),
4959 else if (bytes > 8 /* move up to 16 bytes at a time */
4965 move_bytes = (bytes > 16) ? 16 : bytes;
4966 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
4969 expand_block_move_mem (BLKmode,
4972 GEN_INT (move_bytes),
/* A single 8-byte move via a DImode register, when 64-bit and the
   alignment allows it.  */
4975 else if (bytes >= 8 && TARGET_POWERPC64
4976 /* 64-bit loads and stores require word-aligned
4978 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
4981 tmp_reg = gen_reg_rtx (DImode);
4982 emit_move_insn (tmp_reg,
4983 expand_block_move_mem (DImode,
4984 src_reg, orig_src));
4985 emit_move_insn (expand_block_move_mem (DImode,
4986 dest_reg, orig_dest),
4989 else if (bytes > 4 && !TARGET_POWERPC64)
4990 { /* move up to 8 bytes at a time */
4991 move_bytes = (bytes > 8) ? 8 : bytes;
4992 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
4995 expand_block_move_mem (BLKmode,
4998 GEN_INT (move_bytes),
5001 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5002 { /* move 4 bytes */
5004 tmp_reg = gen_reg_rtx (SImode);
5005 emit_move_insn (tmp_reg,
5006 expand_block_move_mem (SImode,
5007 src_reg, orig_src));
5008 emit_move_insn (expand_block_move_mem (SImode,
5009 dest_reg, orig_dest),
5012 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5013 { /* move 2 bytes */
5015 tmp_reg = gen_reg_rtx (HImode);
5016 emit_move_insn (tmp_reg,
5017 expand_block_move_mem (HImode,
5018 src_reg, orig_src));
5019 emit_move_insn (expand_block_move_mem (HImode,
5020 dest_reg, orig_dest),
5023 else if (bytes == 1) /* move 1 byte */
5026 tmp_reg = gen_reg_rtx (QImode);
5027 emit_move_insn (tmp_reg,
5028 expand_block_move_mem (QImode,
5029 src_reg, orig_src));
5030 emit_move_insn (expand_block_move_mem (QImode,
5031 dest_reg, orig_dest),
5035 { /* move up to 4 bytes at a time */
5036 move_bytes = (bytes > 4) ? 4 : bytes;
5037 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5040 expand_block_move_mem (BLKmode,
5043 GEN_INT (move_bytes),
/* More bytes remain: bump both scratch address registers past the chunk
   just moved (addsi3 on 32-bit, adddi3 on 64-bit).  */
5047 if (bytes > move_bytes)
5049 if (! TARGET_POWERPC64)
5051 emit_insn (gen_addsi3 (src_reg, src_reg,
5052 GEN_INT (move_bytes)));
5053 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5054 GEN_INT (move_bytes)));
5058 emit_insn (gen_adddi3 (src_reg, src_reg,
5059 GEN_INT (move_bytes)));
5060 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5061 GEN_INT (move_bytes)));
/* Strategy 2: plain scalar loads/stores.  Loads are emitted eagerly;
   the matching stores are buffered in stores[] so several loads can be
   in flight before their stores, then flushed in batches.  */
5067 else /* string instructions not available */
5069 num_reg = offset = 0;
5070 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5072 /* Calculate the correct offset for src/dest */
5076 dest_addr = dest_reg;
5080 src_addr = plus_constant (src_reg, offset);
5081 dest_addr = plus_constant (dest_reg, offset);
5084 /* Generate the appropriate load and store, saving the stores
5086 if (bytes >= 8 && TARGET_POWERPC64
5087 /* 64-bit loads and stores require word-aligned
5089 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5092 tmp_reg = gen_reg_rtx (DImode);
5093 emit_insn (gen_movdi (tmp_reg,
5094 expand_block_move_mem (DImode,
5097 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5102 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5105 tmp_reg = gen_reg_rtx (SImode);
5106 emit_insn (gen_movsi (tmp_reg,
5107 expand_block_move_mem (SImode,
5110 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5115 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5118 tmp_reg = gen_reg_rtx (HImode);
5119 emit_insn (gen_movhi (tmp_reg,
5120 expand_block_move_mem (HImode,
5123 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
/* Fallback: one byte at a time.  */
5131 tmp_reg = gen_reg_rtx (QImode);
5132 emit_insn (gen_movqi (tmp_reg,
5133 expand_block_move_mem (QImode,
5136 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
/* Flush the buffered stores once the batch is full...  */
5142 if (num_reg >= MAX_MOVE_REG)
5144 for (i = 0; i < num_reg; i++)
5145 emit_insn (stores[i]);
/* ...and flush any stores still pending at the end.  */
5150 for (i = 0; i < num_reg; i++)
5151 emit_insn (stores[i]);
5158 /* Return 1 if OP is a load multiple operation. It is known to be a
5159 PARALLEL and the first section will be tested. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, braces and
   the return statements are missing); comments describe visible lines only.  */
5162 load_multiple_operation (op, mode)
5164 enum machine_mode mode ATTRIBUTE_UNUSED;
5166 int count = XVECLEN (op, 0);
5167 unsigned int dest_regno;
5171 /* Perform a quick check so we don't blow up below. */
/* First vector element must be a SET of a REG from a MEM.  */
5173 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5174 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5175 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
/* Record the base register and the base source address from element 0.  */
5178 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5179 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
/* Every later element must load SImode reg dest_regno+i from
   (mem (plus src_addr (const_int 4*i))) — i.e. consecutive registers
   from consecutive words, the lmw shape.  */
5181 for (i = 1; i < count; i++)
5183 rtx elt = XVECEXP (op, 0, i);
5185 if (GET_CODE (elt) != SET
5186 || GET_CODE (SET_DEST (elt)) != REG
5187 || GET_MODE (SET_DEST (elt)) != SImode
5188 || REGNO (SET_DEST (elt)) != dest_regno + i
5189 || GET_CODE (SET_SRC (elt)) != MEM
5190 || GET_MODE (SET_SRC (elt)) != SImode
5191 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5192 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5193 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5194 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5201 /* Similar, but tests for store multiple. Here, the second vector element
5202 is a CLOBBER. It will be tested later. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, braces and
   return statements are missing); comments describe visible lines only.  */
5205 store_multiple_operation (op, mode)
5207 enum machine_mode mode ATTRIBUTE_UNUSED;
/* One element is the CLOBBER, hence count excludes it.  */
5209 int count = XVECLEN (op, 0) - 1;
5210 unsigned int src_regno;
5214 /* Perform a quick check so we don't blow up below. */
/* First vector element must be a SET of a MEM from a REG.  */
5216 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5217 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5218 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5221 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5222 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
/* Elements i+1 (skipping the CLOBBER at index 1) must store SImode reg
   src_regno+i to (mem (plus dest_addr (const_int 4*i))) — the stmw shape.  */
5224 for (i = 1; i < count; i++)
5226 rtx elt = XVECEXP (op, 0, i + 1);
5228 if (GET_CODE (elt) != SET
5229 || GET_CODE (SET_SRC (elt)) != REG
5230 || GET_MODE (SET_SRC (elt)) != SImode
5231 || REGNO (SET_SRC (elt)) != src_regno + i
5232 || GET_CODE (SET_DEST (elt)) != MEM
5233 || GET_MODE (SET_DEST (elt)) != SImode
5234 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5235 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5236 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5237 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5244 /* Return 1 for a parallel vrsave operation. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, braces and
   return statements are missing); comments describe visible lines only.  */
5247 vrsave_operation (op, mode)
5249 enum machine_mode mode ATTRIBUTE_UNUSED;
5251 int count = XVECLEN (op, 0);
5252 unsigned int dest_regno, src_regno;
/* First element must be a SET of a REG whose source is an UNSPEC_VOLATILE.  */
5256 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5257 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5258 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5261 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5262 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
/* Either side of the first SET must involve the VRSAVE register.  */
5264 if (dest_regno != VRSAVE_REGNO
5265 && src_regno != VRSAVE_REGNO)
/* All remaining elements may only be CLOBBERs or SETs.  */
5268 for (i = 1; i < count; i++)
5270 rtx elt = XVECEXP (op, 0, i);
5272 if (GET_CODE (elt) != CLOBBER
5273 && GET_CODE (elt) != SET)
5280 /* Return 1 for an PARALLEL suitable for mtcrf. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, local
   declarations, braces and return statements are missing); comments
   describe visible lines only.  */
5283 mtcrf_operation (op, mode)
5285 enum machine_mode mode ATTRIBUTE_UNUSED;
5287 int count = XVECLEN (op, 0);
5291 /* Perform a quick check so we don't blow up below. */
/* First element must be a SET from a two-operand UNSPEC.  */
5293 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5294 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5295 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5297 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
/* The UNSPEC's first operand must be an SImode integer register.  */
5299 if (GET_CODE (src_reg) != REG
5300 || GET_MODE (src_reg) != SImode
5301 || ! INT_REGNO_P (REGNO (src_reg)))
/* Each element must set one CR field (CCmode CR register) from an
   UNSPEC 20 of the same source register and the CR field's mask bit.  */
5304 for (i = 0; i < count; i++)
5306 rtx exp = XVECEXP (op, 0, i);
5310 if (GET_CODE (exp) != SET
5311 || GET_CODE (SET_DEST (exp)) != REG
5312 || GET_MODE (SET_DEST (exp)) != CCmode
5313 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5315 unspec = SET_SRC (exp);
/* Mask bit for this CR field, counted down from MAX_CR_REGNO.  */
5316 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5318 if (GET_CODE (unspec) != UNSPEC
5319 || XINT (unspec, 1) != 20
5320 || XVECLEN (unspec, 0) != 2
5321 || XVECEXP (unspec, 0, 0) != src_reg
5322 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5323 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5329 /* Return 1 for an PARALLEL suitable for lmw. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, some local
   declarations, braces and return statements are missing); comments
   describe visible lines only.  */
5332 lmw_operation (op, mode)
5334 enum machine_mode mode ATTRIBUTE_UNUSED;
5336 int count = XVECLEN (op, 0);
5337 unsigned int dest_regno;
5339 unsigned int base_regno;
5340 HOST_WIDE_INT offset;
5343 /* Perform a quick check so we don't blow up below. */
/* First element must be a SET of a REG from a MEM.  */
5345 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5346 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5347 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5350 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5351 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
/* lmw loads registers dest_regno..31, so the vector length must be
   exactly 32 - dest_regno.  */
5354 || count != 32 - (int) dest_regno)
/* The base address is either a bare register (r0 not allowed as base)
   or register + constant offset.  */
5357 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5360 base_regno = REGNO (src_addr);
5361 if (base_regno == 0)
5364 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5366 offset = INTVAL (XEXP (src_addr, 1));
5367 base_regno = REGNO (XEXP (src_addr, 0));
/* Each element must load SImode reg dest_regno+i from the same base
   register at offset + 4*i.  */
5372 for (i = 0; i < count; i++)
5374 rtx elt = XVECEXP (op, 0, i);
5377 HOST_WIDE_INT newoffset;
5379 if (GET_CODE (elt) != SET
5380 || GET_CODE (SET_DEST (elt)) != REG
5381 || GET_MODE (SET_DEST (elt)) != SImode
5382 || REGNO (SET_DEST (elt)) != dest_regno + i
5383 || GET_CODE (SET_SRC (elt)) != MEM
5384 || GET_MODE (SET_SRC (elt)) != SImode)
5386 newaddr = XEXP (SET_SRC (elt), 0);
5387 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5392 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5394 addr_reg = XEXP (newaddr, 0);
5395 newoffset = INTVAL (XEXP (newaddr, 1));
5399 if (REGNO (addr_reg) != base_regno
5400 || newoffset != offset + 4 * i)
5407 /* Return 1 for an PARALLEL suitable for stmw. */
/* NOTE(review): this listing has gaps (return type, `rtx op;`, some local
   declarations, braces and return statements are missing); comments
   describe visible lines only.  Mirror image of lmw_operation above.  */
5410 stmw_operation (op, mode)
5412 enum machine_mode mode ATTRIBUTE_UNUSED;
5414 int count = XVECLEN (op, 0);
5415 unsigned int src_regno;
5417 unsigned int base_regno;
5418 HOST_WIDE_INT offset;
5421 /* Perform a quick check so we don't blow up below. */
/* First element must be a SET of a MEM from a REG.  */
5423 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5424 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5425 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5428 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5429 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
/* stmw stores registers src_regno..31, so the vector length must be
   exactly 32 - src_regno.  */
5432 || count != 32 - (int) src_regno)
/* The base address is either a bare register (r0 not allowed as base)
   or register + constant offset.  */
5435 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5438 base_regno = REGNO (dest_addr);
5439 if (base_regno == 0)
5442 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5444 offset = INTVAL (XEXP (dest_addr, 1));
5445 base_regno = REGNO (XEXP (dest_addr, 0));
/* Each element must store SImode reg src_regno+i to the same base
   register at offset + 4*i.  */
5450 for (i = 0; i < count; i++)
5452 rtx elt = XVECEXP (op, 0, i);
5455 HOST_WIDE_INT newoffset;
5457 if (GET_CODE (elt) != SET
5458 || GET_CODE (SET_SRC (elt)) != REG
5459 || GET_MODE (SET_SRC (elt)) != SImode
5460 || REGNO (SET_SRC (elt)) != src_regno + i
5461 || GET_CODE (SET_DEST (elt)) != MEM
5462 || GET_MODE (SET_DEST (elt)) != SImode)
5464 newaddr = XEXP (SET_DEST (elt), 0);
5465 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5470 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5472 addr_reg = XEXP (newaddr, 0);
5473 newoffset = INTVAL (XEXP (newaddr, 1));
5477 if (REGNO (addr_reg) != base_regno
5478 || newoffset != offset + 4 * i)
5485 /* A validation routine: say whether CODE, a condition code, and MODE
5486 match. The other alternatives either don't make sense or should
5487 never be generated. */
5490 validate_condition_mode (code, mode)
5492 enum machine_mode mode;
5494 if (GET_RTX_CLASS (code) != '<'
5495 || GET_MODE_CLASS (mode) != MODE_CC)
5498 /* These don't make sense. */
5499 if ((code == GT || code == LT || code == GE || code == LE)
5500 && mode == CCUNSmode)
5503 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5504 && mode != CCUNSmode)
5507 if (mode != CCFPmode
5508 && (code == ORDERED || code == UNORDERED
5509 || code == UNEQ || code == LTGT
5510 || code == UNGT || code == UNLT
5511 || code == UNGE || code == UNLE))
5514 /* These should never be generated except for
5515 flag_unsafe_math_optimizations. */
5516 if (mode == CCFPmode
5517 && ! flag_unsafe_math_optimizations
5518 && (code == LE || code == GE
5519 || code == UNEQ || code == LTGT
5520 || code == UNGT || code == UNLT))
5523 /* These are invalid; the information is not there. */
5524 if (mode == CCEQmode
5525 && code != EQ && code != NE)
5529 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5530 We only check the opcode against the mode of the CC value here. */
5533 branch_comparison_operator (op, mode)
5535 enum machine_mode mode ATTRIBUTE_UNUSED;
5537 enum rtx_code code = GET_CODE (op);
5538 enum machine_mode cc_mode;
5540 if (GET_RTX_CLASS (code) != '<')
5543 cc_mode = GET_MODE (XEXP (op, 0));
5544 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5547 validate_condition_mode (code, cc_mode);
5552 /* Return 1 if OP is a comparison operation that is valid for a branch
5553 insn and which is true if the corresponding bit in the CC register
5557 branch_positive_comparison_operator (op, mode)
5559 enum machine_mode mode;
5563 if (! branch_comparison_operator (op, mode))
5566 code = GET_CODE (op);
5567 return (code == EQ || code == LT || code == GT
5568 || code == LTU || code == GTU
5569 || code == UNORDERED);
5572 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5573 We check the opcode against the mode of the CC value and disallow EQ or
5574 NE comparisons for integers. */
5577 scc_comparison_operator (op, mode)
5579 enum machine_mode mode;
5581 enum rtx_code code = GET_CODE (op);
5582 enum machine_mode cc_mode;
5584 if (GET_MODE (op) != mode && mode != VOIDmode)
5587 if (GET_RTX_CLASS (code) != '<')
5590 cc_mode = GET_MODE (XEXP (op, 0));
5591 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5594 validate_condition_mode (code, cc_mode);
5596 if (code == NE && cc_mode != CCFPmode)
5603 trap_comparison_operator (op, mode)
5605 enum machine_mode mode;
5607 if (mode != VOIDmode && mode != GET_MODE (op))
5609 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5613 boolean_operator (op, mode)
5615 enum machine_mode mode ATTRIBUTE_UNUSED;
5617 enum rtx_code code = GET_CODE (op);
5618 return (code == AND || code == IOR || code == XOR);
5622 boolean_or_operator (op, mode)
5624 enum machine_mode mode ATTRIBUTE_UNUSED;
5626 enum rtx_code code = GET_CODE (op);
5627 return (code == IOR || code == XOR);
/* Return 1 if OP is a signed or unsigned min/max rtx code.  */
5631 min_max_operator (op, mode)
5633 enum machine_mode mode ATTRIBUTE_UNUSED;
5635 enum rtx_code code = GET_CODE (op);
5636 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5639 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5640 mask required to convert the result of a rotate insn into a shift
5641 left insn of SHIFTOP bits. Both are known to be CONST_INT. */
5644 includes_lshift_p (shiftop, andop)
/* Build the mask of bits a left shift by SHIFTOP leaves live
   (all ones shifted left), then check ANDOP sets no bits outside it.  */
5648 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5650 shift_mask <<= INTVAL (shiftop);
5652 return (INTVAL (andop) & ~shift_mask) == 0;
5655 /* Similar, but for right shift. */
5658 includes_rshift_p (shiftop, andop)
/* Mirror of includes_lshift_p: the live-bit mask comes from shifting
   all-ones right by SHIFTOP.  */
5662 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5664 shift_mask >>= INTVAL (shiftop);
5666 return (INTVAL (andop) & ~shift_mask) == 0;
5669 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5670 to perform a left shift. It must have exactly SHIFTOP least
5671 signifigant 0's, then one or more 1's, then zero or more 0's. */
/* NOTE(review): heavily sampled extract — the lsb computations
   (c & -c style) and several returns are elided; the structure below is
   CONST_INT handling first, then a CONST_DOUBLE path that splits the
   64-bit mask into low/high HOST_WIDE_INT halves on 32-bit hosts.  */
5674 includes_rldic_lshift_p (shiftop, andop)
5678 if (GET_CODE (andop) == CONST_INT)
5680 HOST_WIDE_INT c, lsb, shift_mask;
/* All-zero and all-one masks are excluded outright.  */
5683 if (c == 0 || c == ~0)
5687 shift_mask <<= INTVAL (shiftop);
5689 /* Find the least signifigant one bit. */
5692 /* It must coincide with the LSB of the shift mask. */
5693 if (-lsb != shift_mask)
5696 /* Invert to look for the next transition (if any). */
5699 /* Remove the low group of ones (originally low group of zeros). */
5702 /* Again find the lsb, and check we have all 1's above. */
5706 else if (GET_CODE (andop) == CONST_DOUBLE
5707 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5709 HOST_WIDE_INT low, high, lsb;
5710 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5712 low = CONST_DOUBLE_LOW (andop);
/* The high half only exists separately when HOST_WIDE_INT is 32 bits.  */
5713 if (HOST_BITS_PER_WIDE_INT < 64)
5714 high = CONST_DOUBLE_HIGH (andop);
5716 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5717 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
/* Case: the mask transition lies entirely in the high word.  */
5720 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5722 shift_mask_high = ~0;
5723 if (INTVAL (shiftop) > 32)
5724 shift_mask_high <<= INTVAL (shiftop) - 32;
5728 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5735 return high == -lsb;
/* Otherwise the transition is in the low word.  */
5738 shift_mask_low = ~0;
5739 shift_mask_low <<= INTVAL (shiftop);
5743 if (-lsb != shift_mask_low)
5746 if (HOST_BITS_PER_WIDE_INT < 64)
5751 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5754 return high == -lsb;
5758 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5764 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5765 to perform a left shift. It must have SHIFTOP or more least
5766 signifigant 0's, with the remainder of the word 1's. */
/* NOTE(review): sampled extract — the lsb extraction lines and some
   returns are elided.  Same CONST_INT / CONST_DOUBLE split as
   includes_rldic_lshift_p above.  */
5769 includes_rldicr_lshift_p (shiftop, andop)
5773 if (GET_CODE (andop) == CONST_INT)
5775 HOST_WIDE_INT c, lsb, shift_mask;
5778 shift_mask <<= INTVAL (shiftop);
5781 /* Find the least signifigant one bit. */
5784 /* It must be covered by the shift mask.
5785 This test also rejects c == 0. */
5786 if ((lsb & shift_mask) == 0)
5789 /* Check we have all 1's above the transition, and reject all 1's. */
5790 return c == -lsb && lsb != 1;
5792 else if (GET_CODE (andop) == CONST_DOUBLE
5793 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5795 HOST_WIDE_INT low, lsb, shift_mask_low;
5797 low = CONST_DOUBLE_LOW (andop);
/* 32-bit hosts must examine the high half of the 64-bit constant.  */
5799 if (HOST_BITS_PER_WIDE_INT < 64)
5801 HOST_WIDE_INT high, shift_mask_high;
5803 high = CONST_DOUBLE_HIGH (andop);
5807 shift_mask_high = ~0;
5808 if (INTVAL (shiftop) > 32)
5809 shift_mask_high <<= INTVAL (shiftop) - 32;
5813 if ((lsb & shift_mask_high) == 0)
5816 return high == -lsb;
/* Transition within the low word.  */
5822 shift_mask_low = ~0;
5823 shift_mask_low <<= INTVAL (shiftop);
5827 if ((lsb & shift_mask_low) == 0)
5830 return low == -lsb && lsb != 1;
5836 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5837 for lfq and stfq insns.
5839 Note reg1 and reg2 *must* be hard registers. To be sure we will
5840 abort if we are passed pseudo registers. */
5843 registers_ok_for_quad_peep (reg1, reg2)
5846 /* We might have been passed a SUBREG. */
5847 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
/* The pair is usable only when the register numbers are consecutive.  */
5850 return (REGNO (reg1) == REGNO (reg2) - 1);
5853 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5854 addr1 and addr2 must be in consecutive memory locations
5855 (addr2 == addr1 + 8). */
/* NOTE(review): sampled extract — the failure-path `return 0;` lines
   between the checks are elided.  */
5858 addrs_ok_for_quad_peep (addr1, addr2)
5865 /* Extract an offset (if used) from the first addr. */
5866 if (GET_CODE (addr1) == PLUS)
5868 /* If not a REG, return zero. */
5869 if (GET_CODE (XEXP (addr1, 0)) != REG)
5873 reg1 = REGNO (XEXP (addr1, 0));
5874 /* The offset must be constant! */
5875 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5877 offset1 = INTVAL (XEXP (addr1, 1));
5880 else if (GET_CODE (addr1) != REG)
5884 reg1 = REGNO (addr1);
5885 /* This was a simple (mem (reg)) expression. Offset is 0. */
5889 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5890 if (GET_CODE (addr2) != PLUS)
5893 if (GET_CODE (XEXP (addr2, 0)) != REG
5894 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
/* Both addresses must be based on the same register.  */
5897 if (reg1 != REGNO (XEXP (addr2, 0)))
5900 /* The offset for the second addr must be 8 more than the first addr. */
5901 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5904 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5909 /* Return the register class of a scratch register needed to copy IN into
5910 or out of a register in CLASS in MODE. If it can be done directly,
5911 NO_REGS is returned. */
/* NOTE(review): sampled extract — the `return` statements following
   most of the tests below are elided; each test selects the class
   named in its comment.  */
5914 secondary_reload_class (class, mode, in)
5915 enum reg_class class;
5916 enum machine_mode mode ATTRIBUTE_UNUSED;
5921 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5923 /* We cannot copy a symbolic operand directly into anything
5924 other than BASE_REGS for TARGET_ELF. So indicate that a
5925 register from BASE_REGS is needed as an intermediate
5928 On Darwin, pic addresses require a load from memory, which
5929 needs a base register. */
5930 if (class != BASE_REGS
5931 && (GET_CODE (in) == SYMBOL_REF
5932 || GET_CODE (in) == HIGH
5933 || GET_CODE (in) == LABEL_REF
5934 || GET_CODE (in) == CONST))
/* Resolve IN to a hard register number where possible; pseudos are
   mapped through true_regnum.  */
5938 if (GET_CODE (in) == REG)
5941 if (regno >= FIRST_PSEUDO_REGISTER)
5943 regno = true_regnum (in);
5944 if (regno >= FIRST_PSEUDO_REGISTER)
5948 else if (GET_CODE (in) == SUBREG)
5950 regno = true_regnum (in);
5951 if (regno >= FIRST_PSEUDO_REGISTER)
5957 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
5959 if (class == GENERAL_REGS || class == BASE_REGS
5960 || (regno >= 0 && INT_REGNO_P (regno)))
5963 /* Constants, memory, and FP registers can go into FP registers. */
5964 if ((regno == -1 || FP_REGNO_P (regno))
5965 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
5968 /* Memory, and AltiVec registers can go into AltiVec registers. */
5969 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
5970 && class == ALTIVEC_REGS)
5973 /* We can copy among the CR registers. */
5974 if ((class == CR_REGS || class == CR0_REGS)
5975 && regno >= 0 && CR_REGNO_P (regno))
5978 /* Otherwise, we need GENERAL_REGS. */
5979 return GENERAL_REGS;
5982 /* Given a comparison operation, return the bit number in CCR to test. We
5983 know this is a valid comparison.
5985 SCC_P is 1 if this is for an scc. That means that %D will have been
5986 used instead of %C, so the bits will be in different places.
5988 Return -1 if OP isn't a valid comparison for some reason. */
/* NOTE(review): the function's definition line is elided from this
   extract (presumably `ccr_bit (op, scc_p)` — confirm against the full
   source), as are the switch's case labels preceding some returns.
   BASE_BIT is 4 bits per CR field, counted from CR0.  */
5995 enum rtx_code code = GET_CODE (op);
5996 enum machine_mode cc_mode;
6001 if (GET_RTX_CLASS (code) != '<')
6006 if (GET_CODE (reg) != REG
6007 || ! CR_REGNO_P (REGNO (reg)))
6010 cc_mode = GET_MODE (reg);
6011 cc_regnum = REGNO (reg);
6012 base_bit = 4 * (cc_regnum - CR0_REGNO);
6014 validate_condition_mode (code, cc_mode);
6019 return scc_p ? base_bit + 3 : base_bit + 2;
6021 return base_bit + 2;
6022 case GT: case GTU: case UNLE:
6023 return base_bit + 1;
6024 case LT: case LTU: case UNGE:
6026 case ORDERED: case UNORDERED:
6027 return base_bit + 3;
6030 /* If scc, we will have done a cror to put the bit in the
6031 unordered position. So test that bit. For integer, this is ! LT
6032 unless this is an scc insn. */
6033 return scc_p ? base_bit + 3 : base_bit;
6036 return scc_p ? base_bit + 3 : base_bit + 1;
6043 /* Return the GOT register. */
/* Marks the PIC offset table register live and flags the function as
   using it, then hands back pic_offset_table_rtx.  VALUE is unused.  */
6046 rs6000_got_register (value)
6047 rtx value ATTRIBUTE_UNUSED;
6049 /* The second flow pass currently (June 1999) can't update
6050 regs_ever_live without disturbing other parts of the compiler, so
6051 update it here to make the prolog/epilogue code happy. */
6052 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6053 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
6055 current_function_uses_pic_offset_table = 1;
6057 return pic_offset_table_rtx;
6060 /* Functions to init, mark and free struct machine_function.
6061 These will be called, via pointer variables,
6062 from push_function_context and pop_function_context. */
/* Allocate a zero-filled machine_function for function P.  */
6065 rs6000_init_machine_status (p)
6068 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
/* Release function P's machine_function, if one was allocated.
   NOTE(review): the free/NULL-reset lines are elided in this extract.  */
6072 rs6000_free_machine_status (p)
6075 if (p->machine == NULL)
6083 /* Print an operand. Recognize special options, documented below. */
/* Relocation suffix and base register for small-data references:
   EABI uses @sda21 with r0; other SVR4 sdata variants use @sdarel
   with r13.  The second pair is the non-sdata fallback.  */
6086 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6087 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6089 #define SMALL_DATA_RELOC "sda21"
6090 #define SMALL_DATA_REG 0
/* Output operand X to FILE for the assembler, honoring the %-code CODE
   from the insn output template.  Each section below implements one
   code; NOTE(review): this is a sampled extract — the `case 'X':` labels,
   `break`s and closing braces between sections are elided, so sections
   are identified here by their leading comments only.  */
6094 print_operand (file, x, code)
6102 /* These macros test for integers and extract the low-order bits. */
6104 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6105 && GET_MODE (X) == VOIDmode)
6107 #define INT_LOWPART(X) \
6108 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6113 /* Write out an instruction after the call which may be replaced
6114 with glue code by the loader. This depends on the AIX version. */
6115 asm_fprintf (file, RS6000_CALL_GLUE);
6118 /* %a is output_address. */
6121 /* If X is a constant integer whose low-order 5 bits are zero,
6122 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6123 in the AIX assembler where "sri" with a zero shift count
6124 writes a trash instruction. */
6125 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6132 /* If constant, low-order 16 bits of constant, unsigned.
6133 Otherwise, write normally. */
6135 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6137 print_operand (file, x, 0);
6141 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6142 for 64-bit mask direction. */
6143 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6146 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6150 /* There used to be a comment for 'C' reading "This is an
6151 optional cror needed for certain floating-point
6152 comparisons. Otherwise write nothing." */
6154 /* Similar, except that this is for an scc, so we must be able to
6155 encode the test in a single bit that is one. We do the above
6156 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6157 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6158 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6160 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6162 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6164 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6167 else if (GET_CODE (x) == NE)
6169 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6171 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6172 base_bit + 2, base_bit + 2);
6177 /* X is a CR register. Print the number of the EQ bit of the CR */
6178 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6179 output_operand_lossage ("invalid %%E value");
6181 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6185 /* X is a CR register. Print the shift count needed to move it
6186 to the high-order four bits. */
6187 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6188 output_operand_lossage ("invalid %%f value");
6190 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6194 /* Similar, but print the count for the rotate in the opposite
6196 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6197 output_operand_lossage ("invalid %%F value");
6199 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6203 /* X is a constant integer. If it is negative, print "m",
6204 otherwise print "z". This is to make a aze or ame insn. */
6205 if (GET_CODE (x) != CONST_INT)
6206 output_operand_lossage ("invalid %%G value");
6207 else if (INTVAL (x) >= 0)
6214 /* If constant, output low-order five bits. Otherwise, write
6217 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6219 print_operand (file, x, 0);
6223 /* If constant, output low-order six bits. Otherwise, write
6226 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6228 print_operand (file, x, 0);
6232 /* Print `i' if this is a constant, else nothing. */
6238 /* Write the bit number in CCR for jump. */
6241 output_operand_lossage ("invalid %%j code");
6243 fprintf (file, "%d", i);
6247 /* Similar, but add one for shift count in rlinm for scc and pass
6248 scc flag to `ccr_bit'. */
6251 output_operand_lossage ("invalid %%J code");
6253 /* If we want bit 31, write a shift count of zero, not 32. */
6254 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6258 /* X must be a constant. Write the 1's complement of the
6261 output_operand_lossage ("invalid %%k value");
6263 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6267 /* X must be a symbolic constant on ELF. Write an
6268 expression suitable for an 'addi' that adds in the low 16
6270 if (GET_CODE (x) != CONST)
6272 print_operand_address (file, x);
6277 if (GET_CODE (XEXP (x, 0)) != PLUS
6278 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6279 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6280 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6281 output_operand_lossage ("invalid %%K value");
6282 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6284 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6288 /* %l is output_asm_label. */
6291 /* Write second word of DImode or DFmode reference. Works on register
6292 or non-indexed memory only. */
6293 if (GET_CODE (x) == REG)
6294 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6295 else if (GET_CODE (x) == MEM)
6297 /* Handle possible auto-increment. Since it is pre-increment and
6298 we have already done it, we can just use an offset of word. */
6299 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6300 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6301 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6304 output_address (XEXP (adjust_address_nv (x, SImode,
6308 if (small_data_operand (x, GET_MODE (x)))
6309 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6310 reg_names[SMALL_DATA_REG]);
6315 /* MB value for a mask operand. */
6316 if (! mask_operand (x, VOIDmode))
6317 output_operand_lossage ("invalid %%m value");
6319 val = INT_LOWPART (x);
6321 /* If the high bit is set and the low bit is not, the value is zero.
6322 If the high bit is zero, the value is the first 1 bit we find from
6324 if ((val & 0x80000000) && ((val & 1) == 0))
6329 else if ((val & 0x80000000) == 0)
6331 for (i = 1; i < 32; i++)
6332 if ((val <<= 1) & 0x80000000)
6334 fprintf (file, "%d", i);
6338 /* Otherwise, look for the first 0 bit from the right. The result is its
6339 number plus 1. We know the low-order bit is one. */
6340 for (i = 0; i < 32; i++)
6341 if (((val >>= 1) & 1) == 0)
6344 /* If we ended in ...01, i would be 0. The correct value is 31, so
6346 fprintf (file, "%d", 31 - i);
6350 /* ME value for a mask operand. */
6351 if (! mask_operand (x, VOIDmode))
6352 output_operand_lossage ("invalid %%M value");
6354 val = INT_LOWPART (x);
6356 /* If the low bit is set and the high bit is not, the value is 31.
6357 If the low bit is zero, the value is the first 1 bit we find from
6359 if ((val & 1) && ((val & 0x80000000) == 0))
6364 else if ((val & 1) == 0)
6366 for (i = 0; i < 32; i++)
6367 if ((val >>= 1) & 1)
6370 /* If we had ....10, i would be 0. The result should be
6371 30, so we need 30 - i. */
6372 fprintf (file, "%d", 30 - i);
6376 /* Otherwise, look for the first 0 bit from the left. The result is its
6377 number minus 1. We know the high-order bit is one. */
6378 for (i = 0; i < 32; i++)
6379 if (((val <<= 1) & 0x80000000) == 0)
6382 fprintf (file, "%d", i);
6385 /* %n outputs the negative of its operand. */
6388 /* Write the number of elements in the vector times 4. */
6389 if (GET_CODE (x) != PARALLEL)
6390 output_operand_lossage ("invalid %%N value");
6392 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6396 /* Similar, but subtract 1 first. */
6397 if (GET_CODE (x) != PARALLEL)
6398 output_operand_lossage ("invalid %%O value");
6400 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6404 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6406 || INT_LOWPART (x) < 0
6407 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6408 output_operand_lossage ("invalid %%p value");
6410 fprintf (file, "%d", i);
6414 /* The operand must be an indirect memory reference. The result
6415 is the register number. */
6416 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6417 || REGNO (XEXP (x, 0)) >= 32)
6418 output_operand_lossage ("invalid %%P value");
6420 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6424 /* This outputs the logical code corresponding to a boolean
6425 expression. The expression may have one or both operands
6426 negated (if one, only the first one). For condition register
6427 logical operations, it will also treat the negated
6428 CR codes as NOTs, but not handle NOTs of them. */
6430 const char *const *t = 0;
6432 enum rtx_code code = GET_CODE (x);
6433 static const char * const tbl[3][3] = {
6434 { "and", "andc", "nor" },
6435 { "or", "orc", "nand" },
6436 { "xor", "eqv", "xor" } };
6440 else if (code == IOR)
6442 else if (code == XOR)
6445 output_operand_lossage ("invalid %%q value");
6447 if (GET_CODE (XEXP (x, 0)) != NOT)
6451 if (GET_CODE (XEXP (x, 1)) == NOT)
6462 /* X is a CR register. Print the mask for `mtcrf'. */
6463 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6464 output_operand_lossage ("invalid %%R value");
6466 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6470 /* Low 5 bits of 32 - value */
6472 output_operand_lossage ("invalid %%s value");
6474 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6478 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6479 CONST_INT 32-bit mask is considered sign-extended so any
6480 transition must occur within the CONST_INT, not on the boundary. */
6481 if (! mask64_operand (x, VOIDmode))
6482 output_operand_lossage ("invalid %%S value");
6484 val = INT_LOWPART (x); 
6486 if (val & 1) /* Clear Left */
6488 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6489 if (!((val >>= 1) & 1))
6492 #if HOST_BITS_PER_WIDE_INT == 32
6493 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6495 val = CONST_DOUBLE_HIGH (x);
6500 for (i = 32; i < 64; i++)
6501 if (!((val >>= 1) & 1))
6505 /* i = index of last set bit from right
6506 mask begins at 63 - i from left */
6508 output_operand_lossage ("%%S computed all 1's mask");
6510 fprintf (file, "%d", 63 - i);
6513 else /* Clear Right */
6515 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6516 if ((val >>= 1) & 1)
6519 #if HOST_BITS_PER_WIDE_INT == 32
6520 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6522 val = CONST_DOUBLE_HIGH (x);
6524 if (val == (HOST_WIDE_INT) -1)
6527 for (i = 32; i < 64; i++)
6528 if ((val >>= 1) & 1)
6532 /* i = index of last clear bit from right
6533 mask ends at 62 - i from left */
6535 output_operand_lossage ("%%S computed all 0's mask")
6537 fprintf (file, "%d", 62 - i);
6542 /* Print the symbolic name of a branch target register. */
6543 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6544 && REGNO (x) != COUNT_REGISTER_REGNUM))
6545 output_operand_lossage ("invalid %%T value");
6546 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6547 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6549 fputs ("ctr", file);
6553 /* High-order 16 bits of constant for use in unsigned operand. */
6555 output_operand_lossage ("invalid %%u value");
6557 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6558 (INT_LOWPART (x) >> 16) & 0xffff);
6562 /* High-order 16 bits of constant for use in signed operand. */
6564 output_operand_lossage ("invalid %%v value");
6566 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6567 (INT_LOWPART (x) >> 16) & 0xffff);
6571 /* Print `u' if this has an auto-increment or auto-decrement. */
6572 if (GET_CODE (x) == MEM
6573 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6574 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6579 /* Print the trap code for this operand. */
6580 switch (GET_CODE (x))
6583 fputs ("eq", file); /* 4 */
6586 fputs ("ne", file); /* 24 */
6589 fputs ("lt", file); /* 16 */
6592 fputs ("le", file); /* 20 */
6595 fputs ("gt", file); /* 8 */
6598 fputs ("ge", file); /* 12 */
6601 fputs ("llt", file); /* 2 */
6604 fputs ("lle", file); /* 6 */
6607 fputs ("lgt", file); /* 1 */
6610 fputs ("lge", file); /* 5 */
6618 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6621 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6622 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6624 print_operand (file, x, 0);
6628 /* MB value for a PowerPC64 rldic operand. */
6629 val = (GET_CODE (x) == CONST_INT
6630 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6635 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6636 if ((val <<= 1) < 0)
6639 #if HOST_BITS_PER_WIDE_INT == 32
6640 if (GET_CODE (x) == CONST_INT && i >= 0)
6641 i += 32; /* zero-extend high-part was all 0's */
6642 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6644 val = CONST_DOUBLE_LOW (x);
6651 for ( ; i < 64; i++)
6652 if ((val <<= 1) < 0)
6657 fprintf (file, "%d", i + 1);
6661 if (GET_CODE (x) == MEM
6662 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6667 /* Like 'L', for third word of TImode */
6668 if (GET_CODE (x) == REG)
6669 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6670 else if (GET_CODE (x) == MEM)
6672 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6673 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6674 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6676 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6677 if (small_data_operand (x, GET_MODE (x)))
6678 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6679 reg_names[SMALL_DATA_REG]);
6684 /* X is a SYMBOL_REF. Write out the name preceded by a
6685 period and without any trailing data in brackets. Used for function
6686 names. If we are configured for System V (or the embedded ABI) on
6687 the PowerPC, do not emit the period, since those systems do not use
6688 TOCs and the like. */
6689 if (GET_CODE (x) != SYMBOL_REF)
6692 if (XSTR (x, 0)[0] != '.')
6694 switch (DEFAULT_ABI)
6704 case ABI_AIX_NODESC:
6710 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6712 assemble_name (file, XSTR (x, 0));
6717 /* Like 'L', for last word of TImode. */
6718 if (GET_CODE (x) == REG)
6719 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6720 else if (GET_CODE (x) == MEM)
6722 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6723 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6724 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6726 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6727 if (small_data_operand (x, GET_MODE (x)))
6728 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6729 reg_names[SMALL_DATA_REG]);
6733 /* Print AltiVec memory operand. */
6738 if (GET_CODE (x) != MEM)
6743 if (GET_CODE (tmp) == REG)
6744 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6745 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6747 if (REGNO (XEXP (tmp, 0)) == 0)
6748 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6749 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6751 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6752 reg_names[ REGNO (XEXP (tmp, 1)) ]);
/* Default (no %-code): print register name, memory address, or constant.  */
6760 if (GET_CODE (x) == REG)
6761 fprintf (file, "%s", reg_names[REGNO (x)]);
6762 else if (GET_CODE (x) == MEM)
6764 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6765 know the width from the mode. */
6766 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6767 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6768 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6769 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6770 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6771 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6773 output_address (XEXP (x, 0));
6776 output_addr_const (file, x);
6780 output_operand_lossage ("invalid %%xn code");
6784 /* Print the address of an operand. */
/* Emits X as an assembler address: plain register, symbolic constant
   (with small-data suffix when applicable), reg+reg, reg+const, ELF
   LO_SUM (@l), Darwin LO_SUM (lo16()), or a TOC constant-pool reference.
   NOTE(review): sampled extract — the #if/#endif lines separating the
   two LO_SUM variants and some abort()/brace lines are elided.  */
6787 print_operand_address (file, x)
6791 if (GET_CODE (x) == REG)
6792 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6793 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6794 || GET_CODE (x) == LABEL_REF)
6796 output_addr_const (file, x);
6797 if (small_data_operand (x, GET_MODE (x)))
6798 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6799 reg_names[SMALL_DATA_REG]);
6800 else if (TARGET_TOC)
6803 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6805 if (REGNO (XEXP (x, 0)) == 0)
6806 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6807 reg_names[ REGNO (XEXP (x, 0)) ]);
6809 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6810 reg_names[ REGNO (XEXP (x, 1)) ]);
6812 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6814 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6815 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6818 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6819 && CONSTANT_P (XEXP (x, 1)))
6821 output_addr_const (file, XEXP (x, 1));
6822 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6826 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6827 && CONSTANT_P (XEXP (x, 1)))
6829 fprintf (file, "lo16(");
6830 output_addr_const (file, XEXP (x, 1));
6831 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6834 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6836 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6838 rtx contains_minus = XEXP (x, 1);
6842 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6843 turn it into (sym) for output_addr_const. */
6844 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6845 contains_minus = XEXP (contains_minus, 0);
6847 minus = XEXP (contains_minus, 0);
6848 symref = XEXP (minus, 0);
6849 XEXP (contains_minus, 0) = symref;
/* Temporarily rename the symbol to NAME@toc for printing, then
   restore both the name and the (minus ...) node afterwards.  */
6854 name = XSTR (symref, 0);
6855 newname = alloca (strlen (name) + sizeof ("@toc"));
6856 strcpy (newname, name);
6857 strcat (newname, "@toc");
6858 XSTR (symref, 0) = newname;
6860 output_addr_const (file, XEXP (x, 1));
6862 XSTR (symref, 0) = name;
6863 XEXP (contains_minus, 0) = minus;
6866 output_addr_const (file, XEXP (x, 1));
6868 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6874 /* Target hook for assembling integer objects. The powerpc version has
6875 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6876 is defined. It also needs to handle DI-mode objects on 64-bit
/* Falls through to default_assemble_integer for anything the special
   cases below do not consume.  */
6880 rs6000_assemble_integer (x, size, aligned_p)
6885 #ifdef RELOCATABLE_NEEDS_FIXUP
6886 /* Special handling for SI values. */
6887 if (size == 4 && aligned_p)
6889 extern int in_toc_section PARAMS ((void));
6890 static int recurse = 0;
6892 /* For -mrelocatable, we mark all addresses that need to be fixed up
6893 in the .fixup section. */
6894 if (TARGET_RELOCATABLE
6895 && !in_toc_section ()
6896 && !in_text_section ()
6898 && GET_CODE (x) != CONST_INT
6899 && GET_CODE (x) != CONST_DOUBLE
/* Emit the word with an @fixup relocation and record its label in
   the .fixup section so the startup code can relocate it.  */
6905 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6907 ASM_OUTPUT_LABEL (asm_out_file, buf);
6908 fprintf (asm_out_file, "\t.long\t(");
6909 output_addr_const (asm_out_file, x);
6910 fprintf (asm_out_file, ")@fixup\n");
6911 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6912 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6913 fprintf (asm_out_file, "\t.long\t");
6914 assemble_name (asm_out_file, buf);
6915 fprintf (asm_out_file, "\n\t.previous\n");
6919 /* Remove initial .'s to turn a -mcall-aixdesc function
6920 address into the address of the descriptor, not the function
6922 else if (GET_CODE (x) == SYMBOL_REF
6923 && XSTR (x, 0)[0] == '.'
6924 && DEFAULT_ABI == ABI_AIX)
6926 const char *name = XSTR (x, 0);
6927 while (*name == '.')
6930 fprintf (asm_out_file, "\t.long\t%s\n", name);
6934 #endif /* RELOCATABLE_NEEDS_FIXUP */
6935 return default_assemble_integer (x, size, aligned_p);
/* Reverse comparison CODE for MODE.  CCFPmode comparisons must use the
   unordered-aware reversal so NaN semantics are preserved.  */
6939 rs6000_reverse_condition (mode, code)
6940 enum machine_mode mode;
6943 /* Reversal of FP compares takes care -- an ordered compare
6944 becomes an unordered compare and vice versa. */
6945 if (mode == CCFPmode)
6946 return reverse_condition_maybe_unordered (code);
6948 return reverse_condition (code);
6951 /* Generate a compare for CODE. Return a brand-new rtx that
6952 represents the result of the compare. */
/* Uses the saved rs6000_compare_op0/op1 and rs6000_compare_fp_p globals
   set by the cmpxx expanders.  Picks the CC mode (FP, unsigned, or the
   default for signed — default-mode line elided in this extract), emits
   the compare, and for IEEE-strict FP codes that need two CR bits
   (LE/GE/UNEQ/LTGT/UNGT/UNLT) emits an extra cror-style IOR into a
   CCEQmode register.  */
6955 rs6000_generate_compare (code)
6958 enum machine_mode comp_mode;
6961 if (rs6000_compare_fp_p)
6962 comp_mode = CCFPmode;
6963 else if (code == GTU || code == LTU
6964 || code == GEU || code == LEU)
6965 comp_mode = CCUNSmode;
6969 /* First, the compare. */
6970 compare_result = gen_reg_rtx (comp_mode);
6971 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
6972 gen_rtx_COMPARE (comp_mode,
6974 rs6000_compare_op1)));
6976 /* Some kinds of FP comparisons need an OR operation;
6977 except for flag_unsafe_math_optimizations we don't bother. */
6978 if (rs6000_compare_fp_p
6979 && ! flag_unsafe_math_optimizations
6980 && (code == LE || code == GE
6981 || code == UNEQ || code == LTGT
6982 || code == UNGT || code == UNLT))
6984 enum rtx_code or1, or2;
6985 rtx or1_rtx, or2_rtx, compare2_rtx;
6986 rtx or_result = gen_reg_rtx (CCEQmode);
/* Decompose the two-bit condition into a pair of one-bit tests.  */
6990 case LE: or1 = LT; or2 = EQ; break;
6991 case GE: or1 = GT; or2 = EQ; break;
6992 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
6993 case LTGT: or1 = LT; or2 = GT; break;
6994 case UNGT: or1 = UNORDERED; or2 = GT; break;
6995 case UNLT: or1 = UNORDERED; or2 = LT; break;
6998 validate_condition_mode (or1, comp_mode);
6999 validate_condition_mode (or2, comp_mode);
7000 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7001 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7002 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7003 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7005 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7007 compare_result = or_result;
7011 validate_condition_mode (code, GET_MODE (compare_result));
7013 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7017 /* Emit the RTL for an sCOND pattern. */
/* Generates the compare, then stores the condition as a 0/1 value into
   RESULT — via convert_move for 64-bit DImode/FP operands, or directly
   in SImode otherwise.  */
7020 rs6000_emit_sCOND (code, result)
7025 enum machine_mode op_mode;
7027 condition_rtx = rs6000_generate_compare (code);
7029 op_mode = GET_MODE (rs6000_compare_op0);
/* A VOIDmode op0 (constant) takes its mode from op1.  */
7030 if (op_mode == VOIDmode)
7031 op_mode = GET_MODE (rs6000_compare_op1);
7033 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7035 PUT_MODE (condition_rtx, DImode);
7036 convert_move (result, condition_rtx, 0);
7040 PUT_MODE (condition_rtx, SImode);
7041 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7045 /* Emit a branch of kind CODE to location LOC. */
/* Generates the compare for CODE and a conditional jump
   (if_then_else cond (label_ref LOC) pc) on it; the pc arm of the
   if_then_else is on an elided line of this extract.  */
7048 rs6000_emit_cbranch (code, loc)
7052 rtx condition_rtx, loc_ref;
7054 condition_rtx = rs6000_generate_compare (code);
7055 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7056 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7057 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7061 /* Return the string to output a conditional branch to LABEL, which is
7062 the operand number of the label, or -1 if the branch is really a
7065 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7066 condition code register and its mode specifies what kind of
7069 REVERSED is non-zero if we should reverse the sense of the comparison.
7071 INSN is the insn. */
/* NOTE(review): returns a pointer into a static buffer — not reentrant.
   Several lines (switch head, pred/"s" setup, returns) are missing from
   this decimated listing.  */
7074 output_cbranch (op, label, reversed, insn)
7080 static char string[64];
7081 enum rtx_code code = GET_CODE (op);
7082 rtx cc_reg = XEXP (op, 0);
7083 enum machine_mode mode = GET_MODE (cc_reg);
7084 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
     /* A length attribute of 8 means the short-form branch cannot reach
	the label, so a long-branch sequence (inverted test + "b") is
	needed; that also inverts the sense of the condition we test.  */
7085 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7086 int really_reversed = reversed ^ need_longbranch;
7092 validate_condition_mode (code, mode);
7094 /* Work out which way this really branches. We could use
7095 reverse_condition_maybe_unordered here always but this
7096 makes the resulting assembler clearer. */
7097 if (really_reversed)
7098 code = rs6000_reverse_condition (mode, code);
7102 /* Not all of these are actually distinct opcodes, but
7103 we distinguish them for clarity of the resulting assembler. */
7105 ccode = "ne"; break;
7107 ccode = "eq"; break;
7109 ccode = "ge"; break;
7110 case GT: case GTU: case UNGT:
7111 ccode = "gt"; break;
7113 ccode = "le"; break;
7114 case LT: case LTU: case UNLT:
7115 ccode = "lt"; break;
7116 case UNORDERED: ccode = "un"; break;
7117 case ORDERED: ccode = "nu"; break;
7118 case UNGE: ccode = "nl"; break;
7119 case UNLE: ccode = "ng"; break;
7124 /* Maybe we have a guess as to how likely the branch is.
7125 The old mnemonics don't have a way to specify this information. */
7126 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7127 if (note != NULL_RTX)
7129 /* PROB is the difference from 50%. */
7130 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7132 /* For branches that are very close to 50%, assume not-taken. */
7133 if (abs (prob) > REG_BR_PROB_BASE / 20
7134 && ((prob > 0) ^ need_longbranch))
     /* {...|...} pairs select between old mnemonics and new mnemonics
	in the emitted assembler template.  */
7143 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7145 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7147 /* We need to escape any '%' characters in the reg_names string.
7148 Assume they'd only be the first character... */
7149 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7151 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7155 /* If the branch distance was too far, we may have to use an
7156 unconditional branch to go the distance. */
7157 if (need_longbranch)
7158 s += sprintf (s, ",$+8\n\tb %s", label);
7160 s += sprintf (s, ",%s", label);
7166 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7167 operands of the last comparison is nonzero/true, FALSE_COND if it
7168 is zero/false. Return 0 if the hardware has no such operation. */
/* NOTE(review): implemented via the fsel instruction, which only exists
   for FP compares reducible to "x >= 0"; every case below is massaged
   into that form.  Many lines (switch labels, returns, swaps) are
   missing from this decimated listing.  */
7171 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7177 enum rtx_code code = GET_CODE (op);
7178 rtx op0 = rs6000_compare_op0;
7179 rtx op1 = rs6000_compare_op1;
7181 enum machine_mode mode = GET_MODE (op0);
7184 /* First, work out if the hardware can do this at all, or
7185 if it's too slow... */
7186 /* If the comparison is an integer one, since we only have fsel
7187 it'll be cheaper to use a branch. */
7188 if (! rs6000_compare_fp_p)
7191 /* Eliminate half of the comparisons by switching operands, this
7192 makes the remaining code simpler. */
7193 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7194 || code == LTGT || code == LT)
7196 code = reverse_condition_maybe_unordered (code);
7198 true_cond = false_cond;
7202 /* UNEQ and LTGT take four instructions for a comparison with zero,
7203 it'll probably be faster to use a branch here too. */
7207 if (GET_CODE (op1) == CONST_DOUBLE)
7208 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7210 /* We're going to try to implement comparions by performing
7211 a subtract, then comparing against zero. Unfortunately,
7212 Inf - Inf is NaN which is not zero, and so if we don't
7213 know that the operand is finite and the comparison
7214 would treat EQ different to UNORDERED, we can't do it. */
7215 if (! flag_unsafe_math_optimizations
7216 && code != GT && code != UNGE
7217 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7218 /* Constructs of the form (a OP b ? a : b) are safe. */
7219 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7220 || (! rtx_equal_p (op0, true_cond)
7221 && ! rtx_equal_p (op1, true_cond))))
7223 /* At this point we know we can use fsel. */
7225 /* Reduce the comparison to a comparison against zero. */
7226 temp = gen_reg_rtx (mode);
7227 emit_insn (gen_rtx_SET (VOIDmode, temp,
7228 gen_rtx_MINUS (mode, op0, op1)));
7230 op1 = CONST0_RTX (mode);
7232 /* If we don't care about NaNs we can reduce some of the comparisons
7233 down to faster ones. */
7234 if (flag_unsafe_math_optimizations)
7240 true_cond = false_cond;
7253 /* Now, reduce everything down to a GE. */
7260 temp = gen_reg_rtx (mode);
7261 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7266 temp = gen_reg_rtx (mode);
7267 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7272 temp = gen_reg_rtx (mode);
7273 emit_insn (gen_rtx_SET (VOIDmode, temp,
7275 gen_rtx_ABS (mode, op0))));
     /* The chained IF_THEN_ELSE emissions below implement the two-fsel
	sequences for EQ/UNGE-style conditions.  */
7280 temp = gen_reg_rtx (mode);
7281 emit_insn (gen_rtx_SET (VOIDmode, temp,
7282 gen_rtx_IF_THEN_ELSE (mode,
7283 gen_rtx_GE (VOIDmode,
7285 true_cond, false_cond)));
7287 true_cond = false_cond;
7289 temp = gen_reg_rtx (mode);
7290 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7295 temp = gen_reg_rtx (mode);
7296 emit_insn (gen_rtx_SET (VOIDmode, temp,
7297 gen_rtx_IF_THEN_ELSE (mode,
7298 gen_rtx_GE (VOIDmode,
7300 true_cond, false_cond)));
7302 false_cond = true_cond;
7304 temp = gen_reg_rtx (mode);
7305 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
     /* Final fsel: DEST = (op0 >= 0) ? true_cond : false_cond.  */
7313 emit_insn (gen_rtx_SET (VOIDmode, dest,
7314 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7315 gen_rtx_GE (VOIDmode,
7317 true_cond, false_cond)));
/* Emit a min/max of OP0 and OP1 into DEST via a GE conditional move;
   SMAX/UMAX pick OP0 on GE, min codes pick OP1 (operand order for the
   min case is partially missing from this decimated listing).  */
7322 rs6000_emit_minmax (dest, code, op0, op1)
7328 enum machine_mode mode = GET_MODE (op0);
7330 if (code == SMAX || code == UMAX)
7331 target = emit_conditional_move (dest, GE, op0, op1, mode,
7334 target = emit_conditional_move (dest, GE, op0, op1, mode,
7336 if (target == NULL_RTX)
     /* emit_conditional_move may return a register other than DEST.  */
7339 emit_move_insn (dest, target);
7342 /* This page contains routines that are used to determine what the
7343 function prologue and epilogue code will do and write them out. */
7345 /* Return the first fixed-point register that is required to be
7346 saved. 32 if none. */
7349 first_reg_to_save ()
7353 /* Find lowest numbered live register. */
     /* GPRs 13..31 are the call-saved fixed-point registers; the PIC
	register is treated as call-saved under small-PIC V.4 and Darwin.  */
7354 for (first_reg = 13; first_reg <= 31; first_reg++)
7355 if (regs_ever_live[first_reg]
7356 && (! call_used_regs[first_reg]
7357 || (first_reg == PIC_OFFSET_TABLE_REGNUM
7358 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7359 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7362 if (current_function_profile)
7364 /* AIX must save/restore every register that contains a parameter
7365 before/after the .__mcount call plus an additional register
7366 for the static chain, if needed; use registers from 30 down to 22
7368 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7370 int last_parm_reg, profile_first_reg;
7372 /* Figure out last used parameter register. The proper thing
7373 to do is to walk incoming args of the function. A function
7374 might have live parameter registers even if it has no
7376 for (last_parm_reg = 10;
7377 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7381 /* Calculate first reg for saving parameter registers
7383 Skip reg 31 which may contain the frame pointer. */
7384 profile_first_reg = (33 - last_parm_reg
7385 - (current_function_needs_context ? 1 : 0));
7387 /* Need to skip another reg to account for R31 being PICBASE
7388 (when flag_pic is set) or R30 being used as the frame
7389 pointer (when flag_pic is not set). */
7390 --profile_first_reg;
7392 /* Do not save frame pointer if no parameters needs to be saved. */
7393 if (profile_first_reg == 31)
7394 profile_first_reg = 32;
7396 if (first_reg > profile_first_reg)
7397 first_reg = profile_first_reg;
7400 /* SVR4 may need one register to preserve the static chain. */
7401 else if (current_function_needs_context)
7403 /* Skip reg 31 which may contain the frame pointer. */
7410 if (flag_pic && current_function_uses_pic_offset_table &&
7411 (first_reg > PIC_OFFSET_TABLE_REGNUM))
7412 return PIC_OFFSET_TABLE_REGNUM;
7418 /* Similar, for FP regs. */
/* Returns the first FP register (hard regno, FPRs occupy 32..63) that
   must be saved; 64 if none.  FPRs 14..31 are call-saved.  */
7421 first_fp_reg_to_save ()
7425 /* Find lowest numbered live register. */
7426 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7427 if (regs_ever_live[first_reg])
7433 /* Similar, for AltiVec regs. */
/* Returns the first AltiVec register to save; LAST_ALTIVEC_REGNO + 1
   if none.  V20..V31 (FIRST_ALTIVEC_REGNO + 20 onward) are the
   call-saved vector registers under the AltiVec ABI.  */
7436 first_altivec_reg_to_save ()
7440 /* Stack frame remains as is unless we are in AltiVec ABI. */
7441 if (! TARGET_ALTIVEC_ABI)
7442 return LAST_ALTIVEC_REGNO + 1;
7444 /* Find lowest numbered live register. */
7445 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7446 if (regs_ever_live[i])
7452 /* Return a 32-bit mask of the AltiVec registers we need to set in
7453 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7454 the 32-bit word is 0. */
7457 compute_vrsave_mask ()
7459 unsigned int i, mask = 0;
7461 /* First, find out if we use _any_ altivec registers. */
7462 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7463 if (regs_ever_live[i])
7464 mask |= ALTIVEC_REG_BIT (i);
     /* NOTE(review): an early "if (mask == 0) return" presumably sits in
	the lines missing here — the comment below refers to it.  */
7469 /* Next, add all registers that are call-clobbered. We do this
7470 because post-reload register optimizers such as regrename_optimize
7471 may choose to use them. They never change the register class
7472 chosen by reload, so cannot create new uses of altivec registers
7473 if there were none before, so the early exit above is safe. */
7474 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7475 altivec registers not saved in the mask, which might well make the
7476 adjustments below more effective in eliding the save/restore of
7477 VRSAVE in small functions. */
7478 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7479 if (call_used_regs[i])
7480 mask |= ALTIVEC_REG_BIT (i);
7482 /* Next, remove the argument registers from the set. These must
7483 be in the VRSAVE mask set by the caller, so we don't need to add
7484 them in again. More importantly, the mask we compute here is
7485 used to generate CLOBBERs in the set_vrsave insn, and we do not
7486 wish the argument registers to die. */
7487 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7488 mask &= ~ALTIVEC_REG_BIT (i);
7490 /* Similarly, remove the return value from the set. */
7493 diddle_return_value (is_altivec_return_reg, &yes);
7495 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
/* Callback for diddle_return_value: sets *XYES (a bool) when REG is the
   AltiVec return-value register.  */
7502 is_altivec_return_reg (reg, xyes)
7506 bool *yes = (bool *) xyes;
7507 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7512 /* Calculate the stack information for the current function. This is
7513 complicated by having two separate calling sequences, the AIX calling
7514 sequence and the V.4 calling sequence.
7516 AIX (and Darwin/Mac OS X) stack frames look like:
7518 SP----> +---------------------------------------+
7519 | back chain to caller | 0 0
7520 +---------------------------------------+
7521 | saved CR | 4 8 (8-11)
7522 +---------------------------------------+
7524 +---------------------------------------+
7525 | reserved for compilers | 12 24
7526 +---------------------------------------+
7527 | reserved for binders | 16 32
7528 +---------------------------------------+
7529 | saved TOC pointer | 20 40
7530 +---------------------------------------+
7531 | Parameter save area (P) | 24 48
7532 +---------------------------------------+
7533 | Alloca space (A) | 24+P etc.
7534 +---------------------------------------+
7535 | Local variable space (L) | 24+P+A
7536 +---------------------------------------+
7537 | Float/int conversion temporary (X) | 24+P+A+L
7538 +---------------------------------------+
7539 | Save area for AltiVec registers (W) | 24+P+A+L+X
7540 +---------------------------------------+
7541 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7542 +---------------------------------------+
7543 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7544 +---------------------------------------+
7545 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
7546 +---------------------------------------+
7547 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
7548 +---------------------------------------+
7549 old SP->| back chain to caller's caller |
7550 +---------------------------------------+
7552 The required alignment for AIX configurations is two words (i.e., 8
7556 V.4 stack frames look like:
7558 SP----> +---------------------------------------+
7559 | back chain to caller | 0
7560 +---------------------------------------+
7561 | caller's saved LR | 4
7562 +---------------------------------------+
7563 | Parameter save area (P) | 8
7564 +---------------------------------------+
7565 | Alloca space (A) | 8+P
7566 +---------------------------------------+
7567 | Varargs save area (V) | 8+P+A
7568 +---------------------------------------+
7569 | Local variable space (L) | 8+P+A+V
7570 +---------------------------------------+
7571 | Float/int conversion temporary (X) | 8+P+A+V+L
7572 +---------------------------------------+
7573 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7574 +---------------------------------------+
7575 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7576 +---------------------------------------+
7577 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7578 +---------------------------------------+
7579 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7580 +---------------------------------------+
7581 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7582 +---------------------------------------+
7583 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7584 +---------------------------------------+
7585 old SP->| back chain to caller's caller |
7586 +---------------------------------------+
7588 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7589 given. (But note below and in sysv4.h that we require only 8 and
7590 may round up the size of our stack frame anyways. The historical
7591 reason is early versions of powerpc-linux which didn't properly
7592 align the stack at program startup. A happy side-effect is that
7593 -mno-eabi libraries can be used with -meabi programs.)
7595 The EABI configuration defaults to the V.4 layout, unless
7596 -mcall-aix is used, in which case the AIX layout is used. However,
7597 the stack alignment requirements may differ. If -mno-eabi is not
7598 given, the required stack alignment is 8 bytes; if -mno-eabi is
7599 given, the required alignment is 16 bytes. (But see V.4 comment
7602 #ifndef ABI_STACK_BOUNDARY
7603 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
/* Compute the stack-frame layout for the current function per the
   diagrams above; returns a pointer to a function-static rs6000_stack_t.
   NOTE(review): this listing is decimated — switch heads, braces and
   several assignments are missing from view.  */
7607 rs6000_stack_info ()
7609 static rs6000_stack_t info, zero_info;
7610 rs6000_stack_t *info_ptr = &info;
7611 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7612 enum rs6000_abi abi;
7616 /* Zero all fields portably. */
7619 /* Select which calling sequence. */
7620 info_ptr->abi = abi = DEFAULT_ABI;
7622 /* Calculate which registers need to be saved & save area size. */
7623 info_ptr->first_gp_reg_save = first_reg_to_save ();
7624 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7625 even if it currently looks like we won't. */
7626 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7627 || (flag_pic == 1 && abi == ABI_V4)
7628 || (flag_pic && abi == ABI_DARWIN))
7629 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7630 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7632 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7634 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7635 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7637 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7638 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7639 - info_ptr->first_altivec_reg_save);
7641 /* Does this function call anything? */
7642 info_ptr->calls_p = (! current_function_is_leaf
7643 || cfun->machine->ra_needs_full_frame);
7645 /* Determine if we need to save the link register. */
7646 if (rs6000_ra_ever_killed ()
7647 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7648 #ifdef TARGET_RELOCATABLE
7649 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7651 || (info_ptr->first_fp_reg_save != 64
7652 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7653 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7654 || (abi == ABI_V4 && current_function_calls_alloca)
7655 || (DEFAULT_ABI == ABI_DARWIN
7657 && current_function_uses_pic_offset_table)
7658 || info_ptr->calls_p)
7660 info_ptr->lr_save_p = 1;
7661 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7664 /* Determine if we need to save the condition code registers. */
     /* Only CR2/CR3/CR4 are call-saved.  */
7665 if (regs_ever_live[CR2_REGNO]
7666 || regs_ever_live[CR3_REGNO]
7667 || regs_ever_live[CR4_REGNO])
7669 info_ptr->cr_save_p = 1;
7671 info_ptr->cr_size = reg_size;
7674 /* If the current function calls __builtin_eh_return, then we need
7675 to allocate stack space for registers that will hold data for
7676 the exception handler. */
7677 if (current_function_calls_eh_return)
7680 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7682 ehrd_size = i * UNITS_PER_WORD;
7687 /* Determine various sizes. */
7688 info_ptr->reg_size = reg_size;
7689 info_ptr->fixed_size = RS6000_SAVE_AREA;
7690 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7691 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7692 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7695 if (TARGET_ALTIVEC_ABI)
7697 info_ptr->vrsave_mask = compute_vrsave_mask ();
7698 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7702 info_ptr->vrsave_mask = 0;
7703 info_ptr->vrsave_size = 0;
7706 /* Calculate the offsets. */
     /* Offsets are negative displacements from the incoming stack
	pointer; layout differs per ABI (see diagrams above).  */
7714 case ABI_AIX_NODESC:
7716 info_ptr->fp_save_offset = - info_ptr->fp_size;
7717 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7719 if (TARGET_ALTIVEC_ABI)
7721 info_ptr->vrsave_save_offset
7722 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7724 /* Align stack so vector save area is on a quadword boundary. */
7725 if (info_ptr->altivec_size != 0)
7726 info_ptr->altivec_padding_size
7727 = 16 - (-info_ptr->vrsave_save_offset % 16);
7729 info_ptr->altivec_padding_size = 0;
7731 info_ptr->altivec_save_offset
7732 = info_ptr->vrsave_save_offset
7733 - info_ptr->altivec_padding_size
7734 - info_ptr->altivec_size;
7736 /* Adjust for AltiVec case. */
7737 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7740 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7741 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7742 info_ptr->lr_save_offset = 2*reg_size;
7746 info_ptr->fp_save_offset = - info_ptr->fp_size;
7747 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7748 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7750 if (TARGET_ALTIVEC_ABI)
7752 info_ptr->vrsave_save_offset
7753 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7755 /* Align stack so vector save area is on a quadword boundary. */
7756 if (info_ptr->altivec_size != 0)
7757 info_ptr->altivec_padding_size
7758 = 16 - (-info_ptr->vrsave_save_offset % 16);
7760 info_ptr->altivec_padding_size = 0;
7762 info_ptr->altivec_save_offset
7763 = info_ptr->vrsave_save_offset
7764 - info_ptr->altivec_padding_size
7765 - info_ptr->altivec_size;
7767 /* Adjust for AltiVec case. */
7768 info_ptr->toc_save_offset
7769 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7772 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7773 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7774 info_ptr->lr_save_offset = reg_size;
7778 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7780 + info_ptr->altivec_size
7781 + info_ptr->altivec_padding_size
7782 + info_ptr->vrsave_size
7786 + info_ptr->vrsave_size
7787 + info_ptr->toc_size,
7788 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7791 total_raw_size = (info_ptr->vars_size
7792 + info_ptr->parm_size
7793 + info_ptr->save_size
7794 + info_ptr->varargs_size
7795 + info_ptr->fixed_size);
7797 info_ptr->total_size =
7798 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7800 /* Determine if we need to allocate any stack frame:
7802 For AIX we need to push the stack if a frame pointer is needed
7803 (because the stack might be dynamically adjusted), if we are
7804 debugging, if we make calls, or if the sum of fp_save, gp_save,
7805 and local variables are more than the space needed to save all
7806 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7807 + 18*8 = 288 (GPR13 reserved).
7809 For V.4 we don't have the stack cushion that AIX uses, but assume
7810 that the debugger can handle stackless frames. */
7812 if (info_ptr->calls_p)
7813 info_ptr->push_p = 1;
7815 else if (abi == ABI_V4)
7816 info_ptr->push_p = (total_raw_size > info_ptr->fixed_size
7817 || info_ptr->calls_p);
7820 info_ptr->push_p = (frame_pointer_needed
7821 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7822 || ((total_raw_size - info_ptr->fixed_size)
7823 > (TARGET_32BIT ? 220 : 288)));
7825 /* Zero offsets if we're not saving those registers. */
7826 if (info_ptr->fp_size == 0)
7827 info_ptr->fp_save_offset = 0;
7829 if (info_ptr->gp_size == 0)
7830 info_ptr->gp_save_offset = 0;
7832 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7833 info_ptr->altivec_save_offset = 0;
7835 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7836 info_ptr->vrsave_save_offset = 0;
7838 if (! info_ptr->lr_save_p)
7839 info_ptr->lr_save_offset = 0;
7841 if (! info_ptr->cr_save_p)
7842 info_ptr->cr_save_offset = 0;
7844 if (! info_ptr->toc_save_p)
7845 info_ptr->toc_save_offset = 0;
/* Dump a human-readable summary of INFO (or of the current function's
   frame if INFO is NULL) to stderr, for debugging.  */
7851 debug_stack_info (info)
7852 rs6000_stack_t *info;
7854 const char *abi_string;
     /* NULL INFO means "describe the current function".  */
7857 info = rs6000_stack_info ();
7859 fprintf (stderr, "\nStack information for function %s:\n",
7860 ((current_function_decl && DECL_NAME (current_function_decl))
7861 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7866 default: abi_string = "Unknown"; break;
7867 case ABI_NONE: abi_string = "NONE"; break;
7869 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7870 case ABI_DARWIN: abi_string = "Darwin"; break;
7871 case ABI_V4: abi_string = "V.4"; break;
7874 fprintf (stderr, "\tABI = %5s\n", abi_string);
7876 if (TARGET_ALTIVEC_ABI)
7877 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7879 if (info->first_gp_reg_save != 32)
7880 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7882 if (info->first_fp_reg_save != 64)
7883 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7885 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7886 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7887 info->first_altivec_reg_save);
7889 if (info->lr_save_p)
7890 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7892 if (info->cr_save_p)
7893 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7895 if (info->toc_save_p)
7896 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7898 if (info->vrsave_mask)
7899 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7902 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7905 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7907 if (info->gp_save_offset)
7908 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7910 if (info->fp_save_offset)
7911 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7913 if (info->altivec_save_offset)
7914 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7915 info->altivec_save_offset);
7917 if (info->vrsave_save_offset)
7918 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7919 info->vrsave_save_offset);
7921 if (info->lr_save_offset)
7922 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7924 if (info->cr_save_offset)
7925 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7927 if (info->toc_save_offset)
7928 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7930 if (info->varargs_save_offset)
7931 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7933 if (info->total_size)
7934 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7936 if (info->varargs_size)
7937 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7939 if (info->vars_size)
7940 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7942 if (info->parm_size)
7943 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7945 if (info->fixed_size)
7946 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7949 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7952 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7954 if (info->altivec_size)
7955 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
7957 if (info->vrsave_size)
7958 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
7960 if (info->altivec_padding_size)
7961 fprintf (stderr, "\taltivec_padding_size= %5d\n",
7962 info->altivec_padding_size);
7965 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
7968 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
7971 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
7973 if (info->save_size)
7974 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
7976 if (info->reg_size != 4)
7977 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
7979 fprintf (stderr, "\n");
/* Expand __builtin_return_address (COUNT) given the FRAME pointer.
   COUNT != 0 (or always under AIX ABIs) walks the back chain; COUNT == 0
   uses the pseudo holding the incoming link register.  */
7983 rs6000_return_addr (count, frame)
7987 /* Currently we don't optimize very well between prolog and body
7988 code and for PIC code the code can be actually quite bad, so
7989 don't try to be too clever here. */
7992 || DEFAULT_ABI == ABI_AIX
7993 || DEFAULT_ABI == ABI_AIX_NODESC)
     /* Walking the frame requires the prologue to lay down a full frame.  */
7995 cfun->machine->ra_needs_full_frame = 1;
8002 plus_constant (copy_to_reg
8003 (gen_rtx_MEM (Pmode,
8004 memory_address (Pmode, frame))),
8005 RETURN_ADDRESS_OFFSET)));
8008 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
/* Return nonzero if the link register may be clobbered somewhere in the
   current function (so the prologue must save it).  */
8012 rs6000_ra_ever_killed ()
8016 #ifdef ASM_OUTPUT_MI_THUNK
     /* Thunks handle LR themselves.  */
8017 if (current_function_is_thunk)
8020 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8021 || cfun->machine->ra_needs_full_frame)
8022 return regs_ever_live[LINK_REGISTER_REGNUM];
     /* Scan the whole insn stream (including the prologue-time sequence)
	for a set of LR.  */
8024 push_topmost_sequence ();
8026 pop_topmost_sequence ();
8028 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8032 /* Add a REG_MAYBE_DEAD note to the insn. */
/* Marks TOC-setup insns so flow analysis may delete them if the TOC
   register turns out to be unused.  */
8034 rs6000_maybe_dead (insn)
8037 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8042 /* Emit instructions needed to load the TOC register.
8043 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8044 a constant pool; or for SVR4 -fpic. */
/* FROMPROLOG is nonzero when called while emitting the prologue, in
   which case hard LR/r0 are used instead of fresh pseudos.  */
8047 rs6000_emit_load_toc_table (fromprolog)
8051 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
8053 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
     /* Small-model SVR4 PIC: a single load_toc_v4_pic_si.  */
8055 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8057 rtx temp = (fromprolog
8058 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8059 : gen_reg_rtx (Pmode));
8060 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8061 rs6000_maybe_dead (emit_move_insn (dest, temp));
     /* Large-model PIC (-fPIC): compute the GOT address from a pair of
	local labels.  */
8063 else if (flag_pic == 2)
8066 rtx tempLR = (fromprolog
8067 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8068 : gen_reg_rtx (Pmode));
8069 rtx temp0 = (fromprolog
8070 ? gen_rtx_REG (Pmode, 0)
8071 : gen_reg_rtx (Pmode));
8074 /* possibly create the toc section */
8075 if (! toc_initialized)
8078 function_section (current_function_decl);
8085 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8086 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8088 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8089 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8091 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8093 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8094 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8101 static int reload_toc_labelno = 0;
8103 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8105 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8106 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8108 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8111 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8112 rs6000_maybe_dead (emit_move_insn (temp0,
8113 gen_rtx_MEM (Pmode, dest)));
8115 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8117 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8119 /* This is for AIX code running in non-PIC ELF. */
8122 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8123 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8125 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8126 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
     /* AIX proper: reload the TOC pointer from the fixed stack slot.  */
8134 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8136 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
/* Return the (lazily created) alias set used for TOC references.  */
8141 get_TOC_alias_set ()
8143 static int set = -1;
8145 set = new_alias_set ();
8149 /* This returns nonzero if the current function uses the TOC. This is
8150 determined by the presence of (unspec ... 7), which is generated by
8151 the various load_toc_* patterns. */
/* NOTE(review): the function header line is missing from this decimated
   listing; per the comment above this is the TOC-usage scan.  */
8158 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8161 rtx pat = PATTERN (insn);
8164 if (GET_CODE (pat) == PARALLEL)
8165 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8166 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8167 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
/* Build the RTL for a TOC-relative reference to SYMBOL:
   (toc-reg + (const (symbol - toc-label))).  */
8174 create_TOC_reference (symbol)
8177 return gen_rtx_PLUS (Pmode,
8178 gen_rtx_REG (Pmode, TOC_REGISTER),
8179 gen_rtx_CONST (Pmode,
8180 gen_rtx_MINUS (Pmode, symbol,
8181 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8185 /* __throw will restore its own return address to be the same as the
8186 return address of the function that the throw is being made to.
8187 This is unfortunate, because we want to check the original
8188 return address to see if we need to restore the TOC.
8189 So we have to squirrel it away here.
8190 This is used only in compiling __throw and __rethrow.
8192 Most of this code should be removed by CSE. */
8193 static rtx insn_after_throw;
8195 /* This does the saving... */
/* Captures the instruction word at the caller's return address (back
   chain + 2 words = saved LR slot) into insn_after_throw, for the TOC
   scan in rs6000_emit_eh_toc_restore below.  */
8197 rs6000_aix_emit_builtin_unwind_init ()
8200 rtx stack_top = gen_reg_rtx (Pmode);
8201 rtx opcode_addr = gen_reg_rtx (Pmode);
8203 insn_after_throw = gen_reg_rtx (SImode);
8205 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8206 emit_move_insn (stack_top, mem);
8208 mem = gen_rtx_MEM (Pmode,
8209 gen_rtx_PLUS (Pmode, stack_top,
8210 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8211 emit_move_insn (opcode_addr, mem);
8212 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8215 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8216 in _eh.o). Only used on AIX.
8218 The idea is that on AIX, function calls look like this:
8219 bl somefunction-trampoline
8223 somefunction-trampoline:
8225 ... load function address in the count register ...
8227 or like this, if the linker determines that this is not a cross-module call
8228 and so the TOC need not be restored:
8231 or like this, if the compiler could determine that this is not a
8234 now, the tricky bit here is that register 2 is saved and restored
8235 by the _linker_, so we can't readily generate debugging information
8236 for it. So we need to go back up the call chain looking at the
8237 insns at return addresses to see which calls saved the TOC register
8238 and so see where it gets restored from.
8240 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8241 just before the actual epilogue.
8243 On the bright side, this incurs no space or time overhead unless an
8244 exception is thrown, except for the extra code in libgcc.a.
8246 The parameter STACKSIZE is a register containing (at runtime)
8247 the amount to be popped off the stack in addition to the stack frame
8248 of this routine (which will be __throw or __rethrow, and so is
8249 guaranteed to have a stack frame). */
8252 rs6000_emit_eh_toc_restore (stacksize)
8256 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8257 rtx tocompare = gen_reg_rtx (SImode);
8258 rtx opcode = gen_reg_rtx (SImode);
8259 rtx opcode_addr = gen_reg_rtx (Pmode);
8261 rtx loop_start = gen_label_rtx ();
8262 rtx no_toc_restore_needed = gen_label_rtx ();
8263 rtx loop_exit = gen_label_rtx ();
8265 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8266 set_mem_alias_set (mem, rs6000_sr_alias_set);
8267 emit_move_insn (bottom_of_stack, mem);
8269 top_of_stack = expand_binop (Pmode, add_optab,
8270 bottom_of_stack, stacksize,
8271 NULL_RTX, 1, OPTAB_WIDEN);
     /* The opcode of the "restore r2" insn that follows a cross-module
	call: 32-bit "lwz r2,20(r1)" vs. the 64-bit ld form 0xE8410028.  */
8273 emit_move_insn (tocompare,
8274 GEN_INT (trunc_int_for_mode (TARGET_32BIT
8276 : 0xE8410028, SImode)));
8278 if (insn_after_throw == NULL_RTX)
8280 emit_move_insn (opcode, insn_after_throw);
     /* Walk up the back chain: whenever the insn at a return address is
	the TOC-restore opcode, reload r2 from that frame's TOC slot.  */
8282 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8283 emit_label (loop_start);
8285 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8286 SImode, NULL_RTX, NULL_RTX,
8287 no_toc_restore_needed);
8289 mem = gen_rtx_MEM (Pmode,
8290 gen_rtx_PLUS (Pmode, bottom_of_stack,
8291 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8292 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8294 emit_label (no_toc_restore_needed);
8295 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8296 Pmode, NULL_RTX, NULL_RTX,
8299 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8300 set_mem_alias_set (mem, rs6000_sr_alias_set);
8301 emit_move_insn (bottom_of_stack, mem);
8303 mem = gen_rtx_MEM (Pmode,
8304 gen_rtx_PLUS (Pmode, bottom_of_stack,
8305 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8306 emit_move_insn (opcode_addr, mem);
8307 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8309 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8310 emit_jump (loop_start);
8311 emit_note (NULL, NOTE_INSN_LOOP_END);
8312 emit_label (loop_exit);
8314 #endif /* TARGET_AIX */
8316 /* This ties together stack memory (MEM with an alias set of
8317 rs6000_sr_alias_set) and the change to the stack pointer. */
/* Emit a "stack tie": a stack_tie insn referencing a BLKmode MEM at the
   stack pointer, placed in rs6000_sr_alias_set.  This creates a scheduling
   dependency between stack-pointer changes and every save/restore MEM in
   that alias set, so the scheduler cannot reorder them.  */
8320 rs6000_emit_stack_tie ()
8322 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8324 set_mem_alias_set (mem, rs6000_sr_alias_set);
8325 emit_insn (gen_stack_tie (mem));
8328 /* Emit the correct code for allocating stack space, as insns.
8329 If COPY_R12, make sure a copy of the old frame is left in r12.
8330 The generated code may use hard register 0 as a temporary. */
/* Emit insns that decrement the stack pointer by SIZE bytes and store the
   old stack pointer as the new frame's back chain.  If COPY_R12 (or if the
   target lacks update-form stores), first copy the old sp into r12.  May
   clobber r0 as a temporary.  Also emits the -fstack-limit check when
   current_function_limit_stack is set.
   NOTE(review): lossy excerpt -- intermediate lines (operands, braces,
   else-arms) are elided where the embedded numbering skips.  */
8333 rs6000_emit_allocate_stack (size, copy_r12)
8338 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8339 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8340 rtx todec = GEN_INT (-size);
/* Stack-limit checking: trap (cond_trap) if the decremented sp would
   cross the limit.  Two supported forms: the limit in a fixed register,
   or (on V.4 ELF) a symbolic limit materialized via elf_high/elf_low.  */
8342 if (current_function_limit_stack)
8344 if (REG_P (stack_limit_rtx)
8345 && REGNO (stack_limit_rtx) > 1
8346 && REGNO (stack_limit_rtx) <= 31)
8348 emit_insn (Pmode == SImode
8349 ? gen_addsi3 (tmp_reg,
8352 : gen_adddi3 (tmp_reg,
8356 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8359 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8361 && DEFAULT_ABI == ABI_V4)
8363 rtx toload = gen_rtx_CONST (VOIDmode,
8364 gen_rtx_PLUS (Pmode,
8368 emit_insn (gen_elf_high (tmp_reg, toload));
8369 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8370 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
/* Any other stack_limit_rtx shape is rejected.  */
8374 warning ("stack limit expression is not supported");
/* Preserve the old sp in r12 when the caller asked for it, or when we
   must store the back chain with a separate insn below.  */
8377 if (copy_r12 || ! TARGET_UPDATE)
8378 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8384 /* Need a note here so that try_split doesn't get confused. */
8385 if (get_last_insn() == NULL_RTX)
8386 emit_note (0, NOTE_INSN_DELETED);
8387 insn = emit_move_insn (tmp_reg, todec);
8388 try_split (PATTERN (insn), insn, 0);
/* With update-form stores (stwu/stdu), decrement sp and store the back
   chain in a single insn; otherwise add then store r12 explicitly.  */
8392 if (Pmode == SImode)
8393 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8396 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8401 if (Pmode == SImode)
8402 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8404 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8405 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8406 gen_rtx_REG (Pmode, 12));
/* Mark the sp adjustment frame-related and describe it for DWARF CFI.  */
8409 RTX_FRAME_RELATED_P (insn) = 1;
8411 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8412 gen_rtx_SET (VOIDmode, stack_reg,
8413 gen_rtx_PLUS (Pmode, stack_reg,
8418 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8421 (mem (plus (blah) (regXX)))
8425 (mem (plus (blah) (const VALUE_OF_REGXX))). */
/* INSN stores an AltiVec register through an address computed with scratch
   register REG (AltiVec addressing is [reg+reg], so the offset lives in a
   register).  Attach a REG_FRAME_RELATED_EXPR note that is INSN's pattern
   with REG replaced by the constant VAL, so dwarf2out_frame_debug_expr
   sees a constant offset it can understand.  */
8428 altivec_frame_fixup (insn, reg, val)
8434 real = copy_rtx (PATTERN (insn));
8436 real = replace_rtx (real, reg, GEN_INT (val));
8438 RTX_FRAME_RELATED_P (insn) = 1;
8439 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8444 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8445 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8446 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8447 deduce these equivalences by itself so it wasn't necessary to hold
8448 its hand so much. */
8451 rs6000_frame_related (insn, reg, val, reg2, rreg)
8460 real = copy_rtx (PATTERN (insn));
8462 real = replace_rtx (real, reg,
8463 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8464 STACK_POINTER_REGNUM),
8467 /* We expect that 'real' is either a SET or a PARALLEL containing
8468 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8469 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8471 if (GET_CODE (real) == SET)
8475 temp = simplify_rtx (SET_SRC (set));
8477 SET_SRC (set) = temp;
8478 temp = simplify_rtx (SET_DEST (set));
8480 SET_DEST (set) = temp;
8481 if (GET_CODE (SET_DEST (set)) == MEM)
8483 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8485 XEXP (SET_DEST (set), 0) = temp;
8488 else if (GET_CODE (real) == PARALLEL)
8491 for (i = 0; i < XVECLEN (real, 0); i++)
8492 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8494 rtx set = XVECEXP (real, 0, i);
8496 temp = simplify_rtx (SET_SRC (set));
8498 SET_SRC (set) = temp;
8499 temp = simplify_rtx (SET_DEST (set));
8501 SET_DEST (set) = temp;
8502 if (GET_CODE (SET_DEST (set)) == MEM)
8504 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8506 XEXP (SET_DEST (set), 0) = temp;
8508 RTX_FRAME_RELATED_P (set) = 1;
8514 if (reg2 != NULL_RTX)
8515 real = replace_rtx (real, reg2, rreg);
8517 RTX_FRAME_RELATED_P (insn) = 1;
8518 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8523 /* Returns an insn that has a vrsave set operation with the
8524 appropriate CLOBBERs. */
8527 generate_set_vrsave (reg, info, epiloguep)
8529 rs6000_stack_t *info;
8533 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8534 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8537 = gen_rtx_SET (VOIDmode,
8539 gen_rtx_UNSPEC_VOLATILE (SImode,
8540 gen_rtvec (2, reg, vrsave),
8545 /* We need to clobber the registers in the mask so the scheduler
8546 does not move sets to VRSAVE before sets of AltiVec registers.
8548 However, if the function receives nonlocal gotos, reload will set
8549 all call saved registers live. We will end up with:
8551 (set (reg 999) (mem))
8552 (parallel [ (set (reg vrsave) (unspec blah))
8553 (clobber (reg 999))])
8555 The clobber will cause the store into reg 999 to be dead, and
8556 flow will attempt to delete an epilogue insn. In this case, we
8557 need an unspec use/set of the register. */
8559 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8560 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8562 if (!epiloguep || call_used_regs [i])
8563 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8564 gen_rtx_REG (V4SImode, i));
8567 rtx reg = gen_rtx_REG (V4SImode, i);
8570 = gen_rtx_SET (VOIDmode,
8572 gen_rtx_UNSPEC (V4SImode,
8573 gen_rtvec (1, reg), 27));
8577 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8579 for (i = 0; i < nclobs; ++i)
8580 XVECEXP (insn, 0, i) = clobs[i];
8585 /* Emit function prologue as insns. */
/* Emit the function prologue as RTL: allocate the stack frame, save
   AltiVec registers and VRSAVE, FPRs (inline or via the out-of-line
   _savef* routines), GPRs (individually or with a store-multiple
   PARALLEL), EH data registers, LR, and CR, then set up the frame
   pointer and the TOC/PIC register as needed.  Frame-related insns are
   annotated via rs6000_frame_related/altivec_frame_fixup for DWARF CFI.
   NOTE(review): lossy excerpt -- the embedded original line numbers skip,
   so braces, declarations, and some operands are elided throughout.  */
8588 rs6000_emit_prologue ()
8590 rs6000_stack_t *info = rs6000_stack_info ();
8591 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8592 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8593 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8594 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8595 rtx frame_reg_rtx = sp_reg_rtx;
8596 rtx cr_save_rtx = NULL;
8598 int saving_FPRs_inline;
8599 int using_store_multiple;
8600 HOST_WIDE_INT sp_offset = 0;
/* Decide save strategies: store-multiple for GPRs (32-bit only), and
   inline vs. out-of-line (_savef*) saves for FPRs.  */
8602 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8603 && info->first_gp_reg_save < 31);
8604 saving_FPRs_inline = (info->first_fp_reg_save == 64
8605 || FP_SAVE_INLINE (info->first_fp_reg_save));
8607 /* For V.4, update stack before we do any saving and set back pointer. */
8608 if (info->push_p && DEFAULT_ABI == ABI_V4)
8610 if (info->total_size < 32767)
8611 sp_offset = info->total_size;
8613 frame_reg_rtx = frame_ptr_rtx;
8614 rs6000_emit_allocate_stack (info->total_size,
8615 (frame_reg_rtx != sp_reg_rtx
8618 || info->first_fp_reg_save < 64
8619 || info->first_gp_reg_save < 32
8621 if (frame_reg_rtx != sp_reg_rtx)
8622 rs6000_emit_stack_tie ();
8625 /* Save AltiVec registers if needed. */
8626 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8630 /* There should be a non inline version of this, for when we
8631 are saving lots of vector registers. */
8632 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8633 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8635 rtx areg, savereg, mem;
8638 offset = info->altivec_save_offset + sp_offset
8639 + 16 * (i - info->first_altivec_reg_save);
8641 savereg = gen_rtx_REG (V4SImode, i);
/* The offset is materialized in r0 because AltiVec stores have no
   reg+const addressing mode.  */
8643 areg = gen_rtx_REG (Pmode, 0);
8644 emit_move_insn (areg, GEN_INT (offset));
8646 /* AltiVec addressing mode is [reg+reg]. */
8647 mem = gen_rtx_MEM (V4SImode,
8648 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8650 set_mem_alias_set (mem, rs6000_sr_alias_set);
8652 insn = emit_move_insn (mem, savereg);
/* Rewrite the scratch-register address as a constant for DWARF.  */
8654 altivec_frame_fixup (insn, areg, offset);
8658 /* VRSAVE is a bit vector representing which AltiVec registers
8659 are used. The OS uses this to determine which vector
8660 registers to save on a context switch. We need to save
8661 VRSAVE on the stack frame, add whatever AltiVec registers we
8662 used in this function, and do the corresponding magic in the
8665 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8667 rtx reg, mem, vrsave;
8670 /* Get VRSAVE onto a GPR. */
8671 reg = gen_rtx_REG (SImode, 12);
8672 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8673 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
/* Save the incoming VRSAVE value to its frame slot.  */
8676 offset = info->vrsave_save_offset + sp_offset;
8678 = gen_rtx_MEM (SImode,
8679 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8680 set_mem_alias_set (mem, rs6000_sr_alias_set);
8681 insn = emit_move_insn (mem, reg);
8683 /* Include the registers in the mask. */
8684 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8686 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8689 /* If we use the link register, get it into r0. */
8690 if (info->lr_save_p)
8691 emit_move_insn (gen_rtx_REG (Pmode, 0),
8692 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8694 /* If we need to save CR, put it into r12. */
/* Only when r12 is not already holding the old sp (frame_reg_rtx);
   otherwise CR goes through r0 just before its store, below.  */
8695 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8697 cr_save_rtx = gen_rtx_REG (SImode, 12);
8698 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8701 /* Do any required saving of fpr's. If only one or two to save, do
8702 it ourselves. Otherwise, call function. */
8703 if (saving_FPRs_inline)
8706 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8707 if ((regs_ever_live[info->first_fp_reg_save+i]
8708 && ! call_used_regs[info->first_fp_reg_save+i]))
8711 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8712 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8713 GEN_INT (info->fp_save_offset
8716 mem = gen_rtx_MEM (DFmode, addr);
8717 set_mem_alias_set (mem, rs6000_sr_alias_set);
8719 insn = emit_move_insn (mem, reg);
8720 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8721 NULL_RTX, NULL_RTX);
/* Out-of-line FPR save: one PARALLEL that clobbers LR, USEs the _savef*
   symbol, and SETs every FPR save slot.  */
8724 else if (info->first_fp_reg_save != 64)
8728 const char *alloc_rname;
8730 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8732 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8734 LINK_REGISTER_REGNUM));
8735 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8736 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8737 alloc_rname = ggc_strdup (rname);
8738 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8739 gen_rtx_SYMBOL_REF (Pmode,
8741 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8744 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8745 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8746 GEN_INT (info->fp_save_offset
8747 + sp_offset + 8*i));
8748 mem = gen_rtx_MEM (DFmode, addr);
8749 set_mem_alias_set (mem, rs6000_sr_alias_set);
8751 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8753 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8754 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8755 NULL_RTX, NULL_RTX);
8758 /* Save GPRs. This is done as a PARALLEL if we are using
8759 the store-multiple instructions. */
8760 if (using_store_multiple)
8764 p = rtvec_alloc (32 - info->first_gp_reg_save);
8765 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8766 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8769 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8770 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8771 GEN_INT (info->gp_save_offset
8774 mem = gen_rtx_MEM (reg_mode, addr);
8775 set_mem_alias_set (mem, rs6000_sr_alias_set);
8777 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8779 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8780 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8781 NULL_RTX, NULL_RTX);
/* Individual GPR saves; the PIC register is saved too when small-data
   PIC (V.4 -fpic) or Darwin PIC needs it.  */
8786 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8787 if ((regs_ever_live[info->first_gp_reg_save+i]
8788 && ! call_used_regs[info->first_gp_reg_save+i])
8789 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8790 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8791 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8794 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8795 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8796 GEN_INT (info->gp_save_offset
8799 mem = gen_rtx_MEM (reg_mode, addr);
8800 set_mem_alias_set (mem, rs6000_sr_alias_set);
8802 insn = emit_move_insn (mem, reg);
8803 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8804 NULL_RTX, NULL_RTX);
8808 /* ??? There's no need to emit actual instructions here, but it's the
8809 easiest way to get the frame unwind information emitted. */
8810 if (current_function_calls_eh_return)
8812 unsigned int i, regno;
8818 regno = EH_RETURN_DATA_REGNO (i);
8819 if (regno == INVALID_REGNUM)
8822 reg = gen_rtx_REG (reg_mode, regno);
8823 addr = plus_constant (frame_reg_rtx,
8824 info->ehrd_offset + sp_offset
8825 + reg_size * (int) i);
8826 mem = gen_rtx_MEM (reg_mode, addr);
8827 set_mem_alias_set (mem, rs6000_sr_alias_set);
8829 insn = emit_move_insn (mem, reg);
8830 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8831 NULL_RTX, NULL_RTX);
8835 /* Save lr if we used it. */
8836 if (info->lr_save_p)
8838 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8839 GEN_INT (info->lr_save_offset + sp_offset));
8840 rtx reg = gen_rtx_REG (Pmode, 0);
8841 rtx mem = gen_rtx_MEM (Pmode, addr);
8842 /* This should not be of rs6000_sr_alias_set, because of
8843 __builtin_return_address. */
8845 insn = emit_move_insn (mem, reg);
8846 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8847 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8850 /* Save CR if we use any that must be preserved. */
8851 if (info->cr_save_p)
8853 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8854 GEN_INT (info->cr_save_offset + sp_offset));
8855 rtx mem = gen_rtx_MEM (SImode, addr);
8857 set_mem_alias_set (mem, rs6000_sr_alias_set);
8859 /* If r12 was used to hold the original sp, copy cr into r0 now
8861 if (REGNO (frame_reg_rtx) == 12)
8863 cr_save_rtx = gen_rtx_REG (SImode, 0);
8864 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8866 insn = emit_move_insn (mem, cr_save_rtx);
8868 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8869 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8870 OK. All we have to do is specify that _one_ condition code
8871 register is saved in this stack slot. The thrower's epilogue
8872 will then restore all the call-saved registers.
8873 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8874 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8875 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8878 /* Update stack and set back pointer unless this is V.4,
8879 for which it was done previously. */
8880 if (info->push_p && DEFAULT_ABI != ABI_V4)
8881 rs6000_emit_allocate_stack (info->total_size, FALSE);
8883 /* Set frame pointer, if needed. */
8884 if (frame_pointer_needed)
8886 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8888 RTX_FRAME_RELATED_P (insn) = 1;
8891 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8892 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8893 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8894 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8896 /* If emit_load_toc_table will use the link register, we need to save
8897 it. We use R11 for this purpose because emit_load_toc_table
8898 can use register 0. This allows us to use a plain 'blr' to return
8899 from the procedure more often. */
8900 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8901 && ! info->lr_save_p
8902 && EXIT_BLOCK_PTR->pred != NULL);
8903 if (save_LR_around_toc_setup)
8904 emit_move_insn (gen_rtx_REG (Pmode, 11),
8905 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8907 rs6000_emit_load_toc_table (TRUE);
8909 if (save_LR_around_toc_setup)
8910 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8911 gen_rtx_REG (Pmode, 11));
/* Darwin PIC: materialize the picbase into LR, then copy it into the
   PIC offset-table register (insns marked maybe-dead for later flow
   analysis).  */
8914 if (DEFAULT_ABI == ABI_DARWIN
8915 && flag_pic && current_function_uses_pic_offset_table)
8917 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8919 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8922 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8923 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8927 /* Write function prologue. */
/* TARGET_ASM_FUNCTION_PROLOGUE hook: write prologue-time assembler text.
   Emits .extern directives for the out-of-line FPR save/restore routines
   and (once per file) for the AIX common-mode arithmetic libcalls; if the
   target has no 'prologue' insn pattern, generates the prologue RTL here
   and runs final() over it directly.  SIZE is unused.  */
8930 rs6000_output_function_prologue (file, size)
8932 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8934 rs6000_stack_t *info = rs6000_stack_info ();
8936 if (TARGET_DEBUG_STACK)
8937 debug_stack_info (info);
8939 /* Write .extern for any function we will call to save and restore
8941 if (info->first_fp_reg_save < 64
8942 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8943 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8944 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8945 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8948 /* Write .extern for AIX common mode routines, if needed. */
/* common_mode_defined is a file-level latch so these are emitted at
   most once per translation unit.  */
8949 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8951 fputs ("\t.extern __mulh\n", file);
8952 fputs ("\t.extern __mull\n", file);
8953 fputs ("\t.extern __divss\n", file);
8954 fputs ("\t.extern __divus\n", file);
8955 fputs ("\t.extern __quoss\n", file);
8956 fputs ("\t.extern __quous\n", file);
8957 common_mode_defined = 1;
/* No 'prologue' pattern: emit the RTL prologue and print it now.  */
8960 if (! HAVE_prologue)
8964 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
8965 the "toplevel" insn chain. */
8966 emit_note (0, NOTE_INSN_DELETED);
8967 rs6000_emit_prologue ();
8968 emit_note (0, NOTE_INSN_DELETED);
8970 if (TARGET_DEBUG_STACK)
8971 debug_rtx_list (get_insns (), 100);
8972 final (get_insns (), file, FALSE, FALSE);
8976 rs6000_pic_labelno++;
8979 /* Emit function epilogue as insns.
8981 At present, dwarf2out_frame_debug_expr doesn't understand
8982 register restores, so we don't bother setting RTX_FRAME_RELATED_P
8983 anywhere in the epilogue. Most of the insns below would in any case
8984 need special notes to explain where r11 is in relation to the stack. */
/* Emit the function epilogue as RTL: recover the stack pointer (via the
   back chain or a constant add), restore AltiVec registers, VRSAVE, LR,
   CR, EH data registers, GPRs (load-multiple or individually), and FPRs
   (inline, or by jumping to the out-of-line _restf* routine which returns
   to our caller).  SIBCALL suppresses the final return.  No
   RTX_FRAME_RELATED_P is set here -- see the comment above this function.
   NOTE(review): lossy excerpt -- the embedded original line numbers skip,
   so braces, declarations, and some operands are elided throughout.  */
8987 rs6000_emit_epilogue (sibcall)
8990 rs6000_stack_t *info;
8991 int restoring_FPRs_inline;
8992 int using_load_multiple;
8993 int using_mfcr_multiple;
8994 int use_backchain_to_restore_sp;
8996 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
8997 rtx frame_reg_rtx = sp_reg_rtx;
8998 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8999 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9002 info = rs6000_stack_info ();
/* Strategy flags, mirroring the choices made in rs6000_emit_prologue.  */
9003 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9004 && info->first_gp_reg_save < 31);
9005 restoring_FPRs_inline = (sibcall
9006 || current_function_calls_eh_return
9007 || info->first_fp_reg_save == 64
9008 || FP_SAVE_INLINE (info->first_fp_reg_save));
9009 use_backchain_to_restore_sp = (frame_pointer_needed
9010 || current_function_calls_alloca
9011 || info->total_size > 32767);
/* mtcrf with a single field is faster on these CPUs, so prefer several
   single-field moves when restoring CR -- TODO confirm the missing CPU
   cases on the elided line.  */
9012 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9013 || rs6000_cpu == PROCESSOR_PPC603
9014 || rs6000_cpu == PROCESSOR_PPC750
9017 /* If we have a frame pointer, a call to alloca, or a large stack
9018 frame, restore the old stack pointer using the backchain. Otherwise,
9019 we know what size to update it with. */
9020 if (use_backchain_to_restore_sp)
9022 /* Under V.4, don't reset the stack pointer until after we're done
9023 loading the saved registers. */
9024 if (DEFAULT_ABI == ABI_V4)
9025 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9027 emit_move_insn (frame_reg_rtx,
9028 gen_rtx_MEM (Pmode, sp_reg_rtx));
/* Known-size frame: pop it with a constant add (deferred to the end on
   V.4, where sp_offset carries the amount instead).  */
9031 else if (info->push_p)
9033 if (DEFAULT_ABI == ABI_V4)
9034 sp_offset = info->total_size;
9037 emit_insn (TARGET_32BIT
9038 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9039 GEN_INT (info->total_size))
9040 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9041 GEN_INT (info->total_size)));
9045 /* Restore AltiVec registers if needed. */
9046 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9050 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9051 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9053 rtx addr, areg, mem;
/* Offset goes in r0: AltiVec loads have no reg+const addressing.  */
9055 areg = gen_rtx_REG (Pmode, 0);
9057 (areg, GEN_INT (info->altivec_save_offset
9059 + 16 * (i - info->first_altivec_reg_save)));
9061 /* AltiVec addressing mode is [reg+reg]. */
9062 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9063 mem = gen_rtx_MEM (V4SImode, addr);
9064 set_mem_alias_set (mem, rs6000_sr_alias_set);
9066 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9070 /* Restore VRSAVE if needed. */
9071 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9075 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9076 GEN_INT (info->vrsave_save_offset + sp_offset));
9077 mem = gen_rtx_MEM (SImode, addr);
9078 set_mem_alias_set (mem, rs6000_sr_alias_set);
9079 reg = gen_rtx_REG (SImode, 12);
9080 emit_move_insn (reg, mem);
9082 emit_insn (generate_set_vrsave (reg, info, 1));
9085 /* Get the old lr if we saved it. */
9086 if (info->lr_save_p)
9088 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9089 GEN_INT (info->lr_save_offset + sp_offset));
9090 rtx mem = gen_rtx_MEM (Pmode, addr);
9092 set_mem_alias_set (mem, rs6000_sr_alias_set);
9094 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9097 /* Get the old cr if we saved it. */
9098 if (info->cr_save_p)
9100 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9101 GEN_INT (info->cr_save_offset + sp_offset));
9102 rtx mem = gen_rtx_MEM (SImode, addr);
9104 set_mem_alias_set (mem, rs6000_sr_alias_set);
9106 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9109 /* Set LR here to try to overlap restores below. */
9110 if (info->lr_save_p)
9111 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9112 gen_rtx_REG (Pmode, 0));
9114 /* Load exception handler data registers, if needed. */
9115 if (current_function_calls_eh_return)
9117 unsigned int i, regno;
9123 regno = EH_RETURN_DATA_REGNO (i);
9124 if (regno == INVALID_REGNUM)
9127 addr = plus_constant (frame_reg_rtx,
9128 info->ehrd_offset + sp_offset
9129 + reg_size * (int) i);
9130 mem = gen_rtx_MEM (reg_mode, addr);
9131 set_mem_alias_set (mem, rs6000_sr_alias_set);
9133 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9137 /* Restore GPRs. This is done as a PARALLEL if we are using
9138 the load-multiple instructions. */
9139 if (using_load_multiple)
9142 p = rtvec_alloc (32 - info->first_gp_reg_save);
9143 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9145 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9146 GEN_INT (info->gp_save_offset
9149 rtx mem = gen_rtx_MEM (reg_mode, addr);
9151 set_mem_alias_set (mem, rs6000_sr_alias_set);
9154 gen_rtx_SET (VOIDmode,
9155 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9158 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Individual GPR restores, matching the prologue's save condition
   (including the PIC register cases).  */
9161 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9162 if ((regs_ever_live[info->first_gp_reg_save+i]
9163 && ! call_used_regs[info->first_gp_reg_save+i])
9164 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
9165 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9166 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9168 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9169 GEN_INT (info->gp_save_offset
9172 rtx mem = gen_rtx_MEM (reg_mode, addr);
9174 set_mem_alias_set (mem, rs6000_sr_alias_set);
9176 emit_move_insn (gen_rtx_REG (reg_mode,
9177 info->first_gp_reg_save + i),
9181 /* Restore fpr's if we need to do it without calling a function. */
9182 if (restoring_FPRs_inline)
9183 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9184 if ((regs_ever_live[info->first_fp_reg_save+i]
9185 && ! call_used_regs[info->first_fp_reg_save+i]))
9188 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9189 GEN_INT (info->fp_save_offset
9192 mem = gen_rtx_MEM (DFmode, addr);
9193 set_mem_alias_set (mem, rs6000_sr_alias_set);
9195 emit_move_insn (gen_rtx_REG (DFmode,
9196 info->first_fp_reg_save + i),
9200 /* If we saved cr, restore it here. Just those that were used. */
9201 if (info->cr_save_p)
9203 rtx r12_rtx = gen_rtx_REG (SImode, 12);
/* First count the live call-saved CR fields (count's declaration and
   increment are on elided lines).  */
9206 if (using_mfcr_multiple)
9208 for (i = 0; i < 8; i++)
9209 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
/* Restore several CR fields with one PARALLEL of single-field mtcrf
   unspecs sourced from r12.  */
9215 if (using_mfcr_multiple && count > 1)
9220 p = rtvec_alloc (count);
9223 for (i = 0; i < 8; i++)
9224 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9226 rtvec r = rtvec_alloc (2);
9227 RTVEC_ELT (r, 0) = r12_rtx;
9228 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9229 RTVEC_ELT (p, ndx) =
9230 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9231 gen_rtx_UNSPEC (CCmode, r, 20));
9234 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise restore each live CR field individually.  */
9239 for (i = 0; i < 8; i++)
9240 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9242 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9248 /* If this is V.4, unwind the stack pointer after all of the loads
9249 have been done. We need to emit a block here so that sched
9250 doesn't decide to move the sp change before the register restores
9251 (which may not have any obvious dependency on the stack). This
9252 doesn't hurt performance, because there is no scheduling that can
9253 be done after this point. */
9254 if (DEFAULT_ABI == ABI_V4)
9256 if (frame_reg_rtx != sp_reg_rtx)
9257 rs6000_emit_stack_tie ();
9259 if (use_backchain_to_restore_sp)
9261 emit_move_insn (sp_reg_rtx, frame_reg_rtx)
9263 else if (sp_offset != 0)
9265 emit_insn (Pmode == SImode
9266 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9267 GEN_INT (sp_offset))
9268 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9269 GEN_INT (sp_offset)));
/* Apply the EH return stack adjustment computed by the unwinder.  */
9273 if (current_function_calls_eh_return)
9275 rtx sa = EH_RETURN_STACKADJ_RTX;
9276 emit_insn (Pmode == SImode
9277 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9278 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
/* Build the return: a PARALLEL of (return), (use LR), and -- for
   out-of-line FPR restores -- a USE of the _restf* symbol plus the FPR
   reload SETs, emitted as a jump to that routine.  */
9284 if (! restoring_FPRs_inline)
9285 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9287 p = rtvec_alloc (2);
9289 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9290 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9292 LINK_REGISTER_REGNUM));
9294 /* If we have to restore more than two FP registers, branch to the
9295 restore function. It will return to our caller. */
9296 if (! restoring_FPRs_inline)
9300 const char *alloc_rname;
9302 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9303 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9304 alloc_rname = ggc_strdup (rname);
9305 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9306 gen_rtx_SYMBOL_REF (Pmode,
9309 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9312 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9313 GEN_INT (info->fp_save_offset + 8*i));
9314 mem = gen_rtx_MEM (DFmode, addr);
9315 set_mem_alias_set (mem, rs6000_sr_alias_set);
9317 RTVEC_ELT (p, i+3) =
9318 gen_rtx_SET (VOIDmode,
9319 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9324 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9328 /* Write function epilogue. */
9331 rs6000_output_function_epilogue (file, size)
9333 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9335 rs6000_stack_t *info = rs6000_stack_info ();
9336 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9338 if (! HAVE_epilogue)
9340 rtx insn = get_last_insn ();
9341 /* If the last insn was a BARRIER, we don't have to write anything except
9343 if (GET_CODE (insn) == NOTE)
9344 insn = prev_nonnote_insn (insn);
9345 if (insn == 0 || GET_CODE (insn) != BARRIER)
9347 /* This is slightly ugly, but at least we don't have two
9348 copies of the epilogue-emitting code. */
9351 /* A NOTE_INSN_DELETED is supposed to be at the start
9352 and end of the "toplevel" insn chain. */
9353 emit_note (0, NOTE_INSN_DELETED);
9354 rs6000_emit_epilogue (FALSE);
9355 emit_note (0, NOTE_INSN_DELETED);
9357 if (TARGET_DEBUG_STACK)
9358 debug_rtx_list (get_insns (), 100);
9359 final (get_insns (), file, FALSE, FALSE);
9364 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9367 We don't output a traceback table if -finhibit-size-directive was
9368 used. The documentation for -finhibit-size-directive reads
9369 ``don't output a @code{.size} assembler directive, or anything
9370 else that would cause trouble if the function is split in the
9371 middle, and the two halves are placed at locations far apart in
9372 memory.'' The traceback table has this property, since it
9373 includes the offset from the start of the function to the
9374 traceback table itself.
9376 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9377 different traceback table. */
9378 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9380 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9381 const char *language_string = lang_hooks.name;
9382 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9385 while (*fname == '.') /* V.4 encodes . in the name */
9388 /* Need label immediately before tbtab, so we can compute its offset
9389 from the function start. */
9392 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9393 ASM_OUTPUT_LABEL (file, fname);
9395 /* The .tbtab pseudo-op can only be used for the first eight
9396 expressions, since it can't handle the possibly variable
9397 length fields that follow. However, if you omit the optional
9398 fields, the assembler outputs zeros for all optional fields
9399 anyways, giving each variable length field is minimum length
9400 (as defined in sys/debug.h). Thus we can not use the .tbtab
9401 pseudo-op at all. */
9403 /* An all-zero word flags the start of the tbtab, for debuggers
9404 that have to find it by searching forward from the entry
9405 point or from the current pc. */
9406 fputs ("\t.long 0\n", file);
9408 /* Tbtab format type. Use format type 0. */
9409 fputs ("\t.byte 0,", file);
9411 /* Language type. Unfortunately, there doesn't seem to be any
9412 official way to get this info, so we use language_string. C
9413 is 0. C++ is 9. No number defined for Obj-C, so use the
9414 value for C for now. There is no official value for Java,
9415 although IBM appears to be using 13. There is no official value
9416 for Chill, so we've chosen 44 pseudo-randomly. */
9417 if (! strcmp (language_string, "GNU C")
9418 || ! strcmp (language_string, "GNU Objective-C"))
9420 else if (! strcmp (language_string, "GNU F77"))
9422 else if (! strcmp (language_string, "GNU Ada"))
9424 else if (! strcmp (language_string, "GNU Pascal"))
9426 else if (! strcmp (language_string, "GNU C++"))
9428 else if (! strcmp (language_string, "GNU Java"))
9430 else if (! strcmp (language_string, "GNU CHILL"))
9434 fprintf (file, "%d,", i);
9436 /* 8 single bit fields: global linkage (not set for C extern linkage,
9437 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9438 from start of procedure stored in tbtab, internal function, function
9439 has controlled storage, function has no toc, function uses fp,
9440 function logs/aborts fp operations. */
9441 /* Assume that fp operations are used if any fp reg must be saved. */
9442 fprintf (file, "%d,",
9443 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9445 /* 6 bitfields: function is interrupt handler, name present in
9446 proc table, function calls alloca, on condition directives
9447 (controls stack walks, 3 bits), saves condition reg, saves
9449 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9450 set up as a frame pointer, even when there is no alloca call. */
9451 fprintf (file, "%d,",
9452 ((optional_tbtab << 6)
9453 | ((optional_tbtab & frame_pointer_needed) << 5)
9454 | (info->cr_save_p << 1)
9455 | (info->lr_save_p)));
9457 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9459 fprintf (file, "%d,",
9460 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9462 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9463 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9467 /* Compute the parameter info from the function decl argument
9470 int next_parm_info_bit = 31;
9472 for (decl = DECL_ARGUMENTS (current_function_decl);
9473 decl; decl = TREE_CHAIN (decl))
9475 rtx parameter = DECL_INCOMING_RTL (decl);
9476 enum machine_mode mode = GET_MODE (parameter);
9478 if (GET_CODE (parameter) == REG)
9480 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9488 else if (mode == DFmode)
9493 /* If only one bit will fit, don't or in this entry. */
9494 if (next_parm_info_bit > 0)
9495 parm_info |= (bits << (next_parm_info_bit - 1));
9496 next_parm_info_bit -= 2;
9500 fixed_parms += ((GET_MODE_SIZE (mode)
9501 + (UNITS_PER_WORD - 1))
9503 next_parm_info_bit -= 1;
9509 /* Number of fixed point parameters. */
9510 /* This is actually the number of words of fixed point parameters; thus
9511 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9512 fprintf (file, "%d,", fixed_parms);
9514 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9516 /* This is actually the number of fp registers that hold parameters;
9517 and thus the maximum value is 13. */
9518 /* Set parameters on stack bit if parameters are not in their original
9519 registers, regardless of whether they are on the stack? Xlc
9520 seems to set the bit when not optimizing. */
9521 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9523 if (! optional_tbtab)
9526 /* Optional fields follow. Some are variable length. */
9528 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9530 /* There is an entry for each parameter in a register, in the order that
9531 they occur in the parameter list. Any intervening arguments on the
9532 stack are ignored. If the list overflows a long (max possible length
9533 34 bits) then completely leave off all elements that don't fit. */
9534 /* Only emit this long if there was at least one parameter. */
9535 if (fixed_parms || float_parms)
9536 fprintf (file, "\t.long %d\n", parm_info);
9538 /* Offset from start of code to tb table. */
9539 fputs ("\t.long ", file);
9540 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9542 RS6000_OUTPUT_BASENAME (file, fname);
9544 assemble_name (file, fname);
9548 RS6000_OUTPUT_BASENAME (file, fname);
9550 assemble_name (file, fname);
9554 /* Interrupt handler mask. */
9555 /* Omit this long, since we never set the interrupt handler bit
9558 /* Number of CTL (controlled storage) anchors. */
9559 /* Omit this long, since the has_ctl bit is never set above. */
9561 /* Displacement into stack of each CTL anchor. */
9562 /* Omit this list of longs, because there are no CTL anchors. */
9564 /* Length of function name. */
9565 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9567 /* Function name. */
9568 assemble_string (fname, strlen (fname));
9570 /* Register for alloca automatic storage; this is always reg 31.
9571 Only emit this if the alloca bit was set above. */
9572 if (frame_pointer_needed)
9573 fputs ("\t.byte 31\n", file);
9578 /* A C compound statement that outputs the assembler code for a thunk
9579 function, used to implement C++ virtual function calls with
9580 multiple inheritance. The thunk acts as a wrapper around a virtual
9581 function, adjusting the implicit object parameter before handing
9582 control off to the real function.
9584 First, emit code to add the integer DELTA to the location that
9585 contains the incoming first argument. Assume that this argument
9586 contains a pointer, and is the one used to pass the `this' pointer
9587 in C++. This is the incoming argument *before* the function
9588 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9589 values of all other incoming arguments.
9591 After the addition, emit code to jump to FUNCTION, which is a
9592 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9593 not touch the return address. Hence returning from FUNCTION will
9594 return to whoever called the current `thunk'.
9596 The effect must be as if FUNCTION had been called directly with the
9597 adjusted first argument. This macro is responsible for emitting
9598 all of the code for a thunk function; output_function_prologue()
9599 and output_function_epilogue() are not invoked.
9601 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9602 been extracted from it.) It might possibly be useful on some
9603 targets, but probably not.
9605 If you do not define this macro, the target-independent code in the
9606 C++ frontend will generate a less efficient heavyweight thunk that
9607 calls FUNCTION instead of jumping to it. The generic approach does
9608 not support varargs. */
9611 output_mi_thunk (file, thunk_fndecl, delta, function)
9613 tree thunk_fndecl ATTRIBUTE_UNUSED;
9617 const char *this_reg =
9618 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9621 const char *r0 = reg_names[0];
9622 const char *toc = reg_names[2];
9623 const char *schain = reg_names[11];
9624 const char *r12 = reg_names[12];
9626 static int labelno = 0;
9628 /* Small constants that can be done by one add instruction. */
9629 if (delta >= -32768 && delta <= 32767)
9631 if (! TARGET_NEW_MNEMONICS)
9632 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9634 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9637 /* Large constants that can be done by one addis instruction. */
9638 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9639 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9642 /* 32-bit constants that can be done by an add and addis instruction. */
9643 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9645 /* Break into two pieces, propagating the sign bit from the low
9646 word to the upper word. */
9647 int delta_high = delta >> 16;
9648 int delta_low = delta & 0xffff;
9649 if ((delta_low & 0x8000) != 0)
9652 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9655 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9658 if (! TARGET_NEW_MNEMONICS)
9659 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9661 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9664 /* 64-bit constants, fixme */
9668 /* Get the prefix in front of the names. */
9669 switch (DEFAULT_ABI)
9679 case ABI_AIX_NODESC:
9684 /* If the function is compiled in this module, jump to it directly.
9685 Otherwise, load up its address and jump to it. */
9687 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9689 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9690 && ! lookup_attribute ("longcall",
9691 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9693 fprintf (file, "\tb %s", prefix);
9694 assemble_name (file, fname);
9695 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9701 switch (DEFAULT_ABI)
9707 /* Set up a TOC entry for the function. */
9708 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9710 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9713 if (TARGET_MINIMAL_TOC)
9714 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9717 fputs ("\t.tc ", file);
9718 assemble_name (file, fname);
9719 fputs ("[TC],", file);
9721 assemble_name (file, fname);
9724 if (TARGET_MINIMAL_TOC)
9725 asm_fprintf (file, (TARGET_32BIT)
9726 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9727 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9728 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9729 assemble_name (file, buf);
9730 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9731 fputs ("-(.LCTOC1)", file);
9732 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9734 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9738 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9741 asm_fprintf (file, "\tmtctr %s\n", r0);
9743 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9746 asm_fprintf (file, "\tbctr\n");
9749 case ABI_AIX_NODESC:
9751 fprintf (file, "\tb %s", prefix);
9752 assemble_name (file, fname);
9753 if (flag_pic) fputs ("@plt", file);
9759 fprintf (file, "\tb %s", prefix);
9760 if (flag_pic && !machopic_name_defined_p (fname))
9761 assemble_name (file, machopic_stub_name (fname));
9763 assemble_name (file, fname);
9772 /* A quick summary of the various types of 'constant-pool tables'
9775 Target Flags Name One table per
9776 AIX (none) AIX TOC object file
9777 AIX -mfull-toc AIX TOC object file
9778 AIX -mminimal-toc AIX minimal TOC translation unit
9779 SVR4/EABI (none) SVR4 SDATA object file
9780 SVR4/EABI -fpic SVR4 pic object file
9781 SVR4/EABI -fPIC SVR4 PIC translation unit
9782 SVR4/EABI -mrelocatable EABI TOC function
9783 SVR4/EABI -maix AIX TOC object file
9784 SVR4/EABI -maix -mminimal-toc
9785 AIX minimal TOC translation unit
9787 Name Reg. Set by entries contains:
9788 made by addrs? fp? sum?
9790 AIX TOC 2 crt0 as Y option option
9791 AIX minimal TOC 30 prolog gcc Y Y option
9792 SVR4 SDATA 13 crt0 gcc N Y N
9793 SVR4 pic 30 prolog ld Y not yet N
9794 SVR4 PIC 30 prolog gcc Y option option
9795 EABI TOC 30 prolog gcc Y option option
9799 /* Hash table stuff for keeping track of TOC entries. */
/* One entry in the TOC hash table: a constant (the key), its mode,
   and the LC label it was emitted under, so output_toc can common
   duplicate TOC entries.  */
9801 struct toc_hash_struct
9803 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9804 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9806 enum machine_mode key_mode;
/* The table itself; created lazily elsewhere -- TODO confirm where.  */
9810 static htab_t toc_hash_table;
9812 /* Hash functions for the hash table. */
/* Hash the constant rtx K for the TOC hash table.  Mixes the rtx
   code and mode, then folds in each operand according to the rtx
   format string, using 613 and 1231 as multipliers and recursing
   for rtx-valued ('e') operands.  */
9815 rs6000_hash_constant (k)
9818 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9819 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9820 int flen = strlen (format);
/* LABEL_REFs are hashed through their label, not by walking the
   generic operand formats below.  */
9823 if (GET_CODE (k) == LABEL_REF)
9824 return result * 1231 + X0INT (XEXP (k, 0), 3);
9826 if (GET_CODE (k) == CONST_DOUBLE)
9828 else if (GET_CODE (k) == CODE_LABEL)
9833 for (; fidx < flen; fidx++)
9834 switch (format[fidx])
9839 const char *str = XSTR (k, fidx);
9841 result = result * 613 + len;
9842 for (i = 0; i < len; i++)
9843 result = result * 613 + (unsigned) str[i];
9848 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9852 result = result * 613 + (unsigned) XINT (k, fidx);
/* HOST_WIDE_INT may be wider than `unsigned'; if so, fold it in
   one unsigned-sized chunk at a time.  */
9855 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9856 result = result * 613 + (unsigned) XWINT (k, fidx);
9860 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9861 result = result * 613 + (unsigned) (XWINT (k, fidx)
/* htab hash callback: hash a TOC table entry by its key rtx,
   xor'd with the entry's machine mode.  */
9872 toc_hash_function (hash_entry)
9873 const void * hash_entry;
9875 const struct toc_hash_struct *thc =
9876 (const struct toc_hash_struct *) hash_entry;
9877 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9880 /* Compare H1 and H2 for equivalence. */
/* htab equality callback: two TOC entries are equal when their modes
   agree and their key rtxes are equivalent.  CONST_DOUBLE and
   LABEL_REF keys need special, field-wise comparison (see the
   "Gotcha" comment below).  */
9883 toc_hash_eq (h1, h2)
9887 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9888 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9890 if (((const struct toc_hash_struct *) h1)->key_mode
9891 != ((const struct toc_hash_struct *) h2)->key_mode)
9894 /* Gotcha: One of these const_doubles will be in memory.
9895 The other may be on the constant-pool chain.
9896 So rtx_equal_p will think they are different... */
9899 if (GET_CODE (r1) != GET_CODE (r2)
9900 || GET_MODE (r1) != GET_MODE (r2))
9902 if (GET_CODE (r1) == CONST_DOUBLE)
9904 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
/* Start at 1: operand 0 is the chain/mem link mentioned above,
   not part of the constant's value.  */
9906 for (i = 1; i < format_len; i++)
9907 if (XWINT (r1, i) != XWINT (r2, i))
9912 else if (GET_CODE (r1) == LABEL_REF)
9913 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9914 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9916 return rtx_equal_p (r1, r2);
9919 /* Mark the hash table-entry HASH_ENTRY. */
/* GC traversal callback: mark one TOC hash-table slot (and the
   minimum of its key rtx) so the collector does not free it.  */
9922 toc_hash_mark_entry (hash_slot, unused)
9924 void * unused ATTRIBUTE_UNUSED;
9926 const struct toc_hash_struct * hash_entry =
9927 *(const struct toc_hash_struct **) hash_slot;
9928 rtx r = hash_entry->key;
9929 ggc_set_mark (hash_entry);
9930 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9931 if (GET_CODE (r) == LABEL_REF)
9934 ggc_set_mark (XEXP (r, 0));
9941 /* Mark all the elements of the TOC hash-table *HT. */
/* GC root marker: walk the TOC hash table *VHT, marking every entry
   via toc_hash_mark_entry.  */
9944 toc_hash_mark_table (vht)
9949 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9952 /* These are the names given by the C++ front-end to vtables, and
9953 vtable-like objects. Ideally, this logic should not be here;
9954 instead, there should be some programmatic way of inquiring as
9955 to whether or not an object is a vtable. */
/* Nonzero if NAME names a vtable or vtable-like object emitted by the
   C++ front end: the old "_vt." mangling, or the new-ABI _ZTV
   (vtable), _ZTT (VTT), and _ZTC (construction vtable) prefixes.
   Note: the original expansion referenced the call-site identifier
   `name' rather than the macro parameter; that only worked because
   every caller happened to pass a variable literally named `name'.
   Use the (parenthesized) parameter so the macro is hygienic.  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen("_vt.")) == 0 \
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output a reference to the symbol X to FILE.  Vtable symbols get
   the raw (base) name rather than a section-relative reference, for
   the reason given in the comment below.  */
9964 rs6000_output_symbol_ref (file, x)
9968 /* Currently C++ toc references to vtables can be emitted before it
9969 is decided whether the vtable is public or private. If this is
9970 the case, then the linker will eventually complain that there is
9971 a reference to an unknown section. Thus, for vtables only,
9972 we emit the TOC reference to reference the symbol and not the
9974 const char *name = XSTR (x, 0);
9976 if (VTABLE_NAME_P (name))
9978 RS6000_OUTPUT_BASENAME (file, name);
9981 assemble_name (file, name);
9984 /* Output a TOC entry. We derive the entry name from what is being
/* Emit TOC entry number LABELNO for constant X of mode MODE into
   FILE, commoning duplicates through toc_hash_table when the linker
   will not eliminate them for us.  FP and integer constants get
   hand-formatted .tc/.long/.llong entries; everything else goes
   through output_addr_const at the end.  */
9988 output_toc (file, x, labelno, mode)
9992 enum machine_mode mode;
9995 const char *name = buf;
9996 const char *real_name;
10003 /* When the linker won't eliminate them, don't output duplicate
10004 TOC entries (this happens on AIX if there is any kind of TOC,
10005 and on SVR4 under -fPIC or -mrelocatable). */
10008 struct toc_hash_struct *h;
10011 h = ggc_alloc (sizeof (*h));
10013 h->key_mode = mode;
10014 h->labelno = labelno;
10016 found = htab_find_slot (toc_hash_table, h, 1);
10017 if (*found == NULL)
10019 else /* This is indeed a duplicate.
10020 Set this label equal to that label. */
10022 fputs ("\t.set ", file);
10023 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10024 fprintf (file, "%d,", labelno);
10025 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10026 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10032 /* If we're going to put a double constant in the TOC, make sure it's
10033 aligned properly when strict alignment is on. */
10034 if (GET_CODE (x) == CONST_DOUBLE
10035 && STRICT_ALIGNMENT
10036 && GET_MODE_BITSIZE (mode) >= 64
10037 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10038 ASM_OUTPUT_ALIGN (file, 3);
10041 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10043 /* Handle FP constants specially. Note that if we have a minimal
10044 TOC, things we put here aren't actually in the TOC, so we can allow
10046 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10048 REAL_VALUE_TYPE rv;
10051 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10052 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
/* 64-bit pointers: emit the double as one doubleword.  */
10056 if (TARGET_MINIMAL_TOC)
10057 fputs (DOUBLE_INT_ASM_OP, file);
10059 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10060 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
/* 32-bit pointers: emit the double as two words.  */
10065 if (TARGET_MINIMAL_TOC)
10066 fputs ("\t.long ", file);
10068 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10069 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10073 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10075 REAL_VALUE_TYPE rv;
10078 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10079 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10083 if (TARGET_MINIMAL_TOC)
10084 fputs (DOUBLE_INT_ASM_OP, file);
/* Single float in a 64-bit slot: value goes in the high word.  */
10086 fprintf (file, "\t.tc FS_%lx[TC],", l);
10087 fprintf (file, "0x%lx00000000\n", l);
10092 if (TARGET_MINIMAL_TOC)
10093 fputs ("\t.long ", file);
10095 fprintf (file, "\t.tc FS_%lx[TC],", l);
10096 fprintf (file, "0x%lx\n", l);
/* Integer constants: CONST_INT, or a VOIDmode CONST_DOUBLE holding
   a value wider than one host wide int.  */
10100 else if (GET_MODE (x) == VOIDmode
10101 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10103 unsigned HOST_WIDE_INT low;
10104 HOST_WIDE_INT high;
10106 if (GET_CODE (x) == CONST_DOUBLE)
10108 low = CONST_DOUBLE_LOW (x);
10109 high = CONST_DOUBLE_HIGH (x);
10112 #if HOST_BITS_PER_WIDE_INT == 32
/* Sign-extend the CONST_INT into the high word.  */
10115 high = (low & 0x80000000) ? ~0 : 0;
10119 low = INTVAL (x) & 0xffffffff;
10120 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10124 /* TOC entries are always Pmode-sized, but since this
10125 is a bigendian machine then if we're putting smaller
10126 integer constants in the TOC we have to pad them.
10127 (This is still a win over putting the constants in
10128 a separate constant pool, because then we'd have
10129 to have both a TOC entry _and_ the actual constant.)
10131 For a 32-bit target, CONST_INT values are loaded and shifted
10132 entirely within `low' and can be stored in one TOC entry. */
10134 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10135 abort ();/* It would be easy to make this work, but it doesn't now. */
10137 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10138 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10139 POINTER_SIZE, &low, &high, 0);
10143 if (TARGET_MINIMAL_TOC)
10144 fputs (DOUBLE_INT_ASM_OP, file);
10146 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long)high, (long)low);
10147 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10152 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10154 if (TARGET_MINIMAL_TOC)
10155 fputs ("\t.long ", file);
10157 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10158 (long)high, (long)low);
10159 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10163 if (TARGET_MINIMAL_TOC)
10164 fputs ("\t.long ", file);
10166 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10167 fprintf (file, "0x%lx\n", (long) low);
/* Symbolic constants: a symbol or label, possibly plus an offset
   (wrapped in CONST).  */
10173 if (GET_CODE (x) == CONST)
10175 if (GET_CODE (XEXP (x, 0)) != PLUS)
10178 base = XEXP (XEXP (x, 0), 0);
10179 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10182 if (GET_CODE (base) == SYMBOL_REF)
10183 name = XSTR (base, 0);
10184 else if (GET_CODE (base) == LABEL_REF)
10185 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10186 else if (GET_CODE (base) == CODE_LABEL)
10187 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10191 STRIP_NAME_ENCODING (real_name, name);
10192 if (TARGET_MINIMAL_TOC)
10193 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10196 fprintf (file, "\t.tc %s", real_name);
/* Encode the offset's sign into the entry name (.N/.P suffix).  */
10199 fprintf (file, ".N%d", - offset);
10201 fprintf (file, ".P%d", offset);
10203 fputs ("[TC],", file);
10206 /* Currently C++ toc references to vtables can be emitted before it
10207 is decided whether the vtable is public or private. If this is
10208 the case, then the linker will eventually complain that there is
10209 a TOC reference to an unknown section. Thus, for vtables only,
10210 we emit the TOC reference to reference the symbol and not the
10212 if (VTABLE_NAME_P (name))
10214 RS6000_OUTPUT_BASENAME (file, name);
10216 fprintf (file, "%d", offset);
10217 else if (offset > 0)
10218 fprintf (file, "+%d", offset);
10221 output_addr_const (file, x);
10225 /* Output an assembler pseudo-op to write an ASCII string of N characters
10226 starting at P to FILE.
10228 On the RS/6000, we have to do this using the .byte operation and
10229 write out special characters outside the quoted string.
10230 Also, the assembler is broken; very long strings are truncated,
10231 so we must artificially break them up early. */
/* Emit the N bytes at P as .byte directives: printable runs inside
   quoted strings, other bytes as decimal values, breaking strings
   before the assembler's length limit (see comment above).  */
10234 output_ascii (file, p, n)
10240 int i, count_string;
10241 const char *for_string = "\t.byte \"";
10242 const char *for_decimal = "\t.byte ";
10243 const char *to_close = NULL;
10246 for (i = 0; i < n; i++)
/* Printable ASCII goes inside a quoted string...  */
10249 if (c >= ' ' && c < 0177)
10252 fputs (for_string, file);
10255 /* Write two quotes to get one. */
10263 for_decimal = "\"\n\t.byte ";
/* ...but close the string early so the (broken) assembler never
   sees one longer than 512 characters.  */
10267 if (count_string >= 512)
10269 fputs (to_close, file);
10271 for_string = "\t.byte \"";
10272 for_decimal = "\t.byte ";
/* Non-printable bytes are emitted as decimal .byte values.  */
10280 fputs (for_decimal, file);
10281 fprintf (file, "%d", c);
10283 for_string = "\n\t.byte \"";
10284 for_decimal = ", ";
10290 /* Now close the string if we have written one. Then end the line. */
10292 fputs (to_close, file);
10295 /* Generate a unique section name for FILENAME for a section type
10296 represented by SECTION_DESC. Output goes into BUF.
10298 SECTION_DESC can be any string, as long as it is different for each
10299 possible section type.
10301 We name the section in the same manner as xlc. The name begins with an
10302 underscore followed by the filename (after stripping any leading directory
10303 names) with the last period replaced by the string SECTION_DESC. If
10304 FILENAME does not contain a period, SECTION_DESC is appended to the end of
/* Build into *BUF an xlc-style section name: `_' + FILENAME (with
   leading directories stripped and the last `.' replaced by
   SECTION_DESC; SECTION_DESC is appended if there is no period).
   The buffer is permalloc'd, so it lives for the whole compilation.  */
10308 rs6000_gen_section_name (buf, filename, section_desc)
10310 const char *filename;
10311 const char *section_desc;
10313 const char *q, *after_last_slash, *last_period = 0;
/* First pass: find the basename and its last period.  */
10317 after_last_slash = filename;
10318 for (q = filename; *q; q++)
10321 after_last_slash = q + 1;
10322 else if (*q == '.')
/* +2: one for the leading `_', one for the trailing NUL.  */
10326 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10327 *buf = (char *) permalloc (len);
/* Second pass: copy, substituting SECTION_DESC at the last period;
   only alphanumeric characters are copied through otherwise.  */
10332 for (q = after_last_slash; *q; q++)
10334 if (q == last_period)
10336 strcpy (p, section_desc);
10337 p += strlen (section_desc);
10340 else if (ISALNUM (*q))
/* No period in FILENAME: append SECTION_DESC at the end.  */
10344 if (last_period == 0)
10345 strcpy (p, section_desc);
10350 /* Emit profile function. */
/* Emit RTL to call the profiling routine (RS6000_MCOUNT) for profile
   label LABELNO.  Only the AIX and Darwin ABIs do anything here.  */
10353 output_profile_hook (labelno)
10356 if (DEFAULT_ABI == ABI_AIX)
10359 const char *label_name;
/* AIX: pass the address of the LP<labelno> count word to mcount.  */
10364 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10365 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10366 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10368 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10371 else if (DEFAULT_ABI == ABI_DARWIN)
10373 const char *mcount_name = RS6000_MCOUNT;
10374 int caller_addr_regno = LINK_REGISTER_REGNUM;
10376 /* Be conservative and always set this, at least for now. */
10377 current_function_uses_pic_offset_table = 1;
10380 /* For PIC code, set up a stub and collect the caller's address
10381 from r0, which is where the prologue puts it. */
10384 mcount_name = machopic_stub_name (mcount_name);
10385 if (current_function_uses_pic_offset_table)
10386 caller_addr_regno = 0;
/* Darwin mcount takes the caller's return address as argument.  */
10389 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10391 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10395 /* Write function profiler code. */
/* Write the textual profiler prologue for profile label LABELNO to
   FILE.  The code sequence materializes the LP<labelno> address in
   r0 (differently for non-PIC, -fpic and -fPIC), saves the link
   register, and calls RS6000_MCOUNT.  */
10398 output_function_profiler (file, labelno)
10404 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10405 switch (DEFAULT_ABI)
10411 case ABI_AIX_NODESC:
10412 fprintf (file, "\tmflr %s\n", reg_names[0]);
/* -fpic: fetch the label's address from the GOT via r12.  */
10415 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10416 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10417 reg_names[0], reg_names[1]);
10418 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10419 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10420 assemble_name (file, buf);
10421 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
/* -fPIC: compute the label address pc-relatively via an inline
   literal word.  */
10423 else if (flag_pic > 1)
10425 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10426 reg_names[0], reg_names[1]);
10427 /* Now, we need to get the address of the label. */
10428 fputs ("\tbl 1f\n\t.long ", file);
10429 assemble_name (file, buf);
10430 fputs ("-.\n1:", file);
10431 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10432 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10433 reg_names[0], reg_names[11]);
10434 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10435 reg_names[0], reg_names[0], reg_names[11]);
/* Non-PIC: build the label address with lis/la (high/low halves).  */
10439 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10440 assemble_name (file, buf);
10441 fputs ("@ha\n", file);
10442 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10443 reg_names[0], reg_names[1]);
10444 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10445 assemble_name (file, buf);
10446 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
/* Preserve the static chain register across the mcount call,
   parking it in r30.  */
10449 if (current_function_needs_context)
10450 asm_fprintf (file, "\tmr %s,%s\n",
10451 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10452 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10453 if (current_function_needs_context)
10454 asm_fprintf (file, "\tmr %s,%s\n",
10455 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10460 /* Don't do anything, done in output_profile_hook (). */
10466 /* Adjust the cost of a scheduling dependency. Return the new cost of
10467 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* Scheduler hook: return the (possibly adjusted) COST of the
   dependency LINK between DEP_INSN and INSN.  Only true data
   dependences (REG_NOTE_KIND == 0) are adjusted.  */
10470 rs6000_adjust_cost (insn, link, dep_insn, cost)
10473 rtx dep_insn ATTRIBUTE_UNUSED;
10476 if (! recog_memoized (insn))
10479 if (REG_NOTE_KIND (link) != 0)
10482 if (REG_NOTE_KIND (link) == 0)
10484 /* Data dependency; DEP_INSN writes a register that INSN reads
10485 some cycles later. */
10486 switch (get_attr_type (insn))
10489 /* Tell the first scheduling pass about the latency between
10490 a mtctr and bctr (and mtlr and br/blr). The first
10491 scheduling pass will not know about this latency since
10492 the mtctr instruction, which has the latency associated
10493 to it, will be generated by reload. */
10494 return TARGET_POWER ? 5 : 4;
10496 /* Leave some extra cycles between a compare and its
10497 dependent branch, to inhibit expensive mispredicts. */
10498 if ((rs6000_cpu_attr == CPU_PPC750
10499 || rs6000_cpu_attr == CPU_PPC7400
10500 || rs6000_cpu_attr == CPU_PPC7450)
10501 && recog_memoized (dep_insn)
10502 && (INSN_CODE (dep_insn) >= 0)
10503 && (get_attr_type (dep_insn) == TYPE_COMPARE
10504 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10505 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10506 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10511 /* Fall out to return default cost. */
10517 /* A C statement (sans semicolon) to update the integer scheduling
10518 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10519 INSN earlier, increase the priority to execute INSN later. Do not
10520 define this macro if you do not need to adjust the scheduling
10521 priorities of insns. */
/* Scheduler hook: return an adjusted scheduling priority for INSN
   (see the macro comment above).  */
10524 rs6000_adjust_priority (insn, priority)
10525 rtx insn ATTRIBUTE_UNUSED;
10528 /* On machines (like the 750) which have asymmetric integer units,
10529 where one integer unit can do multiply and divides and the other
10530 can't, reduce the priority of multiply/divide so it is scheduled
10531 before other integer operations. */
10534 if (! INSN_P (insn))
10537 if (GET_CODE (PATTERN (insn)) == USE)
10540 switch (rs6000_cpu_attr) {
10542 switch (get_attr_type (insn))
/* NOTE(review): this stderr print looks like leftover debug output;
   confirm in the full source whether it is #if 0'd or guarded.  */
10549 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10550 priority, priority);
/* Guard against overflowing the priority when boosting it.  */
10551 if (priority >= 0 && priority < 0x01000000)
10561 /* Return how many instructions the machine can issue per cycle. */
/* Scheduler hook: number of instructions the target CPU can issue
   per cycle ("?" marks uncertain figures in the original).  */
10564 rs6000_issue_rate ()
10566 switch (rs6000_cpu_attr) {
10567 case CPU_RIOS1: /* ? */
10569 case CPU_PPC601: /* ? */
10588 /* Length in units of the trampoline for entering a nested function. */
/* Size in bytes of the trampoline for entering a nested function;
   depends on the ABI and on 32- vs 64-bit target.  */
10591 rs6000_trampoline_size ()
10595 switch (DEFAULT_ABI)
10601 ret = (TARGET_32BIT) ? 12 : 24;
10606 case ABI_AIX_NODESC:
10607 ret = (TARGET_32BIT) ? 40 : 48;
10614 /* Emit RTL insns to initialize the variable parts of a trampoline.
10615 FNADDR is an RTX for the address of the function's pure code.
10616 CXT is an RTX for the static chain value for the function. */
/* Initialize the variable parts of a trampoline at ADDR: FNADDR is
   the function's code address, CXT its static chain value.  */
10619 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10624 enum machine_mode pmode = Pmode;
10625 int regsize = (TARGET_32BIT) ? 4 : 8;
10626 rtx ctx_reg = force_reg (pmode, cxt);
10628 switch (DEFAULT_ABI)
10633 /* Macros to shorten the code expansions below. */
10634 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10635 #define MEM_PLUS(addr,offset) \
10636 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10638 /* Under AIX, just build the 3 word function descriptor */
10641 rtx fn_reg = gen_reg_rtx (pmode);
10642 rtx toc_reg = gen_reg_rtx (pmode);
/* Copy the code address and TOC pointer out of FNADDR's descriptor,
   then store code, TOC, and static chain into the new descriptor.  */
10643 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10644 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10645 emit_move_insn (MEM_DEREF (addr), fn_reg);
10646 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10647 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10651 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10654 case ABI_AIX_NODESC:
10655 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10656 FALSE, VOIDmode, 4,
10658 GEN_INT (rs6000_trampoline_size ()), SImode,
10668 /* Table of valid machine attributes. */
10670 const struct attribute_spec rs6000_attribute_table[] =
10672 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
/* "longcall" takes no arguments and applies to function types.  */
10673 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
/* Sentinel: the all-NULL/false entry terminates the table.  */
10674 { NULL, 0, 0, false, false, false, NULL }
10677 /* Handle a "longcall" attribute; arguments as in struct
10678 attribute_spec.handler. */
/* Attribute handler for "longcall": warn and suppress the attribute
   when *NODE is not one of the accepted tree codes.  */
10681 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10684 tree args ATTRIBUTE_UNUSED;
10685 int flags ATTRIBUTE_UNUSED;
10686 bool *no_add_attrs;
10688 if (TREE_CODE (*node) != FUNCTION_TYPE
10689 && TREE_CODE (*node) != FIELD_DECL
10690 && TREE_CODE (*node) != TYPE_DECL)
10692 warning ("`%s' attribute only applies to functions",
10693 IDENTIFIER_POINTER (name));
10694 *no_add_attrs = true;
10700 /* Return a reference suitable for calling a function with the
10701 longcall attribute. */
/* Return a call target for CALL_REF under the longcall convention:
   strip any leading System V `.' from the symbol name, then force
   the address into a register so the call goes through it.  */
10704 rs6000_longcall_ref (call_ref)
10707 const char *call_name;
10710 if (GET_CODE (call_ref) != SYMBOL_REF)
10713 /* System V adds '.' to the internal name, so skip them. */
10714 call_name = XSTR (call_ref, 0);
10715 if (*call_name == '.')
10717 while (*call_name == '.')
/* Rebuild the SYMBOL_REF from the stripped, interned name.  */
10720 node = get_identifier (call_name);
10721 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10724 return force_reg (Pmode, call_ref);
10728 /* A C statement or statements to switch to the appropriate section
10729 for output of RTX in mode MODE. You can assume that RTX is some
10730 kind of constant in RTL. The argument MODE is redundant except in
10731 the case of a `const_int' rtx. Select the section by calling
10732 `text_section' or one of the alternatives for other sections.
10734 Do not define this macro if you put all constants in the read-only
10737 #ifdef USING_ELFOS_H
/* (ELF only) Choose the output section for constant X of mode MODE;
   TOC-eligible constants are routed to the TOC.  */
10740 rs6000_select_rtx_section (mode, x)
10741 enum machine_mode mode;
10744 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10750 /* A C statement or statements to switch to the appropriate
10751 section for output of DECL. DECL is either a `VAR_DECL' node
10752 or a constant of some sort. RELOC indicates whether forming
10753 the initial value of DECL requires link-time relocations. */
/* Choose the output section for DECL, indexing sec_funcs by two
   bits: read-only vs writable, and small-data vs regular.  */
10756 rs6000_select_section (decl, reloc)
10760 int size = int_size_in_bytes (TREE_TYPE (decl));
10763 static void (* const sec_funcs[4]) PARAMS ((void)) = {
/* Small-data placement needs a known size within -G, and an -msdata
   mode that permits it.  */
10770 needs_sdata = (size > 0
10771 && size <= g_switch_value
10772 && rs6000_sdata != SDATA_NONE
10773 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
/* A decl is read-only when PIC needs no relocation against it and
   its initializer is a constant.  */
10775 if (TREE_CODE (decl) == STRING_CST)
10776 readonly = ! flag_writable_strings;
10777 else if (TREE_CODE (decl) == VAR_DECL)
10778 readonly = (! (flag_pic && reloc)
10779 && TREE_READONLY (decl)
10780 && ! TREE_SIDE_EFFECTS (decl)
10781 && DECL_INITIAL (decl)
10782 && DECL_INITIAL (decl) != error_mark_node
10783 && TREE_CONSTANT (DECL_INITIAL (decl)));
10784 else if (TREE_CODE (decl) == CONSTRUCTOR)
10785 readonly = (! (flag_pic && reloc)
10786 && ! TREE_SIDE_EFFECTS (decl)
10787 && TREE_CONSTANT (decl));
/* Read-only small data (.sdata2) only exists under EABI.  */
10790 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10793 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10796 /* A C statement to build up a unique section name, expressed as a
10797 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10798 RELOC indicates whether the initial value of EXP requires
10799 link-time relocations. If you do not define this macro, GCC will use
10800 the symbol name prefixed by `.' as the section name. Note - this
10801 macro can now be called for uninitialized data items as well as
10802 initialised data and functions. */
/* NOTE(review): fragmentary function -- the header, local declarations
   (sec, size, needs_sdata, readonly, len, name, string) and several
   branch bodies are elided in this copy.  The visible logic picks a
   row in the prefix table from the decl's classification, then builds
   "<prefix><symbol-name>" as the section name.  */
10805 rs6000_unique_section (decl, reloc)
10813 const char *prefix;
/* Row = section class; column [1] is the .gnu.linkonce variant used
   for DECL_ONE_ONLY decls.  Rows: 0=.rodata, 1=.sdata2, 2=.data,
   3=.sdata, 4=.bss, 5=.sbss, 6=.text.  */
10815 static const char *const prefixes[7][2] =
10817 { ".rodata.", ".gnu.linkonce.r." },
10818 { ".sdata2.", ".gnu.linkonce.s2." },
10819 { ".data.", ".gnu.linkonce.d." },
10820 { ".sdata.", ".gnu.linkonce.s." },
10821 { ".bss.", ".gnu.linkonce.b." },
10822 { ".sbss.", ".gnu.linkonce.sb." },
10823 { ".text.", ".gnu.linkonce.t." }
/* Functions presumably select the .text row (6) in the elided body --
   TODO confirm.  */
10826 if (TREE_CODE (decl) == FUNCTION_DECL)
/* Same read-only test as rs6000_select_section, except the
   DECL_INITIAL null/error checks appear to be absent here (they are
   handled separately below).  */
10835 if (TREE_CODE (decl) == STRING_CST)
10836 readonly = ! flag_writable_strings;
10837 else if (TREE_CODE (decl) == VAR_DECL)
10838 readonly = (! (flag_pic && reloc)
10839 && TREE_READONLY (decl)
10840 && ! TREE_SIDE_EFFECTS (decl)
10841 && TREE_CONSTANT (DECL_INITIAL (decl)));
10843 size = int_size_in_bytes (TREE_TYPE (decl));
10844 needs_sdata = (size > 0
10845 && size <= g_switch_value
10846 && rs6000_sdata != SDATA_NONE
10847 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
/* Uninitialized decls go to a bss-style row; the sec assignments are
   in the elided lines -- TODO confirm exact row selection.  */
10849 if (DECL_INITIAL (decl) == 0
10850 || DECL_INITIAL (decl) == error_mark_node
10852 else if (! readonly)
10859 /* .sdata2 is only for EABI. */
10860 if (sec == 0 && rs6000_sdata != SDATA_EABI)
/* Strip any target encoding (e.g. leading '@'/'.') before using the
   assembler name as the section-name suffix.  */
10866 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10867 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10868 len = strlen (name) + strlen (prefix);
10869 string = alloca (len + 1);
10871 sprintf (string, "%s%s", prefix, name);
10873 DECL_SECTION_NAME (decl) = build_string (len, string);
10877 /* If we are referencing a function that is static or is known to be
10878 in this file, make the SYMBOL_REF special. We can use this to indicate
10879 that we can branch to this function without emitting a no-op after the
10880 call. For real AIX calling sequences, we also replace the
10881 function name with the real name (1 or 2 leading .'s), rather than
10882 the function descriptor name. This saves a lot of overriding code
10883 to read the prefixes. */
/* NOTE(review): fragmentary function -- header, braces and several
   lines are elided in this copy.  */
10886 rs6000_encode_section_info (decl)
10889 if (TREE_CODE (decl) == FUNCTION_DECL)
10891 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
/* Mark functions that bind locally (already emitted, or not public)
   and are not weak, so calls need no descriptor indirection/nop.  */
10892 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10893 && ! DECL_WEAK (decl))
10894 SYMBOL_REF_FLAG (sym_ref) = 1;
/* NOTE(review): the ternary below is always 1 under this `if'; the
   original condition almost certainly also tested a second AIX-style
   ABI (elided line) for which len1 would be 2 -- confirm against the
   full source before touching this.  Effect: prepend len1 dots to the
   symbol name (the dot lines themselves are elided here).  */
10896 if (DEFAULT_ABI == ABI_AIX)
10898 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10899 size_t len2 = strlen (XSTR (sym_ref, 0));
10900 char *str = alloca (len1 + len2 + 1);
10903 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10905 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
/* V.4 small-data variables: if small enough (or already placed in a
   small-data section by name), prefix the symbol with '@' so the
   addressing code can recognize sdata references.  */
10908 else if (rs6000_sdata != SDATA_NONE
10909 && DEFAULT_ABI == ABI_V4
10910 && TREE_CODE (decl) == VAR_DECL)
10912 int size = int_size_in_bytes (TREE_TYPE (decl));
10913 tree section_name = DECL_SECTION_NAME (decl);
10914 const char *name = (char *)0;
10919 if (TREE_CODE (section_name) == STRING_CST)
10921 name = TREE_STRING_POINTER (section_name);
10922 len = TREE_STRING_LENGTH (section_name);
/* NOTE(review): the line joining the size test and the name tests is
   elided; presumably the section-name match is an alternative
   (`||') to the -G size test -- TODO confirm.  */
10928 if ((size > 0 && size <= g_switch_value)
10930 && ((len == sizeof (".sdata") - 1
10931 && strcmp (name, ".sdata") == 0)
10932 || (len == sizeof (".sdata2") - 1
10933 && strcmp (name, ".sdata2") == 0)
10934 || (len == sizeof (".sbss") - 1
10935 && strcmp (name, ".sbss") == 0)
10936 || (len == sizeof (".sbss2") - 1
10937 && strcmp (name, ".sbss2") == 0)
10938 || (len == sizeof (".PPC.EMB.sdata0") - 1
10939 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10940 || (len == sizeof (".PPC.EMB.sbss0") - 1
10941 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10943 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10944 size_t len = strlen (XSTR (sym_ref, 0));
10945 char *str = alloca (len + 2);
/* str[0] = '@' is in an elided line; the copy below fills in the
   original name after the 1-byte prefix.  */
10948 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
10949 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
10954 #endif /* USING_ELFOS_H */
10957 /* Return a REG that occurs in ADDR with coefficient 1.
10958 ADDR can be effectively incremented by incrementing REG.
10960 r0 is special and we must not select it as an address
10961 register by this routine since our caller will try to
10962 increment the returned register via an "la" instruction. */
/* NOTE(review): fragmentary -- the return type, braces, the final
   `return addr' and the failure path (likely abort) are elided.
   Walks down nested PLUS rtx, preferring a non-r0 REG operand and
   otherwise discarding the constant operand.  */
10965 find_addr_reg (addr)
10968 while (GET_CODE (addr) == PLUS)
10970 if (GET_CODE (XEXP (addr, 0)) == REG
10971 && REGNO (XEXP (addr, 0)) != 0)
10972 addr = XEXP (addr, 0);
10973 else if (GET_CODE (XEXP (addr, 1)) == REG
10974 && REGNO (XEXP (addr, 1)) != 0)
10975 addr = XEXP (addr, 1);
10976 else if (CONSTANT_P (XEXP (addr, 0)))
10977 addr = XEXP (addr, 1);
10978 else if (CONSTANT_P (XEXP (addr, 1)))
10979 addr = XEXP (addr, 0);
/* Success only if we landed on a REG other than r0.  */
10983 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
/* Report an invalid address in insn OP and stop compilation.
   (Return type / braces elided in this fragmentary copy.)  */
10989 rs6000_fatal_bad_address (op)
10992 fatal_insn ("bad address", op);
10995 /* Called to register all of our global variables with the garbage
/* Register the back end's GC roots: the saved compare operands and the
   TOC hash table (created here with its own hash/eq/mark callbacks).
   The Darwin-specific call at the end is presumably inside an elided
   TARGET_MACHO conditional -- TODO confirm.  */
10999 rs6000_add_gc_roots ()
11001 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11002 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11004 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11005 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11006 toc_hash_mark_table);
11009 machopic_add_gc_roots ();
11016 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11017 reference and a constant. */
/* NOTE(review): fragmentary -- the switch's case labels and the
   reassignment of OP (presumably to XEXP of a CONST) are elided.  */
11020 symbolic_operand (op)
11023 switch (GET_CODE (op))
/* Precedence note: this reads A || ((B || C) && D), i.e. either a bare
   SYMBOL_REF, or a (SYMBOL_REF|LABEL_REF) + CONST_INT sum.  The
   grouping is intentional despite the missing outer parentheses.  */
11030 return (GET_CODE (op) == SYMBOL_REF ||
11031 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11032 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11033 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11040 #ifdef RS6000_LONG_BRANCH
/* Linked list of compiler-generated long-branch stubs, newest first.  */
11042 static tree stub_list = 0;
11044 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11045 procedure calls to the linked list. */
/* Each stub is a TREE_LIST: purpose = function name, value = label
   name, type = the source line number packed in an INT_CST.  The line
   pushing the new stub onto stub_list is elided in this copy.  */
11048 add_compiler_stub (label_name, function_name, line_number)
11050 tree function_name;
11053 tree stub = build_tree_list (function_name, label_name);
11054 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11055 TREE_CHAIN (stub) = stub_list;
/* Accessors matching the TREE_LIST packing above.  */
11059 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11060 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11061 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11063 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11064 handling procedure calls from the linked list and initializes the
/* For every recorded stub: emit its label, optional stabs line info,
   and a lis/ori/mtctr/bctr long-branch sequence to the target
   function.  NOTE(review): fragmentary -- tmp_buf's declaration and
   the strcpy into label_buf for the '*'-prefixed case are partly
   elided.  */
11068 output_compiler_stub ()
11071 char label_buf[256];
11073 tree tmp_stub, stub;
11076 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11078 fprintf (asm_out_file,
11079 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11081 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11082 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11083 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11084 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
/* Names beginning with '*' are already assembler-ready: strip the '*';
   otherwise prepend the user-label underscore.  */
11086 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11088 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11091 label_buf[0] = '_';
11092 strcpy (label_buf+1,
11093 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
/* Load the 32-bit target address into r12 and branch through CTR.  */
11096 strcpy (tmp_buf, "lis r12,hi16(");
11097 strcat (tmp_buf, label_buf);
11098 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11099 strcat (tmp_buf, label_buf);
11100 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11101 output_asm_insn (tmp_buf, 0);
11103 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11104 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11105 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11106 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11112 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11113 already there or not. */
/* Returns nonzero iff no stub has been recorded yet for FUNCTION_NAME
   (the return statements are in elided lines).  Identifier nodes are
   unique, so pointer comparison suffices.  */
11116 no_previous_def (function_name)
11117 tree function_name;
11120 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11121 if (function_name == STUB_FUNCTION_NAME (stub))
11126 /* GET_PREV_LABEL gets the label name from the previous definition of
/* Returns the stub label recorded for FUNCTION_NAME, or presumably
   NULL_TREE if none (the fallback return is elided).  */
11130 get_prev_label (function_name)
11131 tree function_name;
11134 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11135 if (function_name == STUB_FUNCTION_NAME (stub))
11136 return STUB_LABEL_NAME (stub);
11140 /* INSN is either a function call or a millicode call. It may have an
11141 unconditional jump in its delay slot.
11143 CALL_DEST is the routine we are calling. */
/* Produce the assembler template for a call.  With -mlongcall on a
   non-PIC direct call, route the call through a compiler-generated
   stub ("jbsr" to the stub label), creating and recording the stub on
   first use; otherwise emit a plain "bl".  NOTE(review): fragmentary
   -- declarations of labelname/line_number and the returns of `buf'
   are elided in this copy.  */
11146 output_call (insn, call_dest, operand_number)
11149 int operand_number;
11151 static char buf[256];
11152 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11155 tree funname = get_identifier (XSTR (call_dest, 0));
11157 if (no_previous_def (funname))
/* First call to this target: invent an internal label for the stub
   and find the nearest preceding NOTE to attribute a line number.  */
11160 rtx label_rtx = gen_label_rtx ();
11161 char *label_buf, temp_buf[256];
11162 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11163 CODE_LABEL_NUMBER (label_rtx));
11164 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11165 labelname = get_identifier (label_buf);
11166 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11168 line_number = NOTE_LINE_NUMBER (insn);
11169 add_compiler_stub (labelname, funname, line_number);
11172 labelname = get_prev_label (funname);
/* %.246s bounds the label so buf[256] cannot overflow.  */
11174 sprintf (buf, "jbsr %%z%d,%.246s",
11175 operand_number, IDENTIFIER_POINTER (labelname));
11180 sprintf (buf, "bl %%z%d", operand_number);
11185 #endif /* RS6000_LONG_BRANCH */
/* Build a Darwin local label "L<N>$<symbol>" in BUF, preserving or
   adding quoting when the symbol name is or must be quoted.
   NOTE(review): fragmentary macro -- the do/while wrapper and brace
   lines are elided; no comments are inserted inside the body because
   every interior line must keep its '\' continuation.  */
11187 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11189 const char *const symbol_ = (SYMBOL); \
11190 char *buffer_ = (BUF); \
11191 if (symbol_[0] == '"') \
11193 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11195 else if (name_needs_quotes(symbol_)) \
11197 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11201 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11206 /* Generate PIC and indirect symbol stubs. */
/* Emit a Darwin lazy-binding stub for SYMB named STUB: a PIC
   code sequence that loads the lazy pointer and branches through CTR,
   plus the lazy pointer itself initialized to
   dyld_stub_binding_helper.  NOTE(review): fragmentary -- the MACHOPIC
   pure/indirect test guarding the two section choices and the
   non-PIC branch body are partly elided.  */
11209 machopic_output_stub (file, symb, stub)
11211 const char *symb, *stub;
11213 unsigned int length;
11214 char *symbol_name, *lazy_ptr_name;
11215 char *local_label_0;
/* Counter for unique local labels across all stubs in this file.  */
11216 static int label = 0;
11218 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11219 STRIP_NAME_ENCODING (symb, symb);
/* +32 leaves room for the label decorations added by the GEN_*
   macros (quotes, "L<n>$", lazy-ptr suffix).  */
11223 length = strlen (symb);
11224 symbol_name = alloca (length + 32);
11225 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11227 lazy_ptr_name = alloca (length + 32);
11228 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11230 local_label_0 = alloca (length + 32);
11231 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11234 machopic_picsymbol_stub_section ();
11236 machopic_symbol_stub_section ();
11238 fprintf (file, "%s:\n", stub);
11239 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
/* PIC sequence: bcl/mflr materializes the stub's own address in r11,
   then the lazy pointer is addressed ha16/lo16-relative to it.  */
11243 fprintf (file, "\tmflr r0\n");
11244 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11245 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11246 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11247 lazy_ptr_name, local_label_0);
11248 fprintf (file, "\tmtlr r0\n");
11249 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11250 lazy_ptr_name, local_label_0);
11251 fprintf (file, "\tmtctr r12\n");
11252 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11253 lazy_ptr_name, local_label_0);
11254 fprintf (file, "\tbctr\n");
11257 fprintf (file, "non-pure not supported\n");
/* Lazy pointer: starts out pointing at the dyld binding helper, which
   resolves the real target on first call.  */
11259 machopic_lazy_symbol_ptr_section ();
11260 fprintf (file, "%s:\n", lazy_ptr_name);
11261 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11262 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11265 /* Legitimize PIC addresses. If the address is already
11266 position-independent, we return ORIG. Newly generated
11267 position-independent addresses go into a reg. This is REG if non
11268 zero, otherwise we allocate register(s) as necessary. */
/* True iff X's value fits in a signed 16-bit immediate.  */
11270 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
/* NOTE(review): fragmentary -- the declarations of base/offset, the
   early-return for already-PIC CONSTs, and several brace lines are
   elided.  Visible logic: split a CONST PLUS, legitimize each half,
   fold small integer offsets with plus_constant, force large offsets
   into a register (or the whole constant into memory during reload),
   and otherwise defer to the generic machopic legitimizer.  */
11273 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11275 enum machine_mode mode;
11280 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11281 reg = gen_reg_rtx (Pmode);
11283 if (GET_CODE (orig) == CONST)
/* Addresses already based on the PIC register need no further work
   (the return is in an elided line -- TODO confirm).  */
11285 if (GET_CODE (XEXP (orig, 0)) == PLUS
11286 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11289 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11292 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11295 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11301 if (GET_CODE (offset) == CONST_INT)
11303 if (SMALL_INT (offset))
11304 return plus_constant (base, INTVAL (offset));
11305 else if (! reload_in_progress && ! reload_completed)
11306 offset = force_reg (Pmode, offset);
/* During reload we cannot allocate a register, so spill the whole
   constant to the literal pool instead.  */
11309 rtx mem = force_const_mem (Pmode, orig);
11310 return machopic_legitimize_pic_address (mem, Pmode, reg);
11313 return gen_rtx (PLUS, Pmode, base, offset);
11316 /* Fall back on generic machopic code. */
11317 return machopic_legitimize_pic_address (orig, mode, reg);
11320 /* This is just a placeholder to make linking work without having to
11321 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11322 ever needed for Darwin (not too likely!) this would have to get a
11323 real definition. */
11330 #endif /* TARGET_MACHO */
/* Compute SECTION_* flags for an ELF section.  -mrelocatable forces
   everything writable because the dynamic relocations patch section
   contents at load time.  (Parameter list and braces are elided in
   this fragmentary copy.)  */
11333 static unsigned int
11334 rs6000_elf_section_type_flags (decl, name, reloc)
11339 unsigned int flags = default_section_type_flags (decl, name, reloc);
11341 if (TARGET_RELOCATABLE)
11342 flags |= SECTION_WRITE;
11347 /* Record an element in the table of global constructors. SYMBOL is
11348 a SYMBOL_REF of the function to be called; PRIORITY is a number
11349 between 0 and MAX_INIT_PRIORITY.
11351 This differs from default_named_section_asm_out_constructor in
11352 that we have special handling for -mrelocatable. */
/* (Static qualifier, parameter declarations and the `buf' local are
   elided in this fragmentary copy.)  */
11355 rs6000_elf_asm_out_constructor (symbol, priority)
11359 const char *section = ".ctors";
11362 if (priority != DEFAULT_INIT_PRIORITY)
11364 sprintf (buf, ".ctors.%.5u",
11365 /* Invert the numbering so the linker puts us in the proper
11366 order; constructors are run from right to left, and the
11367 linker sorts in increasing order. */
11368 MAX_INIT_PRIORITY - priority);
11372 named_section_flags (section, SECTION_WRITE);
11373 assemble_align (POINTER_SIZE);
/* -mrelocatable: emit the pointer with an @fixup so the loader can
   relocate it; otherwise a plain pointer-sized integer.  */
11375 if (TARGET_RELOCATABLE)
11377 fputs ("\t.long (", asm_out_file);
11378 output_addr_const (asm_out_file, symbol);
11379 fputs (")@fixup\n", asm_out_file);
11382 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Destructor-table counterpart of rs6000_elf_asm_out_constructor:
   identical logic but targets .dtors.  (Static qualifier, parameter
   declarations and the `buf' local are elided in this fragmentary
   copy.)  */
11386 rs6000_elf_asm_out_destructor (symbol, priority)
11390 const char *section = ".dtors";
11393 if (priority != DEFAULT_INIT_PRIORITY)
11395 sprintf (buf, ".dtors.%.5u",
11396 /* Invert the numbering so the linker puts us in the proper
11397 order; constructors are run from right to left, and the
11398 linker sorts in increasing order. */
11399 MAX_INIT_PRIORITY - priority);
11403 named_section_flags (section, SECTION_WRITE);
11404 assemble_align (POINTER_SIZE);
/* Same -mrelocatable @fixup treatment as for constructors.  */
11406 if (TARGET_RELOCATABLE)
11408 fputs ("\t.long (", asm_out_file);
11409 output_addr_const (asm_out_file, symbol);
11410 fputs (")@fixup\n", asm_out_file);
11413 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11417 #ifdef OBJECT_FORMAT_COFF
11419 xcoff_asm_named_section (name, flags)
11421 unsigned int flags ATTRIBUTE_UNUSED;
11423 fprintf (asm_out_file, "\t.csect %s\n", name);