1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 static int easy_vector_constant PARAMS ((rtx));
179 /* Default register names. */
180 char rs6000_reg_names[][8] =
182 "0", "1", "2", "3", "4", "5", "6", "7",
183 "8", "9", "10", "11", "12", "13", "14", "15",
184 "16", "17", "18", "19", "20", "21", "22", "23",
185 "24", "25", "26", "27", "28", "29", "30", "31",
186 "0", "1", "2", "3", "4", "5", "6", "7",
187 "8", "9", "10", "11", "12", "13", "14", "15",
188 "16", "17", "18", "19", "20", "21", "22", "23",
189 "24", "25", "26", "27", "28", "29", "30", "31",
190 "mq", "lr", "ctr","ap",
191 "0", "1", "2", "3", "4", "5", "6", "7",
193 /* AltiVec registers. */
194 "0", "1", "2", "3", "4", "5", "6", "7",
195 "8", "9", "10", "11", "12", "13", "14", "15",
196 "16", "17", "18", "19", "20", "21", "22", "23",
197 "24", "25", "26", "27", "28", "29", "30", "31",
201 #ifdef TARGET_REGNAMES
202 static const char alt_reg_names[][8] =
204 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
205 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
206 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
207 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
208 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
209 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
210 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
211 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
212 "mq", "lr", "ctr", "ap",
213 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
215 /* AltiVec registers. */
216 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
217 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
218 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
219 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
224 #ifndef MASK_STRICT_ALIGN
225 #define MASK_STRICT_ALIGN 0
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
256 /* This hook deals with fixups for relocatable code and DI-mode objects
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
287 struct gcc_target targetm = TARGET_INITIALIZER;
289 /* Override command line options. Mostly we process the processor
290 type and sometimes adjust other TARGET_ options. */
293 rs6000_override_options (default_cpu)
294 const char *default_cpu;
297 struct rs6000_cpu_select *ptr;
299 /* Simplify the entries below by making a mask for any POWER
300 variant and any PowerPC variant. */
302 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304 | MASK_PPC_GFXOPT | MASK_POWERPC64)
305 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
309 const char *const name; /* Canonical processor name. */
310 const enum processor_type processor; /* Processor type enum value. */
311 const int target_enable; /* Target flags to enable. */
312 const int target_disable; /* Target flags to disable. */
313 } const processor_target_table[]
314 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
315 POWER_MASKS | POWERPC_MASKS},
316 {"power", PROCESSOR_POWER,
317 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
318 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
319 {"power2", PROCESSOR_POWER,
320 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
321 POWERPC_MASKS | MASK_NEW_MNEMONICS},
322 {"power3", PROCESSOR_PPC630,
323 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
324 POWER_MASKS | MASK_PPC_GPOPT},
325 {"powerpc", PROCESSOR_POWERPC,
326 MASK_POWERPC | MASK_NEW_MNEMONICS,
327 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
328 {"powerpc64", PROCESSOR_POWERPC64,
329 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
330 POWER_MASKS | POWERPC_OPT_MASKS},
331 {"rios", PROCESSOR_RIOS1,
332 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
333 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
334 {"rios1", PROCESSOR_RIOS1,
335 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
336 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
337 {"rsc", PROCESSOR_PPC601,
338 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
339 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
340 {"rsc1", PROCESSOR_PPC601,
341 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
342 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
343 {"rios2", PROCESSOR_RIOS2,
344 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
345 POWERPC_MASKS | MASK_NEW_MNEMONICS},
346 {"rs64a", PROCESSOR_RS64A,
347 MASK_POWERPC | MASK_NEW_MNEMONICS,
348 POWER_MASKS | POWERPC_OPT_MASKS},
349 {"401", PROCESSOR_PPC403,
350 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
351 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
352 {"403", PROCESSOR_PPC403,
353 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
354 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
355 {"405", PROCESSOR_PPC405,
356 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
357 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
358 {"505", PROCESSOR_MPCCORE,
359 MASK_POWERPC | MASK_NEW_MNEMONICS,
360 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
361 {"601", PROCESSOR_PPC601,
362 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
363 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
364 {"602", PROCESSOR_PPC603,
365 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
366 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
367 {"603", PROCESSOR_PPC603,
368 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
369 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
370 {"603e", PROCESSOR_PPC603,
371 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
372 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
373 {"ec603e", PROCESSOR_PPC603,
374 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
376 {"604", PROCESSOR_PPC604,
377 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
378 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
379 {"604e", PROCESSOR_PPC604e,
380 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
381 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
382 {"620", PROCESSOR_PPC620,
383 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
384 POWER_MASKS | MASK_PPC_GPOPT},
385 {"630", PROCESSOR_PPC630,
386 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
387 POWER_MASKS | MASK_PPC_GPOPT},
388 {"740", PROCESSOR_PPC750,
389 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
390 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
391 {"750", PROCESSOR_PPC750,
392 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
393 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
394 {"7400", PROCESSOR_PPC7400,
395 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
396 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
397 {"7450", PROCESSOR_PPC7450,
398 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
399 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
400 {"801", PROCESSOR_MPCCORE,
401 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
402 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
403 {"821", PROCESSOR_MPCCORE,
404 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
405 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
406 {"823", PROCESSOR_MPCCORE,
407 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
408 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
409 {"860", PROCESSOR_MPCCORE,
410 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
411 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
413 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
415 /* Save current -mmultiple/-mno-multiple status. */
416 int multiple = TARGET_MULTIPLE;
417 /* Save current -mstring/-mno-string status. */
418 int string = TARGET_STRING;
420 /* Identify the processor type. */
421 rs6000_select[0].string = default_cpu;
422 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
424 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
426 ptr = &rs6000_select[i];
427 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
429 for (j = 0; j < ptt_size; j++)
430 if (! strcmp (ptr->string, processor_target_table[j].name))
433 rs6000_cpu = processor_target_table[j].processor;
437 target_flags |= processor_target_table[j].target_enable;
438 target_flags &= ~processor_target_table[j].target_disable;
444 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
448 /* If we are optimizing big endian systems for space, use the store
449 multiple instructions. */
450 if (BYTES_BIG_ENDIAN && optimize_size)
451 target_flags |= MASK_MULTIPLE;
453 /* If -mmultiple or -mno-multiple was explicitly used, don't
454 override with the processor default */
455 if (TARGET_MULTIPLE_SET)
456 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
458 /* If -mstring or -mno-string was explicitly used, don't override
459 with the processor default. */
460 if (TARGET_STRING_SET)
461 target_flags = (target_flags & ~MASK_STRING) | string;
463 /* Don't allow -mmultiple or -mstring on little endian systems
464 unless the cpu is a 750, because the hardware doesn't support the
465 instructions used in little endian mode, and causes an alignment
466 trap. The 750 does not cause an alignment trap (except when the
467 target is unaligned). */
469 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
473 target_flags &= ~MASK_MULTIPLE;
474 if (TARGET_MULTIPLE_SET)
475 warning ("-mmultiple is not supported on little endian systems");
480 target_flags &= ~MASK_STRING;
481 if (TARGET_STRING_SET)
482 warning ("-mstring is not supported on little endian systems");
486 if (flag_pic && DEFAULT_ABI == ABI_AIX && extra_warnings)
488 warning ("-f%s ignored (all code is position independent)",
489 (flag_pic > 1) ? "PIC" : "pic");
493 #ifdef XCOFF_DEBUGGING_INFO
494 if (flag_function_sections && (write_symbols != NO_DEBUG)
495 && DEFAULT_ABI == ABI_AIX)
497 warning ("-ffunction-sections disabled on AIX when debugging");
498 flag_function_sections = 0;
501 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
503 warning ("-fdata-sections not supported on AIX");
504 flag_data_sections = 0;
508 /* Set debug flags */
509 if (rs6000_debug_name)
511 if (! strcmp (rs6000_debug_name, "all"))
512 rs6000_debug_stack = rs6000_debug_arg = 1;
513 else if (! strcmp (rs6000_debug_name, "stack"))
514 rs6000_debug_stack = 1;
515 else if (! strcmp (rs6000_debug_name, "arg"))
516 rs6000_debug_arg = 1;
518 error ("unknown -mdebug-%s switch", rs6000_debug_name);
521 /* Set size of long double */
522 rs6000_long_double_type_size = 64;
523 if (rs6000_long_double_size_string)
526 int size = strtol (rs6000_long_double_size_string, &tail, 10);
527 if (*tail != '\0' || (size != 64 && size != 128))
528 error ("Unknown switch -mlong-double-%s",
529 rs6000_long_double_size_string);
531 rs6000_long_double_type_size = size;
534 /* Handle -mabi= options. */
535 rs6000_parse_abi_options ();
537 #ifdef TARGET_REGNAMES
538 /* If the user desires alternate register names, copy in the
539 alternate names now. */
541 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
544 #ifdef SUBTARGET_OVERRIDE_OPTIONS
545 SUBTARGET_OVERRIDE_OPTIONS;
547 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
548 SUBSUBTARGET_OVERRIDE_OPTIONS;
551 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
552 If -maix-struct-return or -msvr4-struct-return was explicitly
553 used, don't override with the ABI default. */
554 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
556 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
557 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
559 target_flags |= MASK_AIX_STRUCT_RET;
562 /* Register global variables with the garbage collector. */
563 rs6000_add_gc_roots ();
565 /* Allocate an alias set for register saves & restores from stack. */
566 rs6000_sr_alias_set = new_alias_set ();
569 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
571 /* We can only guarantee the availability of DI pseudo-ops when
572 assembling for 64-bit targets. */
575 targetm.asm_out.aligned_op.di = NULL;
576 targetm.asm_out.unaligned_op.di = NULL;
579 /* Arrange to save and restore machine status around nested functions. */
580 init_machine_status = rs6000_init_machine_status;
581 free_machine_status = rs6000_free_machine_status;
584 /* Handle -mabi= options. */
586 rs6000_parse_abi_options ()
588 if (rs6000_abi_string == 0)
590 else if (! strcmp (rs6000_abi_string, "altivec"))
591 rs6000_altivec_abi = 1;
592 else if (! strcmp (rs6000_abi_string, "no-altivec"))
593 rs6000_altivec_abi = 0;
595 error ("unknown ABI specified: '%s'", rs6000_abi_string);
599 optimization_options (level, size)
600 int level ATTRIBUTE_UNUSED;
601 int size ATTRIBUTE_UNUSED;
605 /* Do anything needed at the start of the asm file. */
608 rs6000_file_start (file, default_cpu)
610 const char *default_cpu;
614 const char *start = buffer;
615 struct rs6000_cpu_select *ptr;
617 if (flag_verbose_asm)
619 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
620 rs6000_select[0].string = default_cpu;
622 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
624 ptr = &rs6000_select[i];
625 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
627 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
633 switch (rs6000_sdata)
635 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
636 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
637 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
638 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
641 if (rs6000_sdata && g_switch_value)
643 fprintf (file, "%s -G %d", start, g_switch_value);
654 /* Create a CONST_DOUBLE from a string. */
657 rs6000_float_const (string, mode)
659 enum machine_mode mode;
661 REAL_VALUE_TYPE value;
662 value = REAL_VALUE_ATOF (string, mode);
663 return immed_real_const_1 (value, mode);
666 /* Return non-zero if this function is known to have a null epilogue. */
671 if (reload_completed)
673 rs6000_stack_t *info = rs6000_stack_info ();
675 if (info->first_gp_reg_save == 32
676 && info->first_fp_reg_save == 64
677 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680 && info->vrsave_mask == 0
688 /* Returns 1 always. */
691 any_operand (op, mode)
692 rtx op ATTRIBUTE_UNUSED;
693 enum machine_mode mode ATTRIBUTE_UNUSED;
698 /* Returns 1 if op is the count register. */
700 count_register_operand (op, mode)
702 enum machine_mode mode ATTRIBUTE_UNUSED;
704 if (GET_CODE (op) != REG)
707 if (REGNO (op) == COUNT_REGISTER_REGNUM)
710 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
716 /* Returns 1 if op is an altivec register. */
718 altivec_register_operand (op, mode)
720 enum machine_mode mode ATTRIBUTE_UNUSED;
723 return (register_operand (op, mode)
724 && (GET_CODE (op) != REG
725 || REGNO (op) > FIRST_PSEUDO_REGISTER
726 || ALTIVEC_REGNO_P (REGNO (op))));
730 xer_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 if (GET_CODE (op) != REG)
737 if (XER_REGNO_P (REGNO (op)))
743 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
744 by such constants completes more quickly. */
747 s8bit_cint_operand (op, mode)
749 enum machine_mode mode ATTRIBUTE_UNUSED;
751 return ( GET_CODE (op) == CONST_INT
752 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755 /* Return 1 if OP is a constant that can fit in a D field. */
758 short_cint_operand (op, mode)
760 enum machine_mode mode ATTRIBUTE_UNUSED;
762 return (GET_CODE (op) == CONST_INT
763 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766 /* Similar for an unsigned D field. */
769 u_short_cint_operand (op, mode)
771 enum machine_mode mode ATTRIBUTE_UNUSED;
773 return (GET_CODE (op) == CONST_INT
774 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
780 non_short_cint_operand (op, mode)
782 enum machine_mode mode ATTRIBUTE_UNUSED;
784 return (GET_CODE (op) == CONST_INT
785 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788 /* Returns 1 if OP is a CONST_INT that is a positive value
789 and an exact power of 2. */
792 exact_log2_cint_operand (op, mode)
794 enum machine_mode mode ATTRIBUTE_UNUSED;
796 return (GET_CODE (op) == CONST_INT
798 && exact_log2 (INTVAL (op)) >= 0);
801 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
805 gpc_reg_operand (op, mode)
807 enum machine_mode mode;
809 return (register_operand (op, mode)
810 && (GET_CODE (op) != REG
811 || (REGNO (op) >= ARG_POINTER_REGNUM
812 && !XER_REGNO_P (REGNO (op)))
813 || REGNO (op) < MQ_REGNO));
816 /* Returns 1 if OP is either a pseudo-register or a register denoting a
820 cc_reg_operand (op, mode)
822 enum machine_mode mode;
824 return (register_operand (op, mode)
825 && (GET_CODE (op) != REG
826 || REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || CR_REGNO_P (REGNO (op))));
830 /* Returns 1 if OP is either a pseudo-register or a register denoting a
831 CR field that isn't CR0. */
834 cc_reg_not_cr0_operand (op, mode)
836 enum machine_mode mode;
838 return (register_operand (op, mode)
839 && (GET_CODE (op) != REG
840 || REGNO (op) >= FIRST_PSEUDO_REGISTER
841 || CR_REGNO_NOT_CR0_P (REGNO (op))));
844 /* Returns 1 if OP is either a constant integer valid for a D-field or
845 a non-special register. If a register, it must be in the proper
846 mode unless MODE is VOIDmode. */
849 reg_or_short_operand (op, mode)
851 enum machine_mode mode;
853 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856 /* Similar, except check if the negation of the constant would be
857 valid for a D-field. */
860 reg_or_neg_short_operand (op, mode)
862 enum machine_mode mode;
864 if (GET_CODE (op) == CONST_INT)
865 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867 return gpc_reg_operand (op, mode);
870 /* Returns 1 if OP is either a constant integer valid for a DS-field or
871 a non-special register. If a register, it must be in the proper
872 mode unless MODE is VOIDmode. */
875 reg_or_aligned_short_operand (op, mode)
877 enum machine_mode mode;
879 if (gpc_reg_operand (op, mode))
881 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
888 /* Return 1 if the operand is either a register or an integer whose
889 high-order 16 bits are zero. */
892 reg_or_u_short_operand (op, mode)
894 enum machine_mode mode;
896 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899 /* Return 1 is the operand is either a non-special register or ANY
903 reg_or_cint_operand (op, mode)
905 enum machine_mode mode;
907 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910 /* Return 1 is the operand is either a non-special register or ANY
911 32-bit signed constant integer. */
914 reg_or_arith_cint_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 #if HOST_BITS_PER_WIDE_INT != 32
921 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
922 < (unsigned HOST_WIDE_INT) 0x100000000ll)
927 /* Return 1 is the operand is either a non-special register or a 32-bit
928 signed constant integer valid for 64-bit addition. */
931 reg_or_add_cint64_operand (op, mode)
933 enum machine_mode mode;
935 return (gpc_reg_operand (op, mode)
936 || (GET_CODE (op) == CONST_INT
937 #if HOST_BITS_PER_WIDE_INT == 32
938 && INTVAL (op) < 0x7fff8000
940 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
946 /* Return 1 is the operand is either a non-special register or a 32-bit
947 signed constant integer valid for 64-bit subtraction. */
950 reg_or_sub_cint64_operand (op, mode)
952 enum machine_mode mode;
954 return (gpc_reg_operand (op, mode)
955 || (GET_CODE (op) == CONST_INT
956 #if HOST_BITS_PER_WIDE_INT == 32
957 && (- INTVAL (op)) < 0x7fff8000
959 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
965 /* Return 1 is the operand is either a non-special register or ANY
966 32-bit unsigned constant integer. */
969 reg_or_logical_cint_operand (op, mode)
971 enum machine_mode mode;
973 if (GET_CODE (op) == CONST_INT)
975 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
977 if (GET_MODE_BITSIZE (mode) <= 32)
984 return ((INTVAL (op) & GET_MODE_MASK (mode)
985 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
987 else if (GET_CODE (op) == CONST_DOUBLE)
989 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
993 return CONST_DOUBLE_HIGH (op) == 0;
996 return gpc_reg_operand (op, mode);
999 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1002 got_operand (op, mode)
1004 enum machine_mode mode ATTRIBUTE_UNUSED;
1006 return (GET_CODE (op) == SYMBOL_REF
1007 || GET_CODE (op) == CONST
1008 || GET_CODE (op) == LABEL_REF);
1011 /* Return 1 if the operand is a simple references that can be loaded via
1012 the GOT (labels involving addition aren't allowed). */
1015 got_no_const_operand (op, mode)
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1022 /* Return the number of instructions it takes to form a constant in an
1023 integer register. */
1026 num_insns_constant_wide (value)
1027 HOST_WIDE_INT value;
1029 /* signed constant loadable with {cal|addi} */
1030 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1033 /* constant loadable with {cau|addis} */
1034 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1037 #if HOST_BITS_PER_WIDE_INT == 64
1038 else if (TARGET_POWERPC64)
1040 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1041 HOST_WIDE_INT high = value >> 31;
1043 if (high == 0 || high == -1)
1049 return num_insns_constant_wide (high) + 1;
1051 return (num_insns_constant_wide (high)
1052 + num_insns_constant_wide (low) + 1);
1061 num_insns_constant (op, mode)
1063 enum machine_mode mode;
1065 if (GET_CODE (op) == CONST_INT)
1067 #if HOST_BITS_PER_WIDE_INT == 64
1068 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1069 && mask64_operand (op, mode))
1073 return num_insns_constant_wide (INTVAL (op));
1076 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1081 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1082 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1083 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1086 else if (GET_CODE (op) == CONST_DOUBLE)
1092 int endian = (WORDS_BIG_ENDIAN == 0);
1094 if (mode == VOIDmode || mode == DImode)
1096 high = CONST_DOUBLE_HIGH (op);
1097 low = CONST_DOUBLE_LOW (op);
1101 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1102 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1104 low = l[1 - endian];
1108 return (num_insns_constant_wide (low)
1109 + num_insns_constant_wide (high));
1113 if (high == 0 && low >= 0)
1114 return num_insns_constant_wide (low);
1116 else if (high == -1 && low < 0)
1117 return num_insns_constant_wide (low);
1119 else if (mask64_operand (op, mode))
1123 return num_insns_constant_wide (high) + 1;
1126 return (num_insns_constant_wide (high)
1127 + num_insns_constant_wide (low) + 1);
1135 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1136 register with one instruction per word. We only do this if we can
1137 safely read CONST_DOUBLE_{LOW,HIGH}. */
1140 easy_fp_constant (op, mode)
1142 enum machine_mode mode;
1144 if (GET_CODE (op) != CONST_DOUBLE
1145 || GET_MODE (op) != mode
1146 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1149 /* Consider all constants with -msoft-float to be easy. */
1150 if (TARGET_SOFT_FLOAT && mode != DImode)
1153 /* If we are using V.4 style PIC, consider all constants to be hard. */
1154 if (flag_pic && DEFAULT_ABI == ABI_V4)
1157 #ifdef TARGET_RELOCATABLE
1158 /* Similarly if we are using -mrelocatable, consider all constants
1160 if (TARGET_RELOCATABLE)
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1170 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1172 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1173 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1176 else if (mode == SFmode)
1181 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1182 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1184 return num_insns_constant_wide (l) == 1;
1187 else if (mode == DImode)
1188 return ((TARGET_POWERPC64
1189 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1190 || (num_insns_constant (op, DImode) <= 2));
1192 else if (mode == SImode)
1198 /* Return 1 if the operand is a CONST_INT and can be put into a
1199 register with one instruction. */
1202 easy_vector_constant (op)
1208 if (GET_CODE (op) != CONST_VECTOR)
1211 units = CONST_VECTOR_NUNITS (op);
1213 /* We can generate 0 easily. Look for that. */
1214 for (i = 0; i < units; ++i)
1216 elt = CONST_VECTOR_ELT (op, i);
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
1222 if (GET_CODE (elt) == CONST_INT && INTVAL (elt) != 0)
1225 if (GET_CODE (elt) == CONST_DOUBLE
1226 && (CONST_DOUBLE_LOW (elt) != 0
1227 || CONST_DOUBLE_HIGH (elt) != 0))
1231 /* We could probably generate a few other constants trivially, but
1232 gcc doesn't generate them yet. FIXME later. */
1236 /* Return 1 if the operand is the constant 0. This works for scalars
1237 as well as vectors. */
1239 zero_constant (op, mode)
1241 enum machine_mode mode;
1243 return op == CONST0_RTX (mode);
1246 /* Return 1 if the operand is 0.0. */
1248 zero_fp_constant (op, mode)
1250 enum machine_mode mode;
1252 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1255 /* Return 1 if the operand is in volatile memory. Note that during
1256 the RTL generation phase, memory_operand does not return TRUE for
1257 volatile memory references. So this function allows us to
1258 recognize volatile references where its safe. */
1261 volatile_mem_operand (op, mode)
1263 enum machine_mode mode;
1265 if (GET_CODE (op) != MEM)
1268 if (!MEM_VOLATILE_P (op))
1271 if (mode != GET_MODE (op))
1274 if (reload_completed)
1275 return memory_operand (op, mode);
1277 if (reload_in_progress)
1278 return strict_memory_address_p (mode, XEXP (op, 0));
1280 return memory_address_p (mode, XEXP (op, 0));
1283 /* Return 1 if the operand is an offsettable memory operand. */
1286 offsettable_mem_operand (op, mode)
1288 enum machine_mode mode;
1290 return ((GET_CODE (op) == MEM)
1291 && offsettable_address_p (reload_completed || reload_in_progress,
1292 mode, XEXP (op, 0)));
1295 /* Return 1 if the operand is either an easy FP constant (see above) or
1299 mem_or_easy_const_operand (op, mode)
1301 enum machine_mode mode;
1303 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1306 /* Return 1 if the operand is either a non-special register or an item
1307 that can be used as the operand of a `mode' add insn. */
1310 add_operand (op, mode)
1312 enum machine_mode mode;
1314 if (GET_CODE (op) == CONST_INT)
1315 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1316 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1318 return gpc_reg_operand (op, mode);
1321 /* Return 1 if OP is a constant but not a valid add_operand. */
1324 non_add_cint_operand (op, mode)
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1328 return (GET_CODE (op) == CONST_INT
1329 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1330 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1333 /* Return 1 if the operand is a non-special register or a constant that
1334 can be used as the operand of an OR or XOR insn on the RS/6000. */
1337 logical_operand (op, mode)
1339 enum machine_mode mode;
1341 HOST_WIDE_INT opl, oph;
1343 if (gpc_reg_operand (op, mode))
1346 if (GET_CODE (op) == CONST_INT)
1348 opl = INTVAL (op) & GET_MODE_MASK (mode);
1350 #if HOST_BITS_PER_WIDE_INT <= 32
1351 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1355 else if (GET_CODE (op) == CONST_DOUBLE)
1357 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1360 opl = CONST_DOUBLE_LOW (op);
1361 oph = CONST_DOUBLE_HIGH (op);
1368 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1369 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1372 /* Return 1 if C is a constant that is not a logical operand (as
1373 above), but could be split into one. */
1376 non_logical_cint_operand (op, mode)
1378 enum machine_mode mode;
1380 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1381 && ! logical_operand (op, mode)
1382 && reg_or_logical_cint_operand (op, mode));
1385 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1386 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1387 Reject all ones and all zeros, since these should have been optimized
1388 away and confuse the making of MB and ME. */
1391 mask_operand (op, mode)
1393 enum machine_mode mode ATTRIBUTE_UNUSED;
1395 HOST_WIDE_INT c, lsb;
1397 if (GET_CODE (op) != CONST_INT)
1402 /* Fail in 64-bit mode if the mask wraps around because the upper
1403 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1404 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1407 /* We don't change the number of transitions by inverting,
1408 so make sure we start with the LS bit zero. */
1412 /* Reject all zeros or all ones. */
1416 /* Find the first transition. */
1419 /* Invert to look for a second transition. */
1422 /* Erase first transition. */
1425 /* Find the second transition (if any). */
1428 /* Match if all the bits above are 1's (or c is zero). */
1432 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1433 It is if there are no more than one 1->0 or 0->1 transitions.
1434 Reject all ones and all zeros, since these should have been optimized
1435 away and confuse the making of MB and ME. */
1438 mask64_operand (op, mode)
1440 enum machine_mode mode;
1442 if (GET_CODE (op) == CONST_INT)
1444 HOST_WIDE_INT c, lsb;
1446 /* We don't change the number of transitions by inverting,
1447 so make sure we start with the LS bit zero. */
1452 /* Reject all zeros or all ones. */
1456 /* Find the transition, and check that all bits above are 1's. */
1460 else if (GET_CODE (op) == CONST_DOUBLE
1461 && (mode == VOIDmode || mode == DImode))
1463 HOST_WIDE_INT low, high, lsb;
1465 if (HOST_BITS_PER_WIDE_INT < 64)
1466 high = CONST_DOUBLE_HIGH (op);
1468 low = CONST_DOUBLE_LOW (op);
1471 if (HOST_BITS_PER_WIDE_INT < 64)
1478 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1482 return high == -lsb;
1486 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1492 /* Return 1 if the operand is either a non-special register or a constant
1493 that can be used as the operand of a PowerPC64 logical AND insn. */
1496 and64_operand (op, mode)
1498 enum machine_mode mode;
1500 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1501 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1503 return (logical_operand (op, mode) || mask64_operand (op, mode));
1506 /* Return 1 if the operand is either a non-special register or a
1507 constant that can be used as the operand of an RS/6000 logical AND insn. */
1510 and_operand (op, mode)
1512 enum machine_mode mode;
1514 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1515 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1517 return (logical_operand (op, mode) || mask_operand (op, mode));
1520 /* Return 1 if the operand is a general register or memory operand. */
1523 reg_or_mem_operand (op, mode)
1525 enum machine_mode mode;
1527 return (gpc_reg_operand (op, mode)
1528 || memory_operand (op, mode)
1529 || volatile_mem_operand (op, mode));
1532 /* Return 1 if the operand is a general register or memory operand without
1533 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1537 lwa_operand (op, mode)
1539 enum machine_mode mode;
1543 if (reload_completed && GET_CODE (inner) == SUBREG)
1544 inner = SUBREG_REG (inner);
1546 return gpc_reg_operand (inner, mode)
1547 || (memory_operand (inner, mode)
1548 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1549 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1550 && (GET_CODE (XEXP (inner, 0)) != PLUS
1551 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1552 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1555 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1556 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1560 call_operand (op, mode)
1562 enum machine_mode mode;
1564 if (mode != VOIDmode && GET_MODE (op) != mode)
1567 return (GET_CODE (op) == SYMBOL_REF
1568 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1571 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1572 this file and the function is not weakly defined. */
1575 current_file_function_operand (op, mode)
1577 enum machine_mode mode ATTRIBUTE_UNUSED;
1579 return (GET_CODE (op) == SYMBOL_REF
1580 && (SYMBOL_REF_FLAG (op)
1581 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1582 && ! DECL_WEAK (current_function_decl))));
1585 /* Return 1 if this operand is a valid input for a move insn. */
1588 input_operand (op, mode)
1590 enum machine_mode mode;
1592 /* Memory is always valid. */
1593 if (memory_operand (op, mode))
1596 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1597 if (GET_CODE (op) == CONSTANT_P_RTX)
1600 /* For floating-point, easy constants are valid. */
1601 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1603 && easy_fp_constant (op, mode))
1606 /* Allow any integer constant. */
1607 if (GET_MODE_CLASS (mode) == MODE_INT
1608 && (GET_CODE (op) == CONST_INT
1609 || GET_CODE (op) == CONST_DOUBLE))
1612 /* For floating-point or multi-word mode, the only remaining valid type
1614 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1615 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1616 return register_operand (op, mode);
1618 /* The only cases left are integral modes one word or smaller (we
1619 do not get called for MODE_CC values). These can be in any
1621 if (register_operand (op, mode))
1624 /* A SYMBOL_REF referring to the TOC is valid. */
1625 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1628 /* A constant pool expression (relative to the TOC) is valid */
1629 if (TOC_RELATIVE_EXPR_P (op))
1632 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1634 if (DEFAULT_ABI == ABI_V4
1635 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1636 && small_data_operand (op, Pmode))
1642 /* Return 1 for an operand in small memory on V.4/eabi. */
1645 small_data_operand (op, mode)
1646 rtx op ATTRIBUTE_UNUSED;
1647 enum machine_mode mode ATTRIBUTE_UNUSED;
1652 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1655 if (DEFAULT_ABI != ABI_V4)
1658 if (GET_CODE (op) == SYMBOL_REF)
1661 else if (GET_CODE (op) != CONST
1662 || GET_CODE (XEXP (op, 0)) != PLUS
1663 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1664 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1669 rtx sum = XEXP (op, 0);
1670 HOST_WIDE_INT summand;
1672 /* We have to be careful here, because it is the referenced address
1673 that must be 32k from _SDA_BASE_, not just the symbol. */
1674 summand = INTVAL (XEXP (sum, 1));
1675 if (summand < 0 || summand > g_switch_value)
1678 sym_ref = XEXP (sum, 0);
1681 if (*XSTR (sym_ref, 0) != '@')
1692 constant_pool_expr_1 (op, have_sym, have_toc)
1697 switch (GET_CODE(op))
1700 if (CONSTANT_POOL_ADDRESS_P (op))
1702 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1710 else if (! strcmp (XSTR (op, 0), toc_label_name))
1719 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1720 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1722 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1731 constant_pool_expr_p (op)
1736 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1740 toc_relative_expr_p (op)
1745 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1748 /* Try machine-dependent ways of modifying an illegitimate address
1749 to be legitimate. If we find one, return the new, valid address.
1750 This is used from only one place: `memory_address' in explow.c.
1752 OLDX is the address as it was before break_out_memory_refs was
1753 called. In some cases it is useful to look at this to decide what
1756 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1758 It is always safe for this function to do nothing. It exists to
1759 recognize opportunities to optimize the output.
1761 On RS/6000, first check for the sum of a register with a constant
1762 integer that is out of range. If so, generate code to add the
1763 constant with the low-order 16 bits masked to the register and force
1764 this result into another register (this can be done with `cau').
1765 Then generate an address of REG+(CONST&0xffff), allowing for the
1766 possibility of bit 16 being a one.
1768 Then check for the sum of a register and something not constant, try to
1769 load the other things into a register and return the sum. */
1771 rs6000_legitimize_address (x, oldx, mode)
1773 rtx oldx ATTRIBUTE_UNUSED;
1774 enum machine_mode mode;
1776 if (GET_CODE (x) == PLUS
1777 && GET_CODE (XEXP (x, 0)) == REG
1778 && GET_CODE (XEXP (x, 1)) == CONST_INT
1779 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1781 HOST_WIDE_INT high_int, low_int;
1783 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1784 high_int = INTVAL (XEXP (x, 1)) - low_int;
1785 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1786 GEN_INT (high_int)), 0);
1787 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1789 else if (GET_CODE (x) == PLUS
1790 && GET_CODE (XEXP (x, 0)) == REG
1791 && GET_CODE (XEXP (x, 1)) != CONST_INT
1792 && GET_MODE_NUNITS (mode) == 1
1793 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1794 && (TARGET_POWERPC64 || mode != DImode)
1797 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1798 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1800 else if (ALTIVEC_VECTOR_MODE (mode))
1804 /* Make sure both operands are registers. */
1805 if (GET_CODE (x) == PLUS)
1806 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1807 force_reg (Pmode, XEXP (x, 1)));
1809 reg = force_reg (Pmode, x);
1812 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1813 && GET_CODE (x) != CONST_INT
1814 && GET_CODE (x) != CONST_DOUBLE
1816 && GET_MODE_NUNITS (mode) == 1
1817 && (GET_MODE_BITSIZE (mode) <= 32
1818 || (TARGET_HARD_FLOAT && mode == DFmode)))
1820 rtx reg = gen_reg_rtx (Pmode);
1821 emit_insn (gen_elf_high (reg, (x)));
1822 return gen_rtx_LO_SUM (Pmode, reg, (x));
1824 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1826 && GET_CODE (x) != CONST_INT
1827 && GET_CODE (x) != CONST_DOUBLE
1829 && (TARGET_HARD_FLOAT || mode != DFmode)
1833 rtx reg = gen_reg_rtx (Pmode);
1834 emit_insn (gen_macho_high (reg, (x)));
1835 return gen_rtx_LO_SUM (Pmode, reg, (x));
1838 && CONSTANT_POOL_EXPR_P (x)
1839 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1841 return create_TOC_reference (x);
1847 /* The convention appears to be to define this wherever it is used.
1848 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1849 is now used here. */
1850 #ifndef REG_MODE_OK_FOR_BASE_P
1851 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1854 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1855 replace the input X, or the original X if no replacement is called for.
1856 The output parameter *WIN is 1 if the calling macro should goto WIN,
1859 For RS/6000, we wish to handle large displacements off a base
1860 register by splitting the addend across an addiu/addis and the mem insn.
1861 This cuts number of extra insns needed from 3 to 1.
1863 On Darwin, we use this to generate code for floating point constants.
1864 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1865 The Darwin code is inside #if TARGET_MACHO because only then is
1866 machopic_function_base_name() defined. */
1868 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1870 enum machine_mode mode;
1873 int ind_levels ATTRIBUTE_UNUSED;
1876 /* We must recognize output that we have already generated ourselves. */
1877 if (GET_CODE (x) == PLUS
1878 && GET_CODE (XEXP (x, 0)) == PLUS
1879 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1880 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1881 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1883 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1884 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1885 opnum, (enum reload_type)type);
1891 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1892 && GET_CODE (x) == LO_SUM
1893 && GET_CODE (XEXP (x, 0)) == PLUS
1894 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1895 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1896 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1897 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1898 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1899 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1900 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1902 /* Result of previous invocation of this function on Darwin
1903 floating point constant. */
1904 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1905 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1906 opnum, (enum reload_type)type);
1911 if (GET_CODE (x) == PLUS
1912 && GET_CODE (XEXP (x, 0)) == REG
1913 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1914 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1915 && GET_CODE (XEXP (x, 1)) == CONST_INT
1916 && !ALTIVEC_VECTOR_MODE (mode))
1918 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1919 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1921 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1923 /* Check for 32-bit overflow. */
1924 if (high + low != val)
1930 /* Reload the high part into a base reg; leave the low part
1931 in the mem directly. */
1933 x = gen_rtx_PLUS (GET_MODE (x),
1934 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1938 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1939 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1940 opnum, (enum reload_type)type);
1945 if (GET_CODE (x) == SYMBOL_REF
1946 && DEFAULT_ABI == ABI_DARWIN
1947 && !ALTIVEC_VECTOR_MODE (mode)
1950 /* Darwin load of floating point constant. */
1951 rtx offset = gen_rtx (CONST, Pmode,
1952 gen_rtx (MINUS, Pmode, x,
1953 gen_rtx (SYMBOL_REF, Pmode,
1954 machopic_function_base_name ())));
1955 x = gen_rtx (LO_SUM, GET_MODE (x),
1956 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1957 gen_rtx (HIGH, Pmode, offset)), offset);
1958 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1959 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1960 opnum, (enum reload_type)type);
1966 && CONSTANT_POOL_EXPR_P (x)
1967 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1969 (x) = create_TOC_reference (x);
1977 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1978 that is a valid memory address for an instruction.
1979 The MODE argument is the machine mode for the MEM expression
1980 that wants to use this address.
1982 On the RS/6000, there are four valid address: a SYMBOL_REF that
1983 refers to a constant pool entry of an address (or the sum of it
1984 plus a constant), a short (16-bit signed) constant plus a register,
1985 the sum of two registers, or a register indirect, possibly with an
1986 auto-increment. For DFmode and DImode with an constant plus register,
1987 we must ensure that both words are addressable or PowerPC64 with offset
1990 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1991 32-bit DImode, TImode), indexed addressing cannot be used because
1992 adjacent memory cells are accessed by adding word-sized offsets
1993 during assembly output. */
1995 rs6000_legitimate_address (mode, x, reg_ok_strict)
1996 enum machine_mode mode;
2000 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2002 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2004 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2006 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2008 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2010 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2012 && GET_CODE (x) == PLUS
2013 && GET_CODE (XEXP (x, 0)) == REG
2014 && XEXP (x, 0) == virtual_stack_vars_rtx
2015 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2017 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2020 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2021 && (TARGET_POWERPC64 || mode != DImode)
2022 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2024 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2029 /* Try to output insns to set TARGET equal to the constant C if it can
2030 be done in less than N insns. Do all computations in MODE.
2031 Returns the place where the output has been placed if it can be
2032 done and the insns have been emitted. If it would take more than N
2033 insns, zero is returned and no insns and emitted. */
2036 rs6000_emit_set_const (dest, mode, source, n)
2038 enum machine_mode mode;
2039 int n ATTRIBUTE_UNUSED;
2041 HOST_WIDE_INT c0, c1;
2043 if (mode == QImode || mode == HImode || mode == SImode)
2046 dest = gen_reg_rtx (mode);
2047 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2051 if (GET_CODE (source) == CONST_INT)
2053 c0 = INTVAL (source);
2056 else if (GET_CODE (source) == CONST_DOUBLE)
2058 #if HOST_BITS_PER_WIDE_INT >= 64
2059 c0 = CONST_DOUBLE_LOW (source);
2062 c0 = CONST_DOUBLE_LOW (source);
2063 c1 = CONST_DOUBLE_HIGH (source);
2069 return rs6000_emit_set_long_const (dest, c0, c1);
2072 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2073 fall back to a straight forward decomposition. We do this to avoid
2074 exponential run times encountered when looking for longer sequences
2075 with rs6000_emit_set_const. */
2077 rs6000_emit_set_long_const (dest, c1, c2)
2079 HOST_WIDE_INT c1, c2;
2081 if (!TARGET_POWERPC64)
2083 rtx operand1, operand2;
2085 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2087 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2089 emit_move_insn (operand1, GEN_INT (c1));
2090 emit_move_insn (operand2, GEN_INT (c2));
2094 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2097 ud2 = (c1 & 0xffff0000) >> 16;
2098 #if HOST_BITS_PER_WIDE_INT >= 64
2102 ud4 = (c2 & 0xffff0000) >> 16;
2104 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2105 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2108 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2110 emit_move_insn (dest, GEN_INT (ud1));
2113 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2114 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2117 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2120 emit_move_insn (dest, GEN_INT (ud2 << 16));
2122 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2124 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2125 || (ud4 == 0 && ! (ud3 & 0x8000)))
2128 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2131 emit_move_insn (dest, GEN_INT (ud3 << 16));
2134 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2135 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2137 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2142 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2145 emit_move_insn (dest, GEN_INT (ud4 << 16));
2148 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2150 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2152 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2153 GEN_INT (ud2 << 16)));
2155 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2161 /* Emit a move from SOURCE to DEST in mode MODE. */
2163 rs6000_emit_move (dest, source, mode)
2166 enum machine_mode mode;
2170 operands[1] = source;
2172 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2173 if (GET_CODE (operands[1]) == CONST_DOUBLE
2174 && ! FLOAT_MODE_P (mode)
2175 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2177 /* FIXME. This should never happen. */
2178 /* Since it seems that it does, do the safe thing and convert
2180 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2182 if (GET_CODE (operands[1]) == CONST_DOUBLE
2183 && ! FLOAT_MODE_P (mode)
2184 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2185 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2186 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2187 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2190 /* Check if GCC is setting up a block move that will end up using FP
2191 registers as temporaries. We must make sure this is acceptable. */
2192 if (GET_CODE (operands[0]) == MEM
2193 && GET_CODE (operands[1]) == MEM
2195 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2196 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2197 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2198 ? 32 : MEM_ALIGN (operands[0])))
2199 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2201 : MEM_ALIGN (operands[1]))))
2202 && ! MEM_VOLATILE_P (operands [0])
2203 && ! MEM_VOLATILE_P (operands [1]))
2205 emit_move_insn (adjust_address (operands[0], SImode, 0),
2206 adjust_address (operands[1], SImode, 0));
2207 emit_move_insn (adjust_address (operands[0], SImode, 4),
2208 adjust_address (operands[1], SImode, 4));
2212 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2213 operands[1] = force_reg (mode, operands[1]);
2215 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2216 && GET_CODE (operands[0]) == MEM)
2220 if (reload_in_progress || reload_completed)
2221 regnum = true_regnum (operands[1]);
2222 else if (GET_CODE (operands[1]) == REG)
2223 regnum = REGNO (operands[1]);
2227 /* If operands[1] is a register, on POWER it may have
2228 double-precision data in it, so truncate it to single
2230 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2233 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2234 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2235 operands[1] = newreg;
2239 /* Handle the case where reload calls us with an invalid address;
2240 and the case of CONSTANT_P_RTX. */
2241 if (!VECTOR_MODE_P (mode)
2242 && (! general_operand (operands[1], mode)
2243 || ! nonimmediate_operand (operands[0], mode)
2244 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2246 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2250 /* FIXME: In the long term, this switch statement should go away
2251 and be replaced by a sequence of tests based on things like
2257 if (CONSTANT_P (operands[1])
2258 && GET_CODE (operands[1]) != CONST_INT)
2259 operands[1] = force_const_mem (mode, operands[1]);
2265 if (CONSTANT_P (operands[1])
2266 && ! easy_fp_constant (operands[1], mode))
2267 operands[1] = force_const_mem (mode, operands[1]);
2274 if (CONSTANT_P (operands[1])
2275 && !easy_vector_constant (operands[1]))
2276 operands[1] = force_const_mem (mode, operands[1]);
2281 /* Use default pattern for address of ELF small data */
2284 && DEFAULT_ABI == ABI_V4
2285 && (GET_CODE (operands[1]) == SYMBOL_REF
2286 || GET_CODE (operands[1]) == CONST)
2287 && small_data_operand (operands[1], mode))
2289 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2293 if (DEFAULT_ABI == ABI_V4
2294 && mode == Pmode && mode == SImode
2295 && flag_pic == 1 && got_operand (operands[1], mode))
2297 emit_insn (gen_movsi_got (operands[0], operands[1]));
2301 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2302 && TARGET_NO_TOC && ! flag_pic
2304 && CONSTANT_P (operands[1])
2305 && GET_CODE (operands[1]) != HIGH
2306 && GET_CODE (operands[1]) != CONST_INT)
2308 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2310 /* If this is a function address on -mcall-aixdesc,
2311 convert it to the address of the descriptor. */
2312 if (DEFAULT_ABI == ABI_AIX
2313 && GET_CODE (operands[1]) == SYMBOL_REF
2314 && XSTR (operands[1], 0)[0] == '.')
2316 const char *name = XSTR (operands[1], 0);
2318 while (*name == '.')
2320 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2321 CONSTANT_POOL_ADDRESS_P (new_ref)
2322 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2323 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2324 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2325 operands[1] = new_ref;
2328 if (DEFAULT_ABI == ABI_DARWIN)
2330 emit_insn (gen_macho_high (target, operands[1]));
2331 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2335 emit_insn (gen_elf_high (target, operands[1]));
2336 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2340 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2341 and we have put it in the TOC, we just need to make a TOC-relative
2344 && GET_CODE (operands[1]) == SYMBOL_REF
2345 && CONSTANT_POOL_EXPR_P (operands[1])
2346 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2347 get_pool_mode (operands[1])))
2349 operands[1] = create_TOC_reference (operands[1]);
2351 else if (mode == Pmode
2352 && CONSTANT_P (operands[1])
2353 && ((GET_CODE (operands[1]) != CONST_INT
2354 && ! easy_fp_constant (operands[1], mode))
2355 || (GET_CODE (operands[1]) == CONST_INT
2356 && num_insns_constant (operands[1], mode) > 2)
2357 || (GET_CODE (operands[0]) == REG
2358 && FP_REGNO_P (REGNO (operands[0]))))
2359 && GET_CODE (operands[1]) != HIGH
2360 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2361 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2363 /* Emit a USE operation so that the constant isn't deleted if
2364 expensive optimizations are turned on because nobody
2365 references it. This should only be done for operands that
2366 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2367 This should not be done for operands that contain LABEL_REFs.
2368 For now, we just handle the obvious case. */
2369 if (GET_CODE (operands[1]) != LABEL_REF)
2370 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2373 /* Darwin uses a special PIC legitimizer. */
2374 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2377 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2379 if (operands[0] != operands[1])
2380 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2385 /* If we are to limit the number of things we put in the TOC and
2386 this is a symbol plus a constant we can add in one insn,
2387 just put the symbol in the TOC and add the constant. Don't do
2388 this if reload is in progress. */
2389 if (GET_CODE (operands[1]) == CONST
2390 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2391 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2392 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2393 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2394 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2395 && ! side_effects_p (operands[0]))
2398 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2399 rtx other = XEXP (XEXP (operands[1], 0), 1);
2401 sym = force_reg (mode, sym);
2403 emit_insn (gen_addsi3 (operands[0], sym, other));
2405 emit_insn (gen_adddi3 (operands[0], sym, other));
2409 operands[1] = force_const_mem (mode, operands[1]);
2412 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2413 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2414 get_pool_constant (XEXP (operands[1], 0)),
2415 get_pool_mode (XEXP (operands[1], 0))))
2418 = gen_rtx_MEM (mode,
2419 create_TOC_reference (XEXP (operands[1], 0)));
2420 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2421 RTX_UNCHANGING_P (operands[1]) = 1;
2427 if (GET_CODE (operands[0]) == MEM
2428 && GET_CODE (XEXP (operands[0], 0)) != REG
2429 && ! reload_in_progress)
2431 = replace_equiv_address (operands[0],
2432 copy_addr_to_reg (XEXP (operands[0], 0)));
2434 if (GET_CODE (operands[1]) == MEM
2435 && GET_CODE (XEXP (operands[1], 0)) != REG
2436 && ! reload_in_progress)
2438 = replace_equiv_address (operands[1],
2439 copy_addr_to_reg (XEXP (operands[1], 0)));
2446 /* Above, we may have called force_const_mem which may have returned
2447 an invalid address. If we can, fix this up; otherwise, reload will
2448 have to deal with it. */
2449 if (GET_CODE (operands[1]) == MEM
2450 && ! memory_address_p (mode, XEXP (operands[1], 0))
2451 && ! reload_in_progress)
2452 operands[1] = adjust_address (operands[1], mode, 0);
2454 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2458 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2459 for a call to a function whose data type is FNTYPE.
2460 For a library call, FNTYPE is 0.
2462 For incoming args we set the number of arguments in the prototype large
2463 so we never return a PARALLEL. */
2466 init_cumulative_args (cum, fntype, libname, incoming)
2467 CUMULATIVE_ARGS *cum;
2469 rtx libname ATTRIBUTE_UNUSED;
2472 static CUMULATIVE_ARGS zero_cumulative;
2474 *cum = zero_cumulative;
2476 cum->fregno = FP_ARG_MIN_REG;
2477 cum->vregno = ALTIVEC_ARG_MIN_REG;
2478 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2479 cum->call_cookie = CALL_NORMAL;
2480 cum->sysv_gregno = GP_ARG_MIN_REG;
2483 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2485 else if (cum->prototype)
2486 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2487 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2488 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2491 cum->nargs_prototype = 0;
2493 cum->orig_nargs = cum->nargs_prototype;
2495 /* Check for longcall's */
2496 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2497 cum->call_cookie = CALL_LONG;
2499 if (TARGET_DEBUG_ARG)
2501 fprintf (stderr, "\ninit_cumulative_args:");
2504 tree ret_type = TREE_TYPE (fntype);
2505 fprintf (stderr, " ret code = %s,",
2506 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2509 if (cum->call_cookie & CALL_LONG)
2510 fprintf (stderr, " longcall,");
2512 fprintf (stderr, " proto = %d, nargs = %d\n",
2513 cum->prototype, cum->nargs_prototype);
2517 /* If defined, a C expression which determines whether, and in which
2518 direction, to pad out an argument with extra space. The value
2519 should be of type `enum direction': either `upward' to pad above
2520 the argument, `downward' to pad below, or `none' to inhibit
2523 For the AIX ABI structs are always stored left shifted in their
2527 function_arg_padding (mode, type)
2528 enum machine_mode mode;
2531 if (type != 0 && AGGREGATE_TYPE_P (type))
2534 /* This is the default definition. */
2535 return (! BYTES_BIG_ENDIAN
2538 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2539 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2540 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2541 ? downward : upward));
2544 /* If defined, a C expression that gives the alignment boundary, in bits,
2545 of an argument with the specified mode and type. If it is not defined,
2546 PARM_BOUNDARY is used for all arguments.
2548 V.4 wants long longs to be double word aligned. */
2551 function_arg_boundary (mode, type)
2552 enum machine_mode mode;
2553 tree type ATTRIBUTE_UNUSED;
2555 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2557 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2560 return PARM_BOUNDARY;
2563 /* Update the data in CUM to advance over an argument
2564 of mode MODE and data type TYPE.
2565 (TYPE is null for libcalls where that information may not be available.) */
2568 function_arg_advance (cum, mode, type, named)
2569 CUMULATIVE_ARGS *cum;
2570 enum machine_mode mode;
2574 cum->nargs_prototype--;
2576 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2578 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2581 cum->words += RS6000_ARG_SIZE (mode, type);
2583 else if (DEFAULT_ABI == ABI_V4)
2585 if (TARGET_HARD_FLOAT
2586 && (mode == SFmode || mode == DFmode))
2588 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2593 cum->words += cum->words & 1;
2594 cum->words += RS6000_ARG_SIZE (mode, type);
2600 int gregno = cum->sysv_gregno;
2602 /* Aggregates and IEEE quad get passed by reference. */
2603 if ((type && AGGREGATE_TYPE_P (type))
2607 n_words = RS6000_ARG_SIZE (mode, type);
2609 /* Long long is put in odd registers. */
2610 if (n_words == 2 && (gregno & 1) == 0)
2613 /* Long long is not split between registers and stack. */
2614 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2616 /* Long long is aligned on the stack. */
2618 cum->words += cum->words & 1;
2619 cum->words += n_words;
2622 /* Note: continuing to accumulate gregno past when we've started
2623 spilling to the stack indicates the fact that we've started
2624 spilling to the stack to expand_builtin_saveregs. */
2625 cum->sysv_gregno = gregno + n_words;
2628 if (TARGET_DEBUG_ARG)
2630 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2631 cum->words, cum->fregno);
2632 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2633 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2634 fprintf (stderr, "mode = %4s, named = %d\n",
2635 GET_MODE_NAME (mode), named);
2640 int align = (TARGET_32BIT && (cum->words & 1) != 0
2641 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2643 cum->words += align + RS6000_ARG_SIZE (mode, type);
2645 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2648 if (TARGET_DEBUG_ARG)
2650 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2651 cum->words, cum->fregno);
2652 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2653 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2654 fprintf (stderr, "named = %d, align = %d\n", named, align);
2659 /* Determine where to put an argument to a function.
2660 Value is zero to push the argument on the stack,
2661 or a hard register in which to store the argument.
2663 MODE is the argument's machine mode.
2664 TYPE is the data type of the argument (as a tree).
2665 This is null for libcalls where that information may
2667 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2668 the preceding args and about the function being called.
2669 NAMED is nonzero if this argument is a named parameter
2670 (otherwise it is an extra parameter matching an ellipsis).
2672 On RS/6000 the first eight words of non-FP are normally in registers
2673 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2674 Under V.4, the first 8 FP args are in registers.
2676 If this is floating-point and no prototype is specified, we use
2677 both an FP and integer register (or possibly FP reg and stack). Library
2678 functions (when TYPE is zero) always have the proper types for args,
2679 so we can pass the FP value just in one register. emit_library_function
2680 doesn't support PARALLEL anyway. */
2683 function_arg (cum, mode, type, named)
2684 CUMULATIVE_ARGS *cum;
2685 enum machine_mode mode;
2689 enum rs6000_abi abi = DEFAULT_ABI;
2691 /* Return a marker to indicate whether CR1 needs to set or clear the
2692 bit that V.4 uses to say fp args were passed in registers.
2693 Assume that we don't need the marker for software floating point,
2694 or compiler generated library calls. */
2695 if (mode == VOIDmode)
2698 && TARGET_HARD_FLOAT
2699 && cum->nargs_prototype < 0
2700 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2702 return GEN_INT (cum->call_cookie
2703 | ((cum->fregno == FP_ARG_MIN_REG)
2704 ? CALL_V4_SET_FP_ARGS
2705 : CALL_V4_CLEAR_FP_ARGS));
2708 return GEN_INT (cum->call_cookie);
2711 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2713 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2714 return gen_rtx_REG (mode, cum->vregno);
2718 else if (abi == ABI_V4)
2720 if (TARGET_HARD_FLOAT
2721 && (mode == SFmode || mode == DFmode))
2723 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2724 return gen_rtx_REG (mode, cum->fregno);
2731 int gregno = cum->sysv_gregno;
2733 /* Aggregates and IEEE quad get passed by reference. */
2734 if ((type && AGGREGATE_TYPE_P (type))
2738 n_words = RS6000_ARG_SIZE (mode, type);
2740 /* Long long is put in odd registers. */
2741 if (n_words == 2 && (gregno & 1) == 0)
2744 /* Long long is not split between registers and stack. */
2745 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2746 return gen_rtx_REG (mode, gregno);
2753 int align = (TARGET_32BIT && (cum->words & 1) != 0
2754 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2755 int align_words = cum->words + align;
2757 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2760 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2763 || ((cum->nargs_prototype > 0)
2764 /* IBM AIX extended its linkage convention definition always
2765 to require FP args after register save area hole on the
2767 && (DEFAULT_ABI != ABI_AIX
2769 || (align_words < GP_ARG_NUM_REG))))
2770 return gen_rtx_REG (mode, cum->fregno);
2772 return gen_rtx_PARALLEL (mode,
2774 gen_rtx_EXPR_LIST (VOIDmode,
2775 ((align_words >= GP_ARG_NUM_REG)
2778 + RS6000_ARG_SIZE (mode, type)
2780 /* If this is partially on the stack, then
2781 we only include the portion actually
2782 in registers here. */
2783 ? gen_rtx_REG (SImode,
2784 GP_ARG_MIN_REG + align_words)
2785 : gen_rtx_REG (mode,
2786 GP_ARG_MIN_REG + align_words))),
2788 gen_rtx_EXPR_LIST (VOIDmode,
2789 gen_rtx_REG (mode, cum->fregno),
2792 else if (align_words < GP_ARG_NUM_REG)
2793 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2799 /* For an arg passed partly in registers and partly in memory,
2800 this is the number of registers used.
2801 For args passed entirely in registers or entirely in memory, zero. */
2804 function_arg_partial_nregs (cum, mode, type, named)
2805 CUMULATIVE_ARGS *cum;
2806 enum machine_mode mode;
2808 int named ATTRIBUTE_UNUSED;
2810 if (DEFAULT_ABI == ABI_V4)
2813 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2814 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2816 if (cum->nargs_prototype >= 0)
2820 if (cum->words < GP_ARG_NUM_REG
2821 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2823 int ret = GP_ARG_NUM_REG - cum->words;
2824 if (ret && TARGET_DEBUG_ARG)
2825 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2833 /* A C expression that indicates when an argument must be passed by
2834 reference. If nonzero for an argument, a copy of that argument is
2835 made in memory and a pointer to the argument is passed instead of
2836 the argument itself. The pointer is passed in whatever way is
2837 appropriate for passing a pointer to that type.
2839 Under V.4, structures and unions are passed by reference. */
2842 function_arg_pass_by_reference (cum, mode, type, named)
2843 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2844 enum machine_mode mode ATTRIBUTE_UNUSED;
2846 int named ATTRIBUTE_UNUSED;
2848 if (DEFAULT_ABI == ABI_V4
2849 && ((type && AGGREGATE_TYPE_P (type))
2852 if (TARGET_DEBUG_ARG)
2853 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2861 /* Perform any needed actions needed for a function that is receiving a
2862 variable number of arguments.
2866 MODE and TYPE are the mode and type of the current parameter.
2868 PRETEND_SIZE is a variable that should be set to the amount of stack
2869 that must be pushed by the prolog to pretend that our caller pushed
2872 Normally, this macro will push all remaining incoming registers on the
2873 stack and set PRETEND_SIZE to the length of the registers pushed. */
2876 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2877 CUMULATIVE_ARGS *cum;
2878 enum machine_mode mode;
2884 CUMULATIVE_ARGS next_cum;
2885 int reg_size = TARGET_32BIT ? 4 : 8;
2886 rtx save_area = NULL_RTX, mem;
2887 int first_reg_offset, set;
2891 fntype = TREE_TYPE (current_function_decl);
2892 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2893 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2894 != void_type_node));
2896 /* For varargs, we do not want to skip the dummy va_dcl argument.
2897 For stdargs, we do want to skip the last named argument. */
2900 function_arg_advance (&next_cum, mode, type, 1);
2902 if (DEFAULT_ABI == ABI_V4)
2904 /* Indicate to allocate space on the stack for varargs save area. */
2905 cfun->machine->sysv_varargs_p = 1;
2907 save_area = plus_constant (virtual_stack_vars_rtx,
2908 - RS6000_VARARGS_SIZE);
2910 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2914 first_reg_offset = next_cum.words;
2915 save_area = virtual_incoming_args_rtx;
2916 cfun->machine->sysv_varargs_p = 0;
2918 if (MUST_PASS_IN_STACK (mode, type))
2919 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2922 set = get_varargs_alias_set ();
2923 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2925 mem = gen_rtx_MEM (BLKmode,
2926 plus_constant (save_area,
2927 first_reg_offset * reg_size)),
2928 set_mem_alias_set (mem, set);
2929 set_mem_align (mem, BITS_PER_WORD);
2932 (GP_ARG_MIN_REG + first_reg_offset, mem,
2933 GP_ARG_NUM_REG - first_reg_offset,
2934 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2936 /* ??? Does ABI_V4 need this at all? */
2937 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2940 /* Save FP registers if needed. */
2941 if (DEFAULT_ABI == ABI_V4
2942 && TARGET_HARD_FLOAT && ! no_rtl
2943 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2945 int fregno = next_cum.fregno;
2946 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2947 rtx lab = gen_label_rtx ();
2948 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2950 emit_jump_insn (gen_rtx_SET (VOIDmode,
2952 gen_rtx_IF_THEN_ELSE (VOIDmode,
2953 gen_rtx_NE (VOIDmode, cr1,
2955 gen_rtx_LABEL_REF (VOIDmode, lab),
2958 while (fregno <= FP_ARG_V4_MAX_REG)
2960 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2961 set_mem_alias_set (mem, set);
2962 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2971 /* Create the va_list data type. */
2974 rs6000_build_va_list ()
2976 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2978 /* For AIX, prefer 'char *' because that's what the system
2979 header files like. */
2980 if (DEFAULT_ABI != ABI_V4)
2981 return build_pointer_type (char_type_node);
2983 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
2984 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2986 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2987 unsigned_char_type_node);
2988 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2989 unsigned_char_type_node);
2990 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2992 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2995 DECL_FIELD_CONTEXT (f_gpr) = record;
2996 DECL_FIELD_CONTEXT (f_fpr) = record;
2997 DECL_FIELD_CONTEXT (f_ovf) = record;
2998 DECL_FIELD_CONTEXT (f_sav) = record;
3000 TREE_CHAIN (record) = type_decl;
3001 TYPE_NAME (record) = type_decl;
3002 TYPE_FIELDS (record) = f_gpr;
3003 TREE_CHAIN (f_gpr) = f_fpr;
3004 TREE_CHAIN (f_fpr) = f_ovf;
3005 TREE_CHAIN (f_ovf) = f_sav;
3007 layout_type (record);
3009 /* The correct type is an array type of one element. */
3010 return build_array_type (record, build_index_type (size_zero_node));
3013 /* Implement va_start. */
3016 rs6000_va_start (stdarg_p, valist, nextarg)
3021 HOST_WIDE_INT words, n_gpr, n_fpr;
3022 tree f_gpr, f_fpr, f_ovf, f_sav;
3023 tree gpr, fpr, ovf, sav, t;
3025 /* Only SVR4 needs something special. */
3026 if (DEFAULT_ABI != ABI_V4)
3028 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3032 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3033 f_fpr = TREE_CHAIN (f_gpr);
3034 f_ovf = TREE_CHAIN (f_fpr);
3035 f_sav = TREE_CHAIN (f_ovf);
3037 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3038 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3039 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3040 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3041 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3043 /* Count number of gp and fp argument registers used. */
3044 words = current_function_args_info.words;
3045 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3046 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3048 if (TARGET_DEBUG_ARG)
3050 fputs ("va_start: words = ", stderr);
3051 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3052 fputs (", n_gpr = ", stderr);
3053 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3054 fputs (", n_fpr = ", stderr);
3055 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3056 putc ('\n', stderr);
3059 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3060 TREE_SIDE_EFFECTS (t) = 1;
3061 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3063 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3064 TREE_SIDE_EFFECTS (t) = 1;
3065 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3067 /* Find the overflow area. */
3068 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3070 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3071 build_int_2 (words * UNITS_PER_WORD, 0));
3072 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3073 TREE_SIDE_EFFECTS (t) = 1;
3074 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3076 /* Find the register save area. */
3077 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3078 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3079 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3080 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3081 TREE_SIDE_EFFECTS (t) = 1;
3082 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3085 /* Implement va_arg. */
3088 rs6000_va_arg (valist, type)
3091 tree f_gpr, f_fpr, f_ovf, f_sav;
3092 tree gpr, fpr, ovf, sav, reg, t, u;
3093 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3094 rtx lab_false, lab_over, addr_rtx, r;
3096 if (DEFAULT_ABI != ABI_V4)
3097 return std_expand_builtin_va_arg (valist, type);
3099 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3100 f_fpr = TREE_CHAIN (f_gpr);
3101 f_ovf = TREE_CHAIN (f_fpr);
3102 f_sav = TREE_CHAIN (f_ovf);
3104 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3105 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3106 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3107 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3108 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3110 size = int_size_in_bytes (type);
3111 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3113 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3115 /* Aggregates and long doubles are passed by reference. */
3121 size = UNITS_PER_WORD;
3124 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3126 /* FP args go in FP registers, if present. */
3135 /* Otherwise into GP registers. */
3143 /* Pull the value out of the saved registers ... */
3145 lab_false = gen_label_rtx ();
3146 lab_over = gen_label_rtx ();
3147 addr_rtx = gen_reg_rtx (Pmode);
3149 /* Vectors never go in registers. */
3150 if (TREE_CODE (type) != VECTOR_TYPE)
3152 TREE_THIS_VOLATILE (reg) = 1;
3153 emit_cmp_and_jump_insns
3154 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3155 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3158 /* Long long is aligned in the registers. */
3161 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3162 build_int_2 (n_reg - 1, 0));
3163 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3164 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3165 TREE_SIDE_EFFECTS (u) = 1;
3166 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3170 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3174 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3175 build_int_2 (n_reg, 0));
3176 TREE_SIDE_EFFECTS (u) = 1;
3178 u = build1 (CONVERT_EXPR, integer_type_node, u);
3179 TREE_SIDE_EFFECTS (u) = 1;
3181 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3182 TREE_SIDE_EFFECTS (u) = 1;
3184 t = build (PLUS_EXPR, ptr_type_node, t, u);
3185 TREE_SIDE_EFFECTS (t) = 1;
3187 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3189 emit_move_insn (addr_rtx, r);
3191 emit_jump_insn (gen_jump (lab_over));
3195 emit_label (lab_false);
3197 /* ... otherwise out of the overflow area. */
3199 /* Make sure we don't find reg 7 for the next int arg.
3201 All AltiVec vectors go in the overflow area. So in the AltiVec
3202 case we need to get the vectors from the overflow area, but
3203 remember where the GPRs and FPRs are. */
3204 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3206 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3207 TREE_SIDE_EFFECTS (t) = 1;
3208 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3211 /* Care for on-stack alignment if needed. */
3218 /* Vectors are 16 byte aligned. */
3219 if (TREE_CODE (type) == VECTOR_TYPE)
3224 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3225 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3229 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3231 emit_move_insn (addr_rtx, r);
3233 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3234 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3235 TREE_SIDE_EFFECTS (t) = 1;
3236 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3238 emit_label (lab_over);
3242 r = gen_rtx_MEM (Pmode, addr_rtx);
3243 set_mem_alias_set (r, get_varargs_alias_set ());
3244 emit_move_insn (addr_rtx, r);
3252 #define def_builtin(MASK, NAME, TYPE, CODE) \
3254 if ((MASK) & target_flags) \
3255 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3258 struct builtin_description
3260 const unsigned int mask;
3261 const enum insn_code icode;
3262 const char *const name;
3263 const enum rs6000_builtins code;
3266 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3268 static const struct builtin_description bdesc_3arg[] =
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3273 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3274 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3280 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3281 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3282 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3283 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3284 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3285 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3286 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3287 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3288 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3289 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3292 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3295 /* DST operations: void foo (void *, const int, const char). */
3297 static const struct builtin_description bdesc_dst[] =
3299 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3300 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3301 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3302 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3305 /* Simple binary operations: VECc = foo (VECa, VECb). */
3307 static const struct builtin_description bdesc_2arg[] =
3309 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3310 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3311 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3312 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3314 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3315 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3316 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3317 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3319 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3320 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3345 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3346 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3347 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3348 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3349 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3350 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3351 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3358 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3359 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3360 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3361 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3362 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3363 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3364 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3373 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3374 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3390 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3405 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3406 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3407 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3408 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3421 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3424 /* AltiVec predicates. */
3426 struct builtin_description_predicates
3428 const unsigned int mask;
3429 const enum insn_code icode;
3431 const char *const name;
3432 const enum rs6000_builtins code;
3435 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3437 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3445 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3447 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3448 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3452 /* ABS* opreations. */
3454 static const struct builtin_description bdesc_abs[] =
3456 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3457 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3458 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3459 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3465 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3468 static const struct builtin_description bdesc_1arg[] =
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3475 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3476 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3490 altivec_expand_unop_builtin (icode, arglist, target)
3491 enum insn_code icode;
3496 tree arg0 = TREE_VALUE (arglist);
3497 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3498 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3499 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3501 /* If we got invalid arguments bail out before generating bad rtl. */
3502 if (arg0 == error_mark_node)
3506 || GET_MODE (target) != tmode
3507 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3508 target = gen_reg_rtx (tmode);
3510 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3511 op0 = copy_to_mode_reg (mode0, op0);
3513 pat = GEN_FCN (icode) (target, op0);
3522 altivec_expand_abs_builtin (icode, arglist, target)
3523 enum insn_code icode;
3527 rtx pat, scratch1, scratch2;
3528 tree arg0 = TREE_VALUE (arglist);
3529 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3530 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3531 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3533 /* If we have invalid arguments, bail out before generating bad rtl. */
3534 if (arg0 == error_mark_node)
3538 || GET_MODE (target) != tmode
3539 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3540 target = gen_reg_rtx (tmode);
3542 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3543 op0 = copy_to_mode_reg (mode0, op0);
3545 scratch1 = gen_reg_rtx (mode0);
3546 scratch2 = gen_reg_rtx (mode0);
3548 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3557 altivec_expand_binop_builtin (icode, arglist, target)
3558 enum insn_code icode;
3563 tree arg0 = TREE_VALUE (arglist);
3564 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3565 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3566 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3567 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3568 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3569 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3571 /* If we got invalid arguments bail out before generating bad rtl. */
3572 if (arg0 == error_mark_node || arg1 == error_mark_node)
3576 || GET_MODE (target) != tmode
3577 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3578 target = gen_reg_rtx (tmode);
3580 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3581 op0 = copy_to_mode_reg (mode0, op0);
3582 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3583 op1 = copy_to_mode_reg (mode1, op1);
3585 pat = GEN_FCN (icode) (target, op0, op1);
3594 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3595 enum insn_code icode;
3601 tree cr6_form = TREE_VALUE (arglist);
3602 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3603 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3604 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3605 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3606 enum machine_mode tmode = SImode;
3607 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3608 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3611 if (TREE_CODE (cr6_form) != INTEGER_CST)
3613 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3617 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3622 /* If we have invalid arguments, bail out before generating bad rtl. */
3623 if (arg0 == error_mark_node || arg1 == error_mark_node)
3627 || GET_MODE (target) != tmode
3628 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3629 target = gen_reg_rtx (tmode);
3631 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3632 op0 = copy_to_mode_reg (mode0, op0);
3633 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3634 op1 = copy_to_mode_reg (mode1, op1);
3636 scratch = gen_reg_rtx (mode0);
3638 pat = GEN_FCN (icode) (scratch, op0, op1,
3639 gen_rtx (SYMBOL_REF, Pmode, opcode));
3644 /* The vec_any* and vec_all* predicates use the same opcodes for two
3645 different operations, but the bits in CR6 will be different
3646 depending on what information we want. So we have to play tricks
3647 with CR6 to get the right bits out.
3649 If you think this is disgusting, look at the specs for the
3650 AltiVec predicates. */
3652 switch (cr6_form_int)
3655 emit_insn (gen_cr6_test_for_zero (target));
3658 emit_insn (gen_cr6_test_for_zero_reverse (target));
3661 emit_insn (gen_cr6_test_for_lt (target));
3664 emit_insn (gen_cr6_test_for_lt_reverse (target));
3667 error ("argument 1 of __builtin_altivec_predicate is out of range");
3675 altivec_expand_stv_builtin (icode, arglist)
3676 enum insn_code icode;
3679 tree arg0 = TREE_VALUE (arglist);
3680 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3681 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3682 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3683 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3684 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3686 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3687 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3688 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3690 /* Invalid arguments. Bail before doing anything stoopid! */
3691 if (arg0 == error_mark_node
3692 || arg1 == error_mark_node
3693 || arg2 == error_mark_node)
3696 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3697 op0 = copy_to_mode_reg (mode2, op0);
3698 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3699 op1 = copy_to_mode_reg (mode0, op1);
3700 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3701 op2 = copy_to_mode_reg (mode1, op2);
3703 pat = GEN_FCN (icode) (op1, op2, op0);
3710 altivec_expand_ternop_builtin (icode, arglist, target)
3711 enum insn_code icode;
3716 tree arg0 = TREE_VALUE (arglist);
3717 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3718 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3719 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3720 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3721 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3722 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3723 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3724 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3725 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3727 /* If we got invalid arguments bail out before generating bad rtl. */
3728 if (arg0 == error_mark_node
3729 || arg1 == error_mark_node
3730 || arg2 == error_mark_node)
3734 || GET_MODE (target) != tmode
3735 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3736 target = gen_reg_rtx (tmode);
3738 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3739 op0 = copy_to_mode_reg (mode0, op0);
3740 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3741 op1 = copy_to_mode_reg (mode1, op1);
3742 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3743 op2 = copy_to_mode_reg (mode2, op2);
3745 pat = GEN_FCN (icode) (target, op0, op1, op2);
3753 altivec_expand_builtin (exp, target)
3757 struct builtin_description *d;
3758 struct builtin_description_predicates *dp;
3760 enum insn_code icode;
3761 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3762 tree arglist = TREE_OPERAND (exp, 1);
3763 tree arg0, arg1, arg2;
3764 rtx op0, op1, op2, pat;
3765 enum machine_mode tmode, mode0, mode1, mode2;
3766 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3770 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3771 icode = CODE_FOR_altivec_lvx_16qi;
3772 arg0 = TREE_VALUE (arglist);
3773 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3774 tmode = insn_data[icode].operand[0].mode;
3775 mode0 = insn_data[icode].operand[1].mode;
3778 || GET_MODE (target) != tmode
3779 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3780 target = gen_reg_rtx (tmode);
3782 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3783 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3785 pat = GEN_FCN (icode) (target, op0);
3791 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3792 icode = CODE_FOR_altivec_lvx_8hi;
3793 arg0 = TREE_VALUE (arglist);
3794 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3795 tmode = insn_data[icode].operand[0].mode;
3796 mode0 = insn_data[icode].operand[1].mode;
3799 || GET_MODE (target) != tmode
3800 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3801 target = gen_reg_rtx (tmode);
3803 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3804 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3806 pat = GEN_FCN (icode) (target, op0);
3812 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3813 icode = CODE_FOR_altivec_lvx_4si;
3814 arg0 = TREE_VALUE (arglist);
3815 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3816 tmode = insn_data[icode].operand[0].mode;
3817 mode0 = insn_data[icode].operand[1].mode;
3820 || GET_MODE (target) != tmode
3821 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3822 target = gen_reg_rtx (tmode);
3824 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3825 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3827 pat = GEN_FCN (icode) (target, op0);
3833 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3834 icode = CODE_FOR_altivec_lvx_4sf;
3835 arg0 = TREE_VALUE (arglist);
3836 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3837 tmode = insn_data[icode].operand[0].mode;
3838 mode0 = insn_data[icode].operand[1].mode;
3841 || GET_MODE (target) != tmode
3842 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3843 target = gen_reg_rtx (tmode);
3845 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3846 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3848 pat = GEN_FCN (icode) (target, op0);
3854 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3855 icode = CODE_FOR_altivec_stvx_16qi;
3856 arg0 = TREE_VALUE (arglist);
3857 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3858 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3859 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3860 mode0 = insn_data[icode].operand[0].mode;
3861 mode1 = insn_data[icode].operand[1].mode;
3863 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3864 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3865 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3866 op1 = copy_to_mode_reg (mode1, op1);
3868 pat = GEN_FCN (icode) (op0, op1);
3873 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3874 icode = CODE_FOR_altivec_stvx_8hi;
3875 arg0 = TREE_VALUE (arglist);
3876 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3877 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3878 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3879 mode0 = insn_data[icode].operand[0].mode;
3880 mode1 = insn_data[icode].operand[1].mode;
3882 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3883 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3884 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3885 op1 = copy_to_mode_reg (mode1, op1);
3887 pat = GEN_FCN (icode) (op0, op1);
3892 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3893 icode = CODE_FOR_altivec_stvx_4si;
3894 arg0 = TREE_VALUE (arglist);
3895 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3896 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3897 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3898 mode0 = insn_data[icode].operand[0].mode;
3899 mode1 = insn_data[icode].operand[1].mode;
3901 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3902 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3903 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3904 op1 = copy_to_mode_reg (mode1, op1);
3906 pat = GEN_FCN (icode) (op0, op1);
3911 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3912 icode = CODE_FOR_altivec_stvx_4sf;
3913 arg0 = TREE_VALUE (arglist);
3914 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3915 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3916 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3917 mode0 = insn_data[icode].operand[0].mode;
3918 mode1 = insn_data[icode].operand[1].mode;
3920 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3921 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3922 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3923 op1 = copy_to_mode_reg (mode1, op1);
3925 pat = GEN_FCN (icode) (op0, op1);
3930 case ALTIVEC_BUILTIN_STVX:
3931 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3932 case ALTIVEC_BUILTIN_STVEBX:
3933 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3934 case ALTIVEC_BUILTIN_STVEHX:
3935 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3936 case ALTIVEC_BUILTIN_STVEWX:
3937 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3938 case ALTIVEC_BUILTIN_STVXL:
3939 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3941 case ALTIVEC_BUILTIN_MFVSCR:
3942 icode = CODE_FOR_altivec_mfvscr;
3943 tmode = insn_data[icode].operand[0].mode;
3946 || GET_MODE (target) != tmode
3947 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3948 target = gen_reg_rtx (tmode);
3950 pat = GEN_FCN (icode) (target);
3956 case ALTIVEC_BUILTIN_MTVSCR:
3957 icode = CODE_FOR_altivec_mtvscr;
3958 arg0 = TREE_VALUE (arglist);
3959 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3960 mode0 = insn_data[icode].operand[0].mode;
3962 /* If we got invalid arguments bail out before generating bad rtl. */
3963 if (arg0 == error_mark_node)
3966 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3967 op0 = copy_to_mode_reg (mode0, op0);
3969 pat = GEN_FCN (icode) (op0);
3974 case ALTIVEC_BUILTIN_DSSALL:
3975 emit_insn (gen_altivec_dssall ());
3978 case ALTIVEC_BUILTIN_DSS:
3979 icode = CODE_FOR_altivec_dss;
3980 arg0 = TREE_VALUE (arglist);
3981 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3982 mode0 = insn_data[icode].operand[0].mode;
3984 /* If we got invalid arguments bail out before generating bad rtl. */
3985 if (arg0 == error_mark_node)
3988 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3989 op0 = copy_to_mode_reg (mode0, op0);
3991 emit_insn (gen_altivec_dss (op0));
3995 /* Handle DST variants. */
3996 d = (struct builtin_description *) bdesc_dst;
3997 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
3998 if (d->code == fcode)
4000 arg0 = TREE_VALUE (arglist);
4001 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4002 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4003 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4004 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4005 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4006 mode0 = insn_data[d->icode].operand[0].mode;
4007 mode1 = insn_data[d->icode].operand[1].mode;
4008 mode2 = insn_data[d->icode].operand[2].mode;
4010 /* Invalid arguments, bail out before generating bad rtl. */
4011 if (arg0 == error_mark_node
4012 || arg1 == error_mark_node
4013 || arg2 == error_mark_node)
4016 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4017 op0 = copy_to_mode_reg (mode0, op0);
4018 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4019 op1 = copy_to_mode_reg (mode1, op1);
4021 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4023 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4027 pat = GEN_FCN (d->icode) (op0, op1, op2);
4034 /* Expand abs* operations. */
4035 d = (struct builtin_description *) bdesc_abs;
4036 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4037 if (d->code == fcode)
4038 return altivec_expand_abs_builtin (d->icode, arglist, target);
4040 /* Handle simple unary operations. */
4041 d = (struct builtin_description *) bdesc_1arg;
4042 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4043 if (d->code == fcode)
4044 return altivec_expand_unop_builtin (d->icode, arglist, target);
4046 /* Handle simple binary operations. */
4047 d = (struct builtin_description *) bdesc_2arg;
4048 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4049 if (d->code == fcode)
4050 return altivec_expand_binop_builtin (d->icode, arglist, target);
4052 /* Expand the AltiVec predicates. */
4053 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4054 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4055 if (dp->code == fcode)
4056 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4058 /* LV* are funky. We initialized them differently. */
4061 case ALTIVEC_BUILTIN_LVSL:
4062 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4064 case ALTIVEC_BUILTIN_LVSR:
4065 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4067 case ALTIVEC_BUILTIN_LVEBX:
4068 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4070 case ALTIVEC_BUILTIN_LVEHX:
4071 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4073 case ALTIVEC_BUILTIN_LVEWX:
4074 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4076 case ALTIVEC_BUILTIN_LVXL:
4077 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4079 case ALTIVEC_BUILTIN_LVX:
4080 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4087 /* Handle simple ternary operations. */
4088 d = (struct builtin_description *) bdesc_3arg;
4089 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4090 if (d->code == fcode)
4091 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4097 /* Expand an expression EXP that calls a built-in function,
4098 with result going to TARGET if that's convenient
4099 (and in mode MODE if that's convenient).
4100 SUBTARGET may be used as the target for computing one of EXP's operands.
4101 IGNORE is nonzero if the value is to be ignored. */
4104 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4107 rtx subtarget ATTRIBUTE_UNUSED;
4108 enum machine_mode mode ATTRIBUTE_UNUSED;
4109 int ignore ATTRIBUTE_UNUSED;
4112 return altivec_expand_builtin (exp, target);
4118 rs6000_init_builtins ()
4121 altivec_init_builtins ();
4125 altivec_init_builtins (void)
4127 struct builtin_description *d;
4128 struct builtin_description_predicates *dp;
4131 tree endlink = void_list_node;
4133 tree pint_type_node = build_pointer_type (integer_type_node);
4134 tree pvoid_type_node = build_pointer_type (void_type_node);
4135 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4136 tree pchar_type_node = build_pointer_type (char_type_node);
4137 tree pfloat_type_node = build_pointer_type (float_type_node);
4139 tree v4sf_ftype_v4sf_v4sf_v16qi
4140 = build_function_type (V4SF_type_node,
4141 tree_cons (NULL_TREE, V4SF_type_node,
4142 tree_cons (NULL_TREE, V4SF_type_node,
4143 tree_cons (NULL_TREE,
4146 tree v4si_ftype_v4si_v4si_v16qi
4147 = build_function_type (V4SI_type_node,
4148 tree_cons (NULL_TREE, V4SI_type_node,
4149 tree_cons (NULL_TREE, V4SI_type_node,
4150 tree_cons (NULL_TREE,
4153 tree v8hi_ftype_v8hi_v8hi_v16qi
4154 = build_function_type (V8HI_type_node,
4155 tree_cons (NULL_TREE, V8HI_type_node,
4156 tree_cons (NULL_TREE, V8HI_type_node,
4157 tree_cons (NULL_TREE,
4160 tree v16qi_ftype_v16qi_v16qi_v16qi
4161 = build_function_type (V16QI_type_node,
4162 tree_cons (NULL_TREE, V16QI_type_node,
4163 tree_cons (NULL_TREE, V16QI_type_node,
4164 tree_cons (NULL_TREE,
4168 /* V4SI foo (char). */
4169 tree v4si_ftype_char
4170 = build_function_type (V4SI_type_node,
4171 tree_cons (NULL_TREE, char_type_node, endlink));
4173 /* V8HI foo (char). */
4174 tree v8hi_ftype_char
4175 = build_function_type (V8HI_type_node,
4176 tree_cons (NULL_TREE, char_type_node, endlink));
4178 /* V16QI foo (char). */
4179 tree v16qi_ftype_char
4180 = build_function_type (V16QI_type_node,
4181 tree_cons (NULL_TREE, char_type_node, endlink));
4182 /* V4SF foo (V4SF). */
4183 tree v4sf_ftype_v4sf
4184 = build_function_type (V4SF_type_node,
4185 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4187 /* V4SI foo (int *). */
4188 tree v4si_ftype_pint
4189 = build_function_type (V4SI_type_node,
4190 tree_cons (NULL_TREE, pint_type_node, endlink));
4191 /* V8HI foo (short *). */
4192 tree v8hi_ftype_pshort
4193 = build_function_type (V8HI_type_node,
4194 tree_cons (NULL_TREE, pshort_type_node, endlink));
4195 /* V16QI foo (char *). */
4196 tree v16qi_ftype_pchar
4197 = build_function_type (V16QI_type_node,
4198 tree_cons (NULL_TREE, pchar_type_node, endlink));
4199 /* V4SF foo (float *). */
4200 tree v4sf_ftype_pfloat
4201 = build_function_type (V4SF_type_node,
4202 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4204 /* V8HI foo (V16QI). */
4205 tree v8hi_ftype_v16qi
4206 = build_function_type (V8HI_type_node,
4207 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4209 /* void foo (void *, int, char/literal). */
4210 tree void_ftype_pvoid_int_char
4211 = build_function_type (void_type_node,
4212 tree_cons (NULL_TREE, pvoid_type_node,
4213 tree_cons (NULL_TREE, integer_type_node,
4214 tree_cons (NULL_TREE,
4218 /* void foo (int *, V4SI). */
4219 tree void_ftype_pint_v4si
4220 = build_function_type (void_type_node,
4221 tree_cons (NULL_TREE, pint_type_node,
4222 tree_cons (NULL_TREE, V4SI_type_node,
4224 /* void foo (short *, V8HI). */
4225 tree void_ftype_pshort_v8hi
4226 = build_function_type (void_type_node,
4227 tree_cons (NULL_TREE, pshort_type_node,
4228 tree_cons (NULL_TREE, V8HI_type_node,
4230 /* void foo (char *, V16QI). */
4231 tree void_ftype_pchar_v16qi
4232 = build_function_type (void_type_node,
4233 tree_cons (NULL_TREE, pchar_type_node,
4234 tree_cons (NULL_TREE, V16QI_type_node,
4236 /* void foo (float *, V4SF). */
4237 tree void_ftype_pfloat_v4sf
4238 = build_function_type (void_type_node,
4239 tree_cons (NULL_TREE, pfloat_type_node,
4240 tree_cons (NULL_TREE, V4SF_type_node,
4243 /* void foo (V4SI). */
4244 tree void_ftype_v4si
4245 = build_function_type (void_type_node,
4246 tree_cons (NULL_TREE, V4SI_type_node,
4249 /* void foo (vint, int, void *). */
4250 tree void_ftype_v4si_int_pvoid
4251 = build_function_type (void_type_node,
4252 tree_cons (NULL_TREE, V4SI_type_node,
4253 tree_cons (NULL_TREE, integer_type_node,
4254 tree_cons (NULL_TREE,
4258 /* void foo (vchar, int, void *). */
4259 tree void_ftype_v16qi_int_pvoid
4260 = build_function_type (void_type_node,
4261 tree_cons (NULL_TREE, V16QI_type_node,
4262 tree_cons (NULL_TREE, integer_type_node,
4263 tree_cons (NULL_TREE,
4267 /* void foo (vshort, int, void *). */
4268 tree void_ftype_v8hi_int_pvoid
4269 = build_function_type (void_type_node,
4270 tree_cons (NULL_TREE, V8HI_type_node,
4271 tree_cons (NULL_TREE, integer_type_node,
4272 tree_cons (NULL_TREE,
4276 /* void foo (char). */
4278 = build_function_type (void_type_node,
4279 tree_cons (NULL_TREE, char_type_node,
4282 /* void foo (void). */
4283 tree void_ftype_void
4284 = build_function_type (void_type_node, void_list_node);
4286 /* vshort foo (void). */
4287 tree v8hi_ftype_void
4288 = build_function_type (V8HI_type_node, void_list_node);
4290 tree v4si_ftype_v4si_v4si
4291 = build_function_type (V4SI_type_node,
4292 tree_cons (NULL_TREE, V4SI_type_node,
4293 tree_cons (NULL_TREE, V4SI_type_node,
4296 /* These are for the unsigned 5 bit literals. */
4298 tree v4sf_ftype_v4si_char
4299 = build_function_type (V4SF_type_node,
4300 tree_cons (NULL_TREE, V4SI_type_node,
4301 tree_cons (NULL_TREE, char_type_node,
4303 tree v4si_ftype_v4sf_char
4304 = build_function_type (V4SI_type_node,
4305 tree_cons (NULL_TREE, V4SF_type_node,
4306 tree_cons (NULL_TREE, char_type_node,
4308 tree v4si_ftype_v4si_char
4309 = build_function_type (V4SI_type_node,
4310 tree_cons (NULL_TREE, V4SI_type_node,
4311 tree_cons (NULL_TREE, char_type_node,
4313 tree v8hi_ftype_v8hi_char
4314 = build_function_type (V8HI_type_node,
4315 tree_cons (NULL_TREE, V8HI_type_node,
4316 tree_cons (NULL_TREE, char_type_node,
4318 tree v16qi_ftype_v16qi_char
4319 = build_function_type (V16QI_type_node,
4320 tree_cons (NULL_TREE, V16QI_type_node,
4321 tree_cons (NULL_TREE, char_type_node,
4324 /* These are for the unsigned 4 bit literals. */
4326 tree v16qi_ftype_v16qi_v16qi_char
4327 = build_function_type (V16QI_type_node,
4328 tree_cons (NULL_TREE, V16QI_type_node,
4329 tree_cons (NULL_TREE, V16QI_type_node,
4330 tree_cons (NULL_TREE,
4334 tree v8hi_ftype_v8hi_v8hi_char
4335 = build_function_type (V8HI_type_node,
4336 tree_cons (NULL_TREE, V8HI_type_node,
4337 tree_cons (NULL_TREE, V8HI_type_node,
4338 tree_cons (NULL_TREE,
4342 tree v4si_ftype_v4si_v4si_char
4343 = build_function_type (V4SI_type_node,
4344 tree_cons (NULL_TREE, V4SI_type_node,
4345 tree_cons (NULL_TREE, V4SI_type_node,
4346 tree_cons (NULL_TREE,
4350 tree v4sf_ftype_v4sf_v4sf_char
4351 = build_function_type (V4SF_type_node,
4352 tree_cons (NULL_TREE, V4SF_type_node,
4353 tree_cons (NULL_TREE, V4SF_type_node,
4354 tree_cons (NULL_TREE,
4358 /* End of 4 bit literals. */
4360 tree v4sf_ftype_v4sf_v4sf
4361 = build_function_type (V4SF_type_node,
4362 tree_cons (NULL_TREE, V4SF_type_node,
4363 tree_cons (NULL_TREE, V4SF_type_node,
4365 tree v4sf_ftype_v4sf_v4sf_v4si
4366 = build_function_type (V4SF_type_node,
4367 tree_cons (NULL_TREE, V4SF_type_node,
4368 tree_cons (NULL_TREE, V4SF_type_node,
4369 tree_cons (NULL_TREE,
4372 tree v4sf_ftype_v4sf_v4sf_v4sf
4373 = build_function_type (V4SF_type_node,
4374 tree_cons (NULL_TREE, V4SF_type_node,
4375 tree_cons (NULL_TREE, V4SF_type_node,
4376 tree_cons (NULL_TREE,
4379 tree v4si_ftype_v4si_v4si_v4si
4380 = build_function_type (V4SI_type_node,
4381 tree_cons (NULL_TREE, V4SI_type_node,
4382 tree_cons (NULL_TREE, V4SI_type_node,
4383 tree_cons (NULL_TREE,
4387 tree v8hi_ftype_v8hi_v8hi
4388 = build_function_type (V8HI_type_node,
4389 tree_cons (NULL_TREE, V8HI_type_node,
4390 tree_cons (NULL_TREE, V8HI_type_node,
4392 tree v8hi_ftype_v8hi_v8hi_v8hi
4393 = build_function_type (V8HI_type_node,
4394 tree_cons (NULL_TREE, V8HI_type_node,
4395 tree_cons (NULL_TREE, V8HI_type_node,
4396 tree_cons (NULL_TREE,
4399 tree v4si_ftype_v8hi_v8hi_v4si
4400 = build_function_type (V4SI_type_node,
4401 tree_cons (NULL_TREE, V8HI_type_node,
4402 tree_cons (NULL_TREE, V8HI_type_node,
4403 tree_cons (NULL_TREE,
4406 tree v4si_ftype_v16qi_v16qi_v4si
4407 = build_function_type (V4SI_type_node,
4408 tree_cons (NULL_TREE, V16QI_type_node,
4409 tree_cons (NULL_TREE, V16QI_type_node,
4410 tree_cons (NULL_TREE,
4414 tree v16qi_ftype_v16qi_v16qi
4415 = build_function_type (V16QI_type_node,
4416 tree_cons (NULL_TREE, V16QI_type_node,
4417 tree_cons (NULL_TREE, V16QI_type_node,
4420 tree v4si_ftype_v4sf_v4sf
4421 = build_function_type (V4SI_type_node,
4422 tree_cons (NULL_TREE, V4SF_type_node,
4423 tree_cons (NULL_TREE, V4SF_type_node,
4426 tree v4si_ftype_v4si
4427 = build_function_type (V4SI_type_node,
4428 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4430 tree v8hi_ftype_v8hi
4431 = build_function_type (V8HI_type_node,
4432 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4434 tree v16qi_ftype_v16qi
4435 = build_function_type (V16QI_type_node,
4436 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4438 tree v8hi_ftype_v16qi_v16qi
4439 = build_function_type (V8HI_type_node,
4440 tree_cons (NULL_TREE, V16QI_type_node,
4441 tree_cons (NULL_TREE, V16QI_type_node,
4444 tree v4si_ftype_v8hi_v8hi
4445 = build_function_type (V4SI_type_node,
4446 tree_cons (NULL_TREE, V8HI_type_node,
4447 tree_cons (NULL_TREE, V8HI_type_node,
4450 tree v8hi_ftype_v4si_v4si
4451 = build_function_type (V8HI_type_node,
4452 tree_cons (NULL_TREE, V4SI_type_node,
4453 tree_cons (NULL_TREE, V4SI_type_node,
4456 tree v16qi_ftype_v8hi_v8hi
4457 = build_function_type (V16QI_type_node,
4458 tree_cons (NULL_TREE, V8HI_type_node,
4459 tree_cons (NULL_TREE, V8HI_type_node,
4462 tree v4si_ftype_v16qi_v4si
4463 = build_function_type (V4SI_type_node,
4464 tree_cons (NULL_TREE, V16QI_type_node,
4465 tree_cons (NULL_TREE, V4SI_type_node,
4468 tree v4si_ftype_v16qi_v16qi
4469 = build_function_type (V4SI_type_node,
4470 tree_cons (NULL_TREE, V16QI_type_node,
4471 tree_cons (NULL_TREE, V16QI_type_node,
4474 tree v4si_ftype_v8hi_v4si
4475 = build_function_type (V4SI_type_node,
4476 tree_cons (NULL_TREE, V8HI_type_node,
4477 tree_cons (NULL_TREE, V4SI_type_node,
4480 tree v4si_ftype_v8hi
4481 = build_function_type (V4SI_type_node,
4482 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4484 tree int_ftype_v4si_v4si
4485 = build_function_type (integer_type_node,
4486 tree_cons (NULL_TREE, V4SI_type_node,
4487 tree_cons (NULL_TREE, V4SI_type_node,
4490 tree int_ftype_v4sf_v4sf
4491 = build_function_type (integer_type_node,
4492 tree_cons (NULL_TREE, V4SF_type_node,
4493 tree_cons (NULL_TREE, V4SF_type_node,
4496 tree int_ftype_v16qi_v16qi
4497 = build_function_type (integer_type_node,
4498 tree_cons (NULL_TREE, V16QI_type_node,
4499 tree_cons (NULL_TREE, V16QI_type_node,
4502 tree int_ftype_int_v4si_v4si
4503 = build_function_type
4505 tree_cons (NULL_TREE, integer_type_node,
4506 tree_cons (NULL_TREE, V4SI_type_node,
4507 tree_cons (NULL_TREE, V4SI_type_node,
4510 tree int_ftype_int_v4sf_v4sf
4511 = build_function_type
4513 tree_cons (NULL_TREE, integer_type_node,
4514 tree_cons (NULL_TREE, V4SF_type_node,
4515 tree_cons (NULL_TREE, V4SF_type_node,
4518 tree int_ftype_int_v8hi_v8hi
4519 = build_function_type
4521 tree_cons (NULL_TREE, integer_type_node,
4522 tree_cons (NULL_TREE, V8HI_type_node,
4523 tree_cons (NULL_TREE, V8HI_type_node,
4526 tree int_ftype_int_v16qi_v16qi
4527 = build_function_type
4529 tree_cons (NULL_TREE, integer_type_node,
4530 tree_cons (NULL_TREE, V16QI_type_node,
4531 tree_cons (NULL_TREE, V16QI_type_node,
4534 tree v16qi_ftype_int_pvoid
4535 = build_function_type (V16QI_type_node,
4536 tree_cons (NULL_TREE, integer_type_node,
4537 tree_cons (NULL_TREE, pvoid_type_node,
4540 tree v4si_ftype_int_pvoid
4541 = build_function_type (V4SI_type_node,
4542 tree_cons (NULL_TREE, integer_type_node,
4543 tree_cons (NULL_TREE, pvoid_type_node,
4546 tree v8hi_ftype_int_pvoid
4547 = build_function_type (V8HI_type_node,
4548 tree_cons (NULL_TREE, integer_type_node,
4549 tree_cons (NULL_TREE, pvoid_type_node,
4552 tree int_ftype_v8hi_v8hi
4553 = build_function_type (integer_type_node,
4554 tree_cons (NULL_TREE, V8HI_type_node,
4555 tree_cons (NULL_TREE, V8HI_type_node,
4558 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4559 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4560 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4561 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4562 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4563 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4564 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4565 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4566 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4567 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4568 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4569 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4570 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4571 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4572 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4573 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4574 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4575 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4576 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4577 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4578 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4579 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4580 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4581 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4583 /* Add the simple ternary operators. */
4584 d = (struct builtin_description *) bdesc_3arg;
4585 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4588 enum machine_mode mode0, mode1, mode2, mode3;
4594 mode0 = insn_data[d->icode].operand[0].mode;
4595 mode1 = insn_data[d->icode].operand[1].mode;
4596 mode2 = insn_data[d->icode].operand[2].mode;
4597 mode3 = insn_data[d->icode].operand[3].mode;
4599 /* When all four are of the same mode. */
4600 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4605 type = v4si_ftype_v4si_v4si_v4si;
4608 type = v4sf_ftype_v4sf_v4sf_v4sf;
4611 type = v8hi_ftype_v8hi_v8hi_v8hi;
4614 type = v16qi_ftype_v16qi_v16qi_v16qi;
4620 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4625 type = v4si_ftype_v4si_v4si_v16qi;
4628 type = v4sf_ftype_v4sf_v4sf_v16qi;
4631 type = v8hi_ftype_v8hi_v8hi_v16qi;
4634 type = v16qi_ftype_v16qi_v16qi_v16qi;
4640 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4641 && mode3 == V4SImode)
4642 type = v4si_ftype_v16qi_v16qi_v4si;
4643 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4644 && mode3 == V4SImode)
4645 type = v4si_ftype_v8hi_v8hi_v4si;
4646 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4647 && mode3 == V4SImode)
4648 type = v4sf_ftype_v4sf_v4sf_v4si;
4650 /* vchar, vchar, vchar, 4 bit literal. */
4651 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4653 type = v16qi_ftype_v16qi_v16qi_char;
4655 /* vshort, vshort, vshort, 4 bit literal. */
4656 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4658 type = v8hi_ftype_v8hi_v8hi_char;
4660 /* vint, vint, vint, 4 bit literal. */
4661 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4663 type = v4si_ftype_v4si_v4si_char;
4665 /* vfloat, vfloat, vfloat, 4 bit literal. */
4666 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4668 type = v4sf_ftype_v4sf_v4sf_char;
4673 def_builtin (d->mask, d->name, type, d->code);
4676 /* Add the DST variants. */
4677 d = (struct builtin_description *) bdesc_dst;
4678 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4679 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4681 /* Initialize the predicates. */
4682 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4683 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4685 enum machine_mode mode1;
4688 mode1 = insn_data[dp->icode].operand[1].mode;
4693 type = int_ftype_int_v4si_v4si;
4696 type = int_ftype_int_v8hi_v8hi;
4699 type = int_ftype_int_v16qi_v16qi;
4702 type = int_ftype_int_v4sf_v4sf;
4708 def_builtin (dp->mask, dp->name, type, dp->code);
4711 /* Add the simple binary operators. */
4712 d = (struct builtin_description *) bdesc_2arg;
4713 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4715 enum machine_mode mode0, mode1, mode2;
4721 mode0 = insn_data[d->icode].operand[0].mode;
4722 mode1 = insn_data[d->icode].operand[1].mode;
4723 mode2 = insn_data[d->icode].operand[2].mode;
4725 /* When all three operands are of the same mode. */
4726 if (mode0 == mode1 && mode1 == mode2)
4731 type = v4sf_ftype_v4sf_v4sf;
4734 type = v4si_ftype_v4si_v4si;
4737 type = v16qi_ftype_v16qi_v16qi;
4740 type = v8hi_ftype_v8hi_v8hi;
4747 /* A few other combos we really don't want to do manually. */
4749 /* vint, vfloat, vfloat. */
4750 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4751 type = v4si_ftype_v4sf_v4sf;
4753 /* vshort, vchar, vchar. */
4754 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4755 type = v8hi_ftype_v16qi_v16qi;
4757 /* vint, vshort, vshort. */
4758 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4759 type = v4si_ftype_v8hi_v8hi;
4761 /* vshort, vint, vint. */
4762 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4763 type = v8hi_ftype_v4si_v4si;
4765 /* vchar, vshort, vshort. */
4766 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4767 type = v16qi_ftype_v8hi_v8hi;
4769 /* vint, vchar, vint. */
4770 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4771 type = v4si_ftype_v16qi_v4si;
4773 /* vint, vchar, vchar. */
4774 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4775 type = v4si_ftype_v16qi_v16qi;
4777 /* vint, vshort, vint. */
4778 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4779 type = v4si_ftype_v8hi_v4si;
4781 /* vint, vint, 5 bit literal. */
4782 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4783 type = v4si_ftype_v4si_char;
4785 /* vshort, vshort, 5 bit literal. */
4786 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4787 type = v8hi_ftype_v8hi_char;
4789 /* vchar, vchar, 5 bit literal. */
4790 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4791 type = v16qi_ftype_v16qi_char;
4793 /* vfloat, vint, 5 bit literal. */
4794 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4795 type = v4sf_ftype_v4si_char;
4797 /* vint, vfloat, 5 bit literal. */
4798 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4799 type = v4si_ftype_v4sf_char;
4802 else if (mode0 == SImode)
4807 type = int_ftype_v4si_v4si;
4810 type = int_ftype_v4sf_v4sf;
4813 type = int_ftype_v16qi_v16qi;
4816 type = int_ftype_v8hi_v8hi;
4826 def_builtin (d->mask, d->name, type, d->code);
4829 /* Initialize the abs* operators. */
4830 d = (struct builtin_description *) bdesc_abs;
4831 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4833 enum machine_mode mode0;
4836 mode0 = insn_data[d->icode].operand[0].mode;
4841 type = v4si_ftype_v4si;
4844 type = v8hi_ftype_v8hi;
4847 type = v16qi_ftype_v16qi;
4850 type = v4sf_ftype_v4sf;
4856 def_builtin (d->mask, d->name, type, d->code);
4859 /* Add the simple unary operators. */
4860 d = (struct builtin_description *) bdesc_1arg;
4861 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4863 enum machine_mode mode0, mode1;
4869 mode0 = insn_data[d->icode].operand[0].mode;
4870 mode1 = insn_data[d->icode].operand[1].mode;
4872 if (mode0 == V4SImode && mode1 == QImode)
4873 type = v4si_ftype_char;
4874 else if (mode0 == V8HImode && mode1 == QImode)
4875 type = v8hi_ftype_char;
4876 else if (mode0 == V16QImode && mode1 == QImode)
4877 type = v16qi_ftype_char;
4878 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4879 type = v4sf_ftype_v4sf;
4880 else if (mode0 == V8HImode && mode1 == V16QImode)
4881 type = v8hi_ftype_v16qi;
4882 else if (mode0 == V4SImode && mode1 == V8HImode)
4883 type = v4si_ftype_v8hi;
4887 def_builtin (d->mask, d->name, type, d->code);
4892 /* Generate a memory reference for expand_block_move, copying volatile,
4893 and other bits from an original memory reference. */
4896 expand_block_move_mem (mode, addr, orig_mem)
4897 enum machine_mode mode;
4901 rtx mem = gen_rtx_MEM (mode, addr);
4903 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4907 /* Expand a block move operation, and return 1 if successful. Return 0
4908 if we should let the compiler generate normal code.
4910 operands[0] is the destination
4911 operands[1] is the source
4912 operands[2] is the length
4913 operands[3] is the alignment */
4915 #define MAX_MOVE_REG 4
4918 expand_block_move (operands)
4921 rtx orig_dest = operands[0];
4922 rtx orig_src = operands[1];
4923 rtx bytes_rtx = operands[2];
4924 rtx align_rtx = operands[3];
4925 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4936 rtx stores[MAX_MOVE_REG];
4939 /* If this is not a fixed size move, just call memcpy */
4943 /* If this is not a fixed size alignment, abort */
4944 if (GET_CODE (align_rtx) != CONST_INT)
4946 align = INTVAL (align_rtx);
4948 /* Anything to move? */
4949 bytes = INTVAL (bytes_rtx);
4953 /* store_one_arg depends on expand_block_move to handle at least the size of
4954 reg_parm_stack_space. */
4955 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4958 /* Move the address into scratch registers. */
4959 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4960 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4962 if (TARGET_STRING) /* string instructions are available */
4964 for ( ; bytes > 0; bytes -= move_bytes)
4966 if (bytes > 24 /* move up to 32 bytes at a time */
4974 && ! fixed_regs[12])
4976 move_bytes = (bytes > 32) ? 32 : bytes;
4977 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4980 expand_block_move_mem (BLKmode,
4983 GEN_INT ((move_bytes == 32)
4987 else if (bytes > 16 /* move up to 24 bytes at a time */
4993 && ! fixed_regs[10])
4995 move_bytes = (bytes > 24) ? 24 : bytes;
4996 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4999 expand_block_move_mem (BLKmode,
5002 GEN_INT (move_bytes),
5005 else if (bytes > 8 /* move up to 16 bytes at a time */
5011 move_bytes = (bytes > 16) ? 16 : bytes;
5012 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5015 expand_block_move_mem (BLKmode,
5018 GEN_INT (move_bytes),
5021 else if (bytes >= 8 && TARGET_POWERPC64
5022 /* 64-bit loads and stores require word-aligned
5024 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5027 tmp_reg = gen_reg_rtx (DImode);
5028 emit_move_insn (tmp_reg,
5029 expand_block_move_mem (DImode,
5030 src_reg, orig_src));
5031 emit_move_insn (expand_block_move_mem (DImode,
5032 dest_reg, orig_dest),
5035 else if (bytes > 4 && !TARGET_POWERPC64)
5036 { /* move up to 8 bytes at a time */
5037 move_bytes = (bytes > 8) ? 8 : bytes;
5038 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5041 expand_block_move_mem (BLKmode,
5044 GEN_INT (move_bytes),
5047 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5048 { /* move 4 bytes */
5050 tmp_reg = gen_reg_rtx (SImode);
5051 emit_move_insn (tmp_reg,
5052 expand_block_move_mem (SImode,
5053 src_reg, orig_src));
5054 emit_move_insn (expand_block_move_mem (SImode,
5055 dest_reg, orig_dest),
5058 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5059 { /* move 2 bytes */
5061 tmp_reg = gen_reg_rtx (HImode);
5062 emit_move_insn (tmp_reg,
5063 expand_block_move_mem (HImode,
5064 src_reg, orig_src));
5065 emit_move_insn (expand_block_move_mem (HImode,
5066 dest_reg, orig_dest),
5069 else if (bytes == 1) /* move 1 byte */
5072 tmp_reg = gen_reg_rtx (QImode);
5073 emit_move_insn (tmp_reg,
5074 expand_block_move_mem (QImode,
5075 src_reg, orig_src));
5076 emit_move_insn (expand_block_move_mem (QImode,
5077 dest_reg, orig_dest),
5081 { /* move up to 4 bytes at a time */
5082 move_bytes = (bytes > 4) ? 4 : bytes;
5083 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5086 expand_block_move_mem (BLKmode,
5089 GEN_INT (move_bytes),
5093 if (bytes > move_bytes)
5095 if (! TARGET_POWERPC64)
5097 emit_insn (gen_addsi3 (src_reg, src_reg,
5098 GEN_INT (move_bytes)));
5099 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5100 GEN_INT (move_bytes)));
5104 emit_insn (gen_adddi3 (src_reg, src_reg,
5105 GEN_INT (move_bytes)));
5106 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5107 GEN_INT (move_bytes)));
5113 else /* string instructions not available */
5115 num_reg = offset = 0;
5116 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5118 /* Calculate the correct offset for src/dest */
5122 dest_addr = dest_reg;
5126 src_addr = plus_constant (src_reg, offset);
5127 dest_addr = plus_constant (dest_reg, offset);
5130 /* Generate the appropriate load and store, saving the stores
5132 if (bytes >= 8 && TARGET_POWERPC64
5133 /* 64-bit loads and stores require word-aligned
5135 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5138 tmp_reg = gen_reg_rtx (DImode);
5139 emit_insn (gen_movdi (tmp_reg,
5140 expand_block_move_mem (DImode,
5143 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5148 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5151 tmp_reg = gen_reg_rtx (SImode);
5152 emit_insn (gen_movsi (tmp_reg,
5153 expand_block_move_mem (SImode,
5156 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5161 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5164 tmp_reg = gen_reg_rtx (HImode);
5165 emit_insn (gen_movhi (tmp_reg,
5166 expand_block_move_mem (HImode,
5169 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5177 tmp_reg = gen_reg_rtx (QImode);
5178 emit_insn (gen_movqi (tmp_reg,
5179 expand_block_move_mem (QImode,
5182 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5188 if (num_reg >= MAX_MOVE_REG)
5190 for (i = 0; i < num_reg; i++)
5191 emit_insn (stores[i]);
5196 for (i = 0; i < num_reg; i++)
5197 emit_insn (stores[i]);
5204 /* Return 1 if OP is a load multiple operation. It is known to be a
5205 PARALLEL and the first section will be tested. */
5208 load_multiple_operation (op, mode)
5210 enum machine_mode mode ATTRIBUTE_UNUSED;
5212 int count = XVECLEN (op, 0);
5213 unsigned int dest_regno;
5217 /* Perform a quick check so we don't blow up below. */
5219 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5220 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5221 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5224 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5225 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5227 for (i = 1; i < count; i++)
5229 rtx elt = XVECEXP (op, 0, i);
5231 if (GET_CODE (elt) != SET
5232 || GET_CODE (SET_DEST (elt)) != REG
5233 || GET_MODE (SET_DEST (elt)) != SImode
5234 || REGNO (SET_DEST (elt)) != dest_regno + i
5235 || GET_CODE (SET_SRC (elt)) != MEM
5236 || GET_MODE (SET_SRC (elt)) != SImode
5237 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5238 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5239 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5240 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5247 /* Similar, but tests for store multiple. Here, the second vector element
5248 is a CLOBBER. It will be tested later. */
5251 store_multiple_operation (op, mode)
5253 enum machine_mode mode ATTRIBUTE_UNUSED;
5255 int count = XVECLEN (op, 0) - 1;
5256 unsigned int src_regno;
5260 /* Perform a quick check so we don't blow up below. */
5262 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5263 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5264 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5267 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5268 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5270 for (i = 1; i < count; i++)
5272 rtx elt = XVECEXP (op, 0, i + 1);
5274 if (GET_CODE (elt) != SET
5275 || GET_CODE (SET_SRC (elt)) != REG
5276 || GET_MODE (SET_SRC (elt)) != SImode
5277 || REGNO (SET_SRC (elt)) != src_regno + i
5278 || GET_CODE (SET_DEST (elt)) != MEM
5279 || GET_MODE (SET_DEST (elt)) != SImode
5280 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5281 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5282 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5283 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5290 /* Return 1 for a parallel vrsave operation. */
5293 vrsave_operation (op, mode)
5295 enum machine_mode mode ATTRIBUTE_UNUSED;
5297 int count = XVECLEN (op, 0);
5298 unsigned int dest_regno, src_regno;
5302 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5303 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5304 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5307 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5308 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5310 if (dest_regno != VRSAVE_REGNO
5311 && src_regno != VRSAVE_REGNO)
5314 for (i = 1; i < count; i++)
5316 rtx elt = XVECEXP (op, 0, i);
5318 if (GET_CODE (elt) != CLOBBER
5319 && GET_CODE (elt) != SET)
5326 /* Return 1 for an PARALLEL suitable for mtcrf. */
5329 mtcrf_operation (op, mode)
5331 enum machine_mode mode ATTRIBUTE_UNUSED;
5333 int count = XVECLEN (op, 0);
5337 /* Perform a quick check so we don't blow up below. */
5339 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5340 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5341 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5343 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5345 if (GET_CODE (src_reg) != REG
5346 || GET_MODE (src_reg) != SImode
5347 || ! INT_REGNO_P (REGNO (src_reg)))
5350 for (i = 0; i < count; i++)
5352 rtx exp = XVECEXP (op, 0, i);
5356 if (GET_CODE (exp) != SET
5357 || GET_CODE (SET_DEST (exp)) != REG
5358 || GET_MODE (SET_DEST (exp)) != CCmode
5359 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5361 unspec = SET_SRC (exp);
5362 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5364 if (GET_CODE (unspec) != UNSPEC
5365 || XINT (unspec, 1) != 20
5366 || XVECLEN (unspec, 0) != 2
5367 || XVECEXP (unspec, 0, 0) != src_reg
5368 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5369 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5375 /* Return 1 for an PARALLEL suitable for lmw. */
5378 lmw_operation (op, mode)
5380 enum machine_mode mode ATTRIBUTE_UNUSED;
5382 int count = XVECLEN (op, 0);
5383 unsigned int dest_regno;
5385 unsigned int base_regno;
5386 HOST_WIDE_INT offset;
5389 /* Perform a quick check so we don't blow up below. */
5391 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5392 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5393 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5396 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5397 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5400 || count != 32 - (int) dest_regno)
5403 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5406 base_regno = REGNO (src_addr);
5407 if (base_regno == 0)
5410 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5412 offset = INTVAL (XEXP (src_addr, 1));
5413 base_regno = REGNO (XEXP (src_addr, 0));
5418 for (i = 0; i < count; i++)
5420 rtx elt = XVECEXP (op, 0, i);
5423 HOST_WIDE_INT newoffset;
5425 if (GET_CODE (elt) != SET
5426 || GET_CODE (SET_DEST (elt)) != REG
5427 || GET_MODE (SET_DEST (elt)) != SImode
5428 || REGNO (SET_DEST (elt)) != dest_regno + i
5429 || GET_CODE (SET_SRC (elt)) != MEM
5430 || GET_MODE (SET_SRC (elt)) != SImode)
5432 newaddr = XEXP (SET_SRC (elt), 0);
5433 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5438 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5440 addr_reg = XEXP (newaddr, 0);
5441 newoffset = INTVAL (XEXP (newaddr, 1));
5445 if (REGNO (addr_reg) != base_regno
5446 || newoffset != offset + 4 * i)
5453 /* Return 1 for an PARALLEL suitable for stmw. */
5456 stmw_operation (op, mode)
5458 enum machine_mode mode ATTRIBUTE_UNUSED;
5460 int count = XVECLEN (op, 0);
5461 unsigned int src_regno;
5463 unsigned int base_regno;
5464 HOST_WIDE_INT offset;
5467 /* Perform a quick check so we don't blow up below. */
5469 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5470 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5471 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5474 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5475 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5478 || count != 32 - (int) src_regno)
5481 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5484 base_regno = REGNO (dest_addr);
5485 if (base_regno == 0)
5488 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5490 offset = INTVAL (XEXP (dest_addr, 1));
5491 base_regno = REGNO (XEXP (dest_addr, 0));
5496 for (i = 0; i < count; i++)
5498 rtx elt = XVECEXP (op, 0, i);
5501 HOST_WIDE_INT newoffset;
5503 if (GET_CODE (elt) != SET
5504 || GET_CODE (SET_SRC (elt)) != REG
5505 || GET_MODE (SET_SRC (elt)) != SImode
5506 || REGNO (SET_SRC (elt)) != src_regno + i
5507 || GET_CODE (SET_DEST (elt)) != MEM
5508 || GET_MODE (SET_DEST (elt)) != SImode)
5510 newaddr = XEXP (SET_DEST (elt), 0);
5511 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5516 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5518 addr_reg = XEXP (newaddr, 0);
5519 newoffset = INTVAL (XEXP (newaddr, 1));
5523 if (REGNO (addr_reg) != base_regno
5524 || newoffset != offset + 4 * i)
5531 /* A validation routine: say whether CODE, a condition code, and MODE
5532 match. The other alternatives either don't make sense or should
5533 never be generated. */
5536 validate_condition_mode (code, mode)
5538 enum machine_mode mode;
5540 if (GET_RTX_CLASS (code) != '<'
5541 || GET_MODE_CLASS (mode) != MODE_CC)
5544 /* These don't make sense. */
5545 if ((code == GT || code == LT || code == GE || code == LE)
5546 && mode == CCUNSmode)
5549 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5550 && mode != CCUNSmode)
5553 if (mode != CCFPmode
5554 && (code == ORDERED || code == UNORDERED
5555 || code == UNEQ || code == LTGT
5556 || code == UNGT || code == UNLT
5557 || code == UNGE || code == UNLE))
5560 /* These should never be generated except for
5561 flag_unsafe_math_optimizations. */
5562 if (mode == CCFPmode
5563 && ! flag_unsafe_math_optimizations
5564 && (code == LE || code == GE
5565 || code == UNEQ || code == LTGT
5566 || code == UNGT || code == UNLT))
5569 /* These are invalid; the information is not there. */
5570 if (mode == CCEQmode
5571 && code != EQ && code != NE)
5575 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5576 We only check the opcode against the mode of the CC value here. */
5579 branch_comparison_operator (op, mode)
5581 enum machine_mode mode ATTRIBUTE_UNUSED;
5583 enum rtx_code code = GET_CODE (op);
5584 enum machine_mode cc_mode;
5586 if (GET_RTX_CLASS (code) != '<')
5589 cc_mode = GET_MODE (XEXP (op, 0));
5590 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5593 validate_condition_mode (code, cc_mode);
5598 /* Return 1 if OP is a comparison operation that is valid for a branch
5599 insn and which is true if the corresponding bit in the CC register
5603 branch_positive_comparison_operator (op, mode)
5605 enum machine_mode mode;
5609 if (! branch_comparison_operator (op, mode))
5612 code = GET_CODE (op);
5613 return (code == EQ || code == LT || code == GT
5614 || code == LTU || code == GTU
5615 || code == UNORDERED);
5618 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5619 We check the opcode against the mode of the CC value and disallow EQ or
5620 NE comparisons for integers. */
5623 scc_comparison_operator (op, mode)
5625 enum machine_mode mode;
5627 enum rtx_code code = GET_CODE (op);
5628 enum machine_mode cc_mode;
5630 if (GET_MODE (op) != mode && mode != VOIDmode)
5633 if (GET_RTX_CLASS (code) != '<')
5636 cc_mode = GET_MODE (XEXP (op, 0));
5637 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5640 validate_condition_mode (code, cc_mode);
5642 if (code == NE && cc_mode != CCFPmode)
5649 trap_comparison_operator (op, mode)
5651 enum machine_mode mode;
5653 if (mode != VOIDmode && mode != GET_MODE (op))
5655 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5659 boolean_operator (op, mode)
5661 enum machine_mode mode ATTRIBUTE_UNUSED;
5663 enum rtx_code code = GET_CODE (op);
5664 return (code == AND || code == IOR || code == XOR);
5668 boolean_or_operator (op, mode)
5670 enum machine_mode mode ATTRIBUTE_UNUSED;
5672 enum rtx_code code = GET_CODE (op);
5673 return (code == IOR || code == XOR);
5677 min_max_operator (op, mode)
5679 enum machine_mode mode ATTRIBUTE_UNUSED;
5681 enum rtx_code code = GET_CODE (op);
5682 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5685 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5686 mask required to convert the result of a rotate insn into a shift
5687 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5690 includes_lshift_p (shiftop, andop)
5694 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5696 shift_mask <<= INTVAL (shiftop);
5698 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5701 /* Similar, but for right shift. */
5704 includes_rshift_p (shiftop, andop)
5708 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5710 shift_mask >>= INTVAL (shiftop);
5712 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5715 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5716 to perform a left shift. It must have exactly SHIFTOP least
5717 signifigant 0's, then one or more 1's, then zero or more 0's. */
5720 includes_rldic_lshift_p (shiftop, andop)
5724 if (GET_CODE (andop) == CONST_INT)
5726 HOST_WIDE_INT c, lsb, shift_mask;
5729 if (c == 0 || c == ~0)
5733 shift_mask <<= INTVAL (shiftop);
5735 /* Find the least signifigant one bit. */
5738 /* It must coincide with the LSB of the shift mask. */
5739 if (-lsb != shift_mask)
5742 /* Invert to look for the next transition (if any). */
5745 /* Remove the low group of ones (originally low group of zeros). */
5748 /* Again find the lsb, and check we have all 1's above. */
5752 else if (GET_CODE (andop) == CONST_DOUBLE
5753 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5755 HOST_WIDE_INT low, high, lsb;
5756 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5758 low = CONST_DOUBLE_LOW (andop);
5759 if (HOST_BITS_PER_WIDE_INT < 64)
5760 high = CONST_DOUBLE_HIGH (andop);
5762 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5763 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5766 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5768 shift_mask_high = ~0;
5769 if (INTVAL (shiftop) > 32)
5770 shift_mask_high <<= INTVAL (shiftop) - 32;
5774 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5781 return high == -lsb;
5784 shift_mask_low = ~0;
5785 shift_mask_low <<= INTVAL (shiftop);
5789 if (-lsb != shift_mask_low)
5792 if (HOST_BITS_PER_WIDE_INT < 64)
5797 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5800 return high == -lsb;
5804 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5810 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5811 to perform a left shift. It must have SHIFTOP or more least
5812 signifigant 0's, with the remainder of the word 1's. */
5815 includes_rldicr_lshift_p (shiftop, andop)
5819 if (GET_CODE (andop) == CONST_INT)
5821 HOST_WIDE_INT c, lsb, shift_mask;
5824 shift_mask <<= INTVAL (shiftop);
5827 /* Find the least signifigant one bit. */
5830 /* It must be covered by the shift mask.
5831 This test also rejects c == 0. */
5832 if ((lsb & shift_mask) == 0)
5835 /* Check we have all 1's above the transition, and reject all 1's. */
5836 return c == -lsb && lsb != 1;
5838 else if (GET_CODE (andop) == CONST_DOUBLE
5839 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5841 HOST_WIDE_INT low, lsb, shift_mask_low;
5843 low = CONST_DOUBLE_LOW (andop);
5845 if (HOST_BITS_PER_WIDE_INT < 64)
5847 HOST_WIDE_INT high, shift_mask_high;
5849 high = CONST_DOUBLE_HIGH (andop);
5853 shift_mask_high = ~0;
5854 if (INTVAL (shiftop) > 32)
5855 shift_mask_high <<= INTVAL (shiftop) - 32;
5859 if ((lsb & shift_mask_high) == 0)
5862 return high == -lsb;
5868 shift_mask_low = ~0;
5869 shift_mask_low <<= INTVAL (shiftop);
5873 if ((lsb & shift_mask_low) == 0)
5876 return low == -lsb && lsb != 1;
5882 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5883 for lfq and stfq insns.
5885 Note reg1 and reg2 *must* be hard registers. To be sure we will
5886 abort if we are passed pseudo registers. */
5889 registers_ok_for_quad_peep (reg1, reg2)
5892 /* We might have been passed a SUBREG. */
5893 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5896 return (REGNO (reg1) == REGNO (reg2) - 1);
5899 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5900 addr1 and addr2 must be in consecutive memory locations
5901 (addr2 == addr1 + 8). */
5904 addrs_ok_for_quad_peep (addr1, addr2)
5911 /* Extract an offset (if used) from the first addr. */
5912 if (GET_CODE (addr1) == PLUS)
5914 /* If not a REG, return zero. */
5915 if (GET_CODE (XEXP (addr1, 0)) != REG)
5919 reg1 = REGNO (XEXP (addr1, 0));
5920 /* The offset must be constant! */
5921 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5923 offset1 = INTVAL (XEXP (addr1, 1));
5926 else if (GET_CODE (addr1) != REG)
5930 reg1 = REGNO (addr1);
5931 /* This was a simple (mem (reg)) expression. Offset is 0. */
5935 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5936 if (GET_CODE (addr2) != PLUS)
5939 if (GET_CODE (XEXP (addr2, 0)) != REG
5940 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5943 if (reg1 != REGNO (XEXP (addr2, 0)))
5946 /* The offset for the second addr must be 8 more than the first addr. */
5947 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5950 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5955 /* Return the register class of a scratch register needed to copy IN into
5956 or out of a register in CLASS in MODE. If it can be done directly,
5957 NO_REGS is returned. */
5960 secondary_reload_class (class, mode, in)
5961 enum reg_class class;
5962 enum machine_mode mode ATTRIBUTE_UNUSED;
5967 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5969 /* We cannot copy a symbolic operand directly into anything
5970 other than BASE_REGS for TARGET_ELF. So indicate that a
5971 register from BASE_REGS is needed as an intermediate
5974 On Darwin, pic addresses require a load from memory, which
5975 needs a base register. */
5976 if (class != BASE_REGS
5977 && (GET_CODE (in) == SYMBOL_REF
5978 || GET_CODE (in) == HIGH
5979 || GET_CODE (in) == LABEL_REF
5980 || GET_CODE (in) == CONST))
5984 if (GET_CODE (in) == REG)
5987 if (regno >= FIRST_PSEUDO_REGISTER)
5989 regno = true_regnum (in);
5990 if (regno >= FIRST_PSEUDO_REGISTER)
5994 else if (GET_CODE (in) == SUBREG)
5996 regno = true_regnum (in);
5997 if (regno >= FIRST_PSEUDO_REGISTER)
6003 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6005 if (class == GENERAL_REGS || class == BASE_REGS
6006 || (regno >= 0 && INT_REGNO_P (regno)))
6009 /* Constants, memory, and FP registers can go into FP registers. */
6010 if ((regno == -1 || FP_REGNO_P (regno))
6011 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6014 /* Memory, and AltiVec registers can go into AltiVec registers. */
6015 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6016 && class == ALTIVEC_REGS)
6019 /* We can copy among the CR registers. */
6020 if ((class == CR_REGS || class == CR0_REGS)
6021 && regno >= 0 && CR_REGNO_P (regno))
6024 /* Otherwise, we need GENERAL_REGS. */
6025 return GENERAL_REGS;
6028 /* Given a comparison operation, return the bit number in CCR to test. We
6029 know this is a valid comparison.
6031 SCC_P is 1 if this is for an scc. That means that %D will have been
6032 used instead of %C, so the bits will be in different places.
6034 Return -1 if OP isn't a valid comparison for some reason. */
6041 enum rtx_code code = GET_CODE (op);
6042 enum machine_mode cc_mode;
6047 if (GET_RTX_CLASS (code) != '<')
6052 if (GET_CODE (reg) != REG
6053 || ! CR_REGNO_P (REGNO (reg)))
6056 cc_mode = GET_MODE (reg);
6057 cc_regnum = REGNO (reg);
6058 base_bit = 4 * (cc_regnum - CR0_REGNO);
6060 validate_condition_mode (code, cc_mode);
6065 return scc_p ? base_bit + 3 : base_bit + 2;
6067 return base_bit + 2;
6068 case GT: case GTU: case UNLE:
6069 return base_bit + 1;
6070 case LT: case LTU: case UNGE:
6072 case ORDERED: case UNORDERED:
6073 return base_bit + 3;
6076 /* If scc, we will have done a cror to put the bit in the
6077 unordered position. So test that bit. For integer, this is ! LT
6078 unless this is an scc insn. */
6079 return scc_p ? base_bit + 3 : base_bit;
6082 return scc_p ? base_bit + 3 : base_bit + 1;
6089 /* Return the GOT register. */
6092 rs6000_got_register (value)
6093 rtx value ATTRIBUTE_UNUSED;
6095 /* The second flow pass currently (June 1999) can't update
6096 regs_ever_live without disturbing other parts of the compiler, so
6097 update it here to make the prolog/epilogue code happy. */
6098 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6099 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6101 current_function_uses_pic_offset_table = 1;
6103 return pic_offset_table_rtx;
6106 /* Functions to init, mark and free struct machine_function.
6107 These will be called, via pointer variables,
6108 from push_function_context and pop_function_context. */
6111 rs6000_init_machine_status (p)
6114 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6118 rs6000_free_machine_status (p)
6121 if (p->machine == NULL)
6129 /* Print an operand. Recognize special options, documented below. */
6132 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6133 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6135 #define SMALL_DATA_RELOC "sda21"
6136 #define SMALL_DATA_REG 0
6140 print_operand (file, x, code)
6148 /* These macros test for integers and extract the low-order bits. */
6150 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6151 && GET_MODE (X) == VOIDmode)
6153 #define INT_LOWPART(X) \
6154 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6159 /* Write out an instruction after the call which may be replaced
6160 with glue code by the loader. This depends on the AIX version. */
6161 asm_fprintf (file, RS6000_CALL_GLUE);
6164 /* %a is output_address. */
6167 /* If X is a constant integer whose low-order 5 bits are zero,
6168 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6169 in the AIX assembler where "sri" with a zero shift count
6170 writes a trash instruction. */
6171 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6178 /* If constant, low-order 16 bits of constant, unsigned.
6179 Otherwise, write normally. */
6181 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6183 print_operand (file, x, 0);
6187 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6188 for 64-bit mask direction. */
6189 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6192 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6196 /* There used to be a comment for 'C' reading "This is an
6197 optional cror needed for certain floating-point
6198 comparisons. Otherwise write nothing." */
6200 /* Similar, except that this is for an scc, so we must be able to
6201 encode the test in a single bit that is one. We do the above
6202 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6203 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6204 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6206 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6208 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6210 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6213 else if (GET_CODE (x) == NE)
6215 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6217 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6218 base_bit + 2, base_bit + 2);
6223 /* X is a CR register. Print the number of the EQ bit of the CR */
6224 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6225 output_operand_lossage ("invalid %%E value");
6227 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6231 /* X is a CR register. Print the shift count needed to move it
6232 to the high-order four bits. */
6233 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6234 output_operand_lossage ("invalid %%f value");
6236 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6240 /* Similar, but print the count for the rotate in the opposite
6242 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6243 output_operand_lossage ("invalid %%F value");
6245 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6249 /* X is a constant integer. If it is negative, print "m",
6250 otherwise print "z". This is to make a aze or ame insn. */
6251 if (GET_CODE (x) != CONST_INT)
6252 output_operand_lossage ("invalid %%G value");
6253 else if (INTVAL (x) >= 0)
6260 /* If constant, output low-order five bits. Otherwise, write
6263 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6265 print_operand (file, x, 0);
6269 /* If constant, output low-order six bits. Otherwise, write
6272 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6274 print_operand (file, x, 0);
6278 /* Print `i' if this is a constant, else nothing. */
6284 /* Write the bit number in CCR for jump. */
6287 output_operand_lossage ("invalid %%j code");
6289 fprintf (file, "%d", i);
6293 /* Similar, but add one for shift count in rlinm for scc and pass
6294 scc flag to `ccr_bit'. */
6297 output_operand_lossage ("invalid %%J code");
6299 /* If we want bit 31, write a shift count of zero, not 32. */
6300 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6304 /* X must be a constant. Write the 1's complement of the
6307 output_operand_lossage ("invalid %%k value");
6309 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6313 /* X must be a symbolic constant on ELF. Write an
6314 expression suitable for an 'addi' that adds in the low 16
6316 if (GET_CODE (x) != CONST)
6318 print_operand_address (file, x);
6323 if (GET_CODE (XEXP (x, 0)) != PLUS
6324 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6325 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6326 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6327 output_operand_lossage ("invalid %%K value");
6328 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6330 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6334 /* %l is output_asm_label. */
6337 /* Write second word of DImode or DFmode reference. Works on register
6338 or non-indexed memory only. */
6339 if (GET_CODE (x) == REG)
6340 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6341 else if (GET_CODE (x) == MEM)
6343 /* Handle possible auto-increment. Since it is pre-increment and
6344 we have already done it, we can just use an offset of word. */
6345 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6346 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6347 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6350 output_address (XEXP (adjust_address_nv (x, SImode,
6354 if (small_data_operand (x, GET_MODE (x)))
6355 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6356 reg_names[SMALL_DATA_REG]);
6361 /* MB value for a mask operand. */
6362 if (! mask_operand (x, SImode))
6363 output_operand_lossage ("invalid %%m value");
6365 val = INT_LOWPART (x);
6367 /* If the high bit is set and the low bit is not, the value is zero.
6368 If the high bit is zero, the value is the first 1 bit we find from
6370 if ((val & 0x80000000) && ((val & 1) == 0))
6375 else if ((val & 0x80000000) == 0)
6377 for (i = 1; i < 32; i++)
6378 if ((val <<= 1) & 0x80000000)
6380 fprintf (file, "%d", i);
6384 /* Otherwise, look for the first 0 bit from the right. The result is its
6385 number plus 1. We know the low-order bit is one. */
6386 for (i = 0; i < 32; i++)
6387 if (((val >>= 1) & 1) == 0)
6390 /* If we ended in ...01, i would be 0. The correct value is 31, so
6392 fprintf (file, "%d", 31 - i);
6396 /* ME value for a mask operand. */
6397 if (! mask_operand (x, SImode))
6398 output_operand_lossage ("invalid %%M value");
6400 val = INT_LOWPART (x);
6402 /* If the low bit is set and the high bit is not, the value is 31.
6403 If the low bit is zero, the value is the first 1 bit we find from
6405 if ((val & 1) && ((val & 0x80000000) == 0))
6410 else if ((val & 1) == 0)
6412 for (i = 0; i < 32; i++)
6413 if ((val >>= 1) & 1)
6416 /* If we had ....10, i would be 0. The result should be
6417 30, so we need 30 - i. */
6418 fprintf (file, "%d", 30 - i);
6422 /* Otherwise, look for the first 0 bit from the left. The result is its
6423 number minus 1. We know the high-order bit is one. */
6424 for (i = 0; i < 32; i++)
6425 if (((val <<= 1) & 0x80000000) == 0)
6428 fprintf (file, "%d", i);
6431 /* %n outputs the negative of its operand. */
6434 /* Write the number of elements in the vector times 4. */
6435 if (GET_CODE (x) != PARALLEL)
6436 output_operand_lossage ("invalid %%N value");
6438 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6442 /* Similar, but subtract 1 first. */
6443 if (GET_CODE (x) != PARALLEL)
6444 output_operand_lossage ("invalid %%O value");
6446 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6450 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6452 || INT_LOWPART (x) < 0
6453 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6454 output_operand_lossage ("invalid %%p value");
6456 fprintf (file, "%d", i);
6460 /* The operand must be an indirect memory reference. The result
6461 is the register number. */
6462 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6463 || REGNO (XEXP (x, 0)) >= 32)
6464 output_operand_lossage ("invalid %%P value");
6466 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6470 /* This outputs the logical code corresponding to a boolean
6471 expression. The expression may have one or both operands
6472 negated (if one, only the first one). For condition register
6473 logical operations, it will also treat the negated
6474 CR codes as NOTs, but not handle NOTs of them. */
6476 const char *const *t = 0;
6478 enum rtx_code code = GET_CODE (x);
6479 static const char * const tbl[3][3] = {
6480 { "and", "andc", "nor" },
6481 { "or", "orc", "nand" },
6482 { "xor", "eqv", "xor" } };
6486 else if (code == IOR)
6488 else if (code == XOR)
6491 output_operand_lossage ("invalid %%q value");
6493 if (GET_CODE (XEXP (x, 0)) != NOT)
6497 if (GET_CODE (XEXP (x, 1)) == NOT)
6508 /* X is a CR register. Print the mask for `mtcrf'. */
6509 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6510 output_operand_lossage ("invalid %%R value");
6512 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6516 /* Low 5 bits of 32 - value */
6518 output_operand_lossage ("invalid %%s value");
6520 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6524 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6525 CONST_INT 32-bit mask is considered sign-extended so any
6526 transition must occur within the CONST_INT, not on the boundary. */
6527 if (! mask64_operand (x, DImode))
6528 output_operand_lossage ("invalid %%S value");
6530 val = INT_LOWPART (x);
6532 if (val & 1) /* Clear Left */
6534 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6535 if (!((val >>= 1) & 1))
6538 #if HOST_BITS_PER_WIDE_INT == 32
6539 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6541 val = CONST_DOUBLE_HIGH (x);
6546 for (i = 32; i < 64; i++)
6547 if (!((val >>= 1) & 1))
6551 /* i = index of last set bit from right
6552 mask begins at 63 - i from left */
6554 output_operand_lossage ("%%S computed all 1's mask");
6556 fprintf (file, "%d", 63 - i);
6559 else /* Clear Right */
6561 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6562 if ((val >>= 1) & 1)
6565 #if HOST_BITS_PER_WIDE_INT == 32
6566 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6568 val = CONST_DOUBLE_HIGH (x);
6570 if (val == (HOST_WIDE_INT) -1)
6573 for (i = 32; i < 64; i++)
6574 if ((val >>= 1) & 1)
6578 /* i = index of last clear bit from right
6579 mask ends at 62 - i from left */
6581 output_operand_lossage ("%%S computed all 0's mask");
6583 fprintf (file, "%d", 62 - i);
6588 /* Print the symbolic name of a branch target register. */
6589 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6590 && REGNO (x) != COUNT_REGISTER_REGNUM))
6591 output_operand_lossage ("invalid %%T value");
6592 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6593 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6595 fputs ("ctr", file);
6599 /* High-order 16 bits of constant for use in unsigned operand. */
6601 output_operand_lossage ("invalid %%u value");
6603 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6604 (INT_LOWPART (x) >> 16) & 0xffff);
6608 /* High-order 16 bits of constant for use in signed operand. */
6610 output_operand_lossage ("invalid %%v value");
6612 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6613 (INT_LOWPART (x) >> 16) & 0xffff);
6617 /* Print `u' if this has an auto-increment or auto-decrement. */
6618 if (GET_CODE (x) == MEM
6619 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6620 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6625 /* Print the trap code for this operand. */
6626 switch (GET_CODE (x))
6629 fputs ("eq", file); /* 4 */
6632 fputs ("ne", file); /* 24 */
6635 fputs ("lt", file); /* 16 */
6638 fputs ("le", file); /* 20 */
6641 fputs ("gt", file); /* 8 */
6644 fputs ("ge", file); /* 12 */
6647 fputs ("llt", file); /* 2 */
6650 fputs ("lle", file); /* 6 */
6653 fputs ("lgt", file); /* 1 */
6656 fputs ("lge", file); /* 5 */
6664 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6667 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6668 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6670 print_operand (file, x, 0);
6674 /* MB value for a PowerPC64 rldic operand. */
6675 val = (GET_CODE (x) == CONST_INT
6676 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6681 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6682 if ((val <<= 1) < 0)
6685 #if HOST_BITS_PER_WIDE_INT == 32
6686 if (GET_CODE (x) == CONST_INT && i >= 0)
6687 i += 32; /* zero-extend high-part was all 0's */
6688 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6690 val = CONST_DOUBLE_LOW (x);
6697 for ( ; i < 64; i++)
6698 if ((val <<= 1) < 0)
6703 fprintf (file, "%d", i + 1);
6707 if (GET_CODE (x) == MEM
6708 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6713 /* Like 'L', for third word of TImode */
6714 if (GET_CODE (x) == REG)
6715 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6716 else if (GET_CODE (x) == MEM)
6718 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6719 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6720 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6722 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6723 if (small_data_operand (x, GET_MODE (x)))
6724 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6725 reg_names[SMALL_DATA_REG]);
6730 /* X is a SYMBOL_REF. Write out the name preceded by a
6731 period and without any trailing data in brackets. Used for function
6732 names. If we are configured for System V (or the embedded ABI) on
6733 the PowerPC, do not emit the period, since those systems do not use
6734 TOCs and the like. */
6735 if (GET_CODE (x) != SYMBOL_REF)
6738 if (XSTR (x, 0)[0] != '.')
6740 switch (DEFAULT_ABI)
6750 case ABI_AIX_NODESC:
6756 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6758 assemble_name (file, XSTR (x, 0));
6763 /* Like 'L', for last word of TImode. */
6764 if (GET_CODE (x) == REG)
6765 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6766 else if (GET_CODE (x) == MEM)
6768 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6769 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6770 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6772 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6773 if (small_data_operand (x, GET_MODE (x)))
6774 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6775 reg_names[SMALL_DATA_REG]);
6779 /* Print AltiVec memory operand. */
6784 if (GET_CODE (x) != MEM)
6789 if (GET_CODE (tmp) == REG)
6790 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6791 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6793 if (REGNO (XEXP (tmp, 0)) == 0)
6794 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6795 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6797 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6798 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6806 if (GET_CODE (x) == REG)
6807 fprintf (file, "%s", reg_names[REGNO (x)]);
6808 else if (GET_CODE (x) == MEM)
6810 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6811 know the width from the mode. */
6812 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6813 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6814 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6815 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6816 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6817 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6819 output_address (XEXP (x, 0));
6822 output_addr_const (file, x);
6826 output_operand_lossage ("invalid %%xn code");
6830 /* Print the address of an operand. */
6833 print_operand_address (file, x)
6837 if (GET_CODE (x) == REG)
6838 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6839 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6840 || GET_CODE (x) == LABEL_REF)
6842 output_addr_const (file, x);
6843 if (small_data_operand (x, GET_MODE (x)))
6844 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6845 reg_names[SMALL_DATA_REG]);
6846 else if (TARGET_TOC)
6849 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6851 if (REGNO (XEXP (x, 0)) == 0)
6852 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6853 reg_names[ REGNO (XEXP (x, 0)) ]);
6855 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6856 reg_names[ REGNO (XEXP (x, 1)) ]);
6858 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6860 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6861 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6864 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6865 && CONSTANT_P (XEXP (x, 1)))
6867 output_addr_const (file, XEXP (x, 1));
6868 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6872 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6873 && CONSTANT_P (XEXP (x, 1)))
6875 fprintf (file, "lo16(");
6876 output_addr_const (file, XEXP (x, 1));
6877 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6880 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6882 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6884 rtx contains_minus = XEXP (x, 1);
6888 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6889 turn it into (sym) for output_addr_const. */
6890 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6891 contains_minus = XEXP (contains_minus, 0);
6893 minus = XEXP (contains_minus, 0);
6894 symref = XEXP (minus, 0);
6895 XEXP (contains_minus, 0) = symref;
6900 name = XSTR (symref, 0);
6901 newname = alloca (strlen (name) + sizeof ("@toc"));
6902 strcpy (newname, name);
6903 strcat (newname, "@toc");
6904 XSTR (symref, 0) = newname;
6906 output_addr_const (file, XEXP (x, 1));
6908 XSTR (symref, 0) = name;
6909 XEXP (contains_minus, 0) = minus;
6912 output_addr_const (file, XEXP (x, 1));
6914 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6920 /* Target hook for assembling integer objects. The powerpc version has
6921 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6922 is defined. It also needs to handle DI-mode objects on 64-bit
6926 rs6000_assemble_integer (x, size, aligned_p)
6931 #ifdef RELOCATABLE_NEEDS_FIXUP
6932 /* Special handling for SI values. */
6933 if (size == 4 && aligned_p)
6935 extern int in_toc_section PARAMS ((void));
6936 static int recurse = 0;
6938 /* For -mrelocatable, we mark all addresses that need to be fixed up
6939 in the .fixup section. */
6940 if (TARGET_RELOCATABLE
6941 && !in_toc_section ()
6942 && !in_text_section ()
6944 && GET_CODE (x) != CONST_INT
6945 && GET_CODE (x) != CONST_DOUBLE
6951 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6953 ASM_OUTPUT_LABEL (asm_out_file, buf);
6954 fprintf (asm_out_file, "\t.long\t(");
6955 output_addr_const (asm_out_file, x);
6956 fprintf (asm_out_file, ")@fixup\n");
6957 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6958 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6959 fprintf (asm_out_file, "\t.long\t");
6960 assemble_name (asm_out_file, buf);
6961 fprintf (asm_out_file, "\n\t.previous\n");
6965 /* Remove initial .'s to turn a -mcall-aixdesc function
6966 address into the address of the descriptor, not the function
6968 else if (GET_CODE (x) == SYMBOL_REF
6969 && XSTR (x, 0)[0] == '.'
6970 && DEFAULT_ABI == ABI_AIX)
6972 const char *name = XSTR (x, 0);
6973 while (*name == '.')
6976 fprintf (asm_out_file, "\t.long\t%s\n", name);
6980 #endif /* RELOCATABLE_NEEDS_FIXUP */
6981 return default_assemble_integer (x, size, aligned_p);
6985 rs6000_reverse_condition (mode, code)
6986 enum machine_mode mode;
6989 /* Reversal of FP compares takes care -- an ordered compare
6990 becomes an unordered compare and vice versa. */
6991 if (mode == CCFPmode)
6992 return reverse_condition_maybe_unordered (code);
6994 return reverse_condition (code);
6997 /* Generate a compare for CODE. Return a brand-new rtx that
6998 represents the result of the compare. */
7001 rs6000_generate_compare (code)
7004 enum machine_mode comp_mode;
7007 if (rs6000_compare_fp_p)
7008 comp_mode = CCFPmode;
7009 else if (code == GTU || code == LTU
7010 || code == GEU || code == LEU)
7011 comp_mode = CCUNSmode;
7015 /* First, the compare. */
7016 compare_result = gen_reg_rtx (comp_mode);
7017 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7018 gen_rtx_COMPARE (comp_mode,
7020 rs6000_compare_op1)));
7022 /* Some kinds of FP comparisons need an OR operation;
7023 except for flag_unsafe_math_optimizations we don't bother. */
7024 if (rs6000_compare_fp_p
7025 && ! flag_unsafe_math_optimizations
7026 && (code == LE || code == GE
7027 || code == UNEQ || code == LTGT
7028 || code == UNGT || code == UNLT))
7030 enum rtx_code or1, or2;
7031 rtx or1_rtx, or2_rtx, compare2_rtx;
7032 rtx or_result = gen_reg_rtx (CCEQmode);
7036 case LE: or1 = LT; or2 = EQ; break;
7037 case GE: or1 = GT; or2 = EQ; break;
7038 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7039 case LTGT: or1 = LT; or2 = GT; break;
7040 case UNGT: or1 = UNORDERED; or2 = GT; break;
7041 case UNLT: or1 = UNORDERED; or2 = LT; break;
7044 validate_condition_mode (or1, comp_mode);
7045 validate_condition_mode (or2, comp_mode);
7046 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7047 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7048 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7049 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7051 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7053 compare_result = or_result;
7057 validate_condition_mode (code, GET_MODE (compare_result));
7059 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7063 /* Emit the RTL for an sCOND pattern. */
7066 rs6000_emit_sCOND (code, result)
7071 enum machine_mode op_mode;
7073 condition_rtx = rs6000_generate_compare (code);
7075 op_mode = GET_MODE (rs6000_compare_op0);
7076 if (op_mode == VOIDmode)
7077 op_mode = GET_MODE (rs6000_compare_op1);
7079 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7081 PUT_MODE (condition_rtx, DImode);
7082 convert_move (result, condition_rtx, 0);
7086 PUT_MODE (condition_rtx, SImode);
7087 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7091 /* Emit a branch of kind CODE to location LOC. */
7094 rs6000_emit_cbranch (code, loc)
7098 rtx condition_rtx, loc_ref;
7100 condition_rtx = rs6000_generate_compare (code);
7101 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7102 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7103 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7107 /* Return the string to output a conditional branch to LABEL, which is
7108 the operand number of the label, or -1 if the branch is really a
7111 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7112 condition code register and its mode specifies what kind of
7115 REVERSED is non-zero if we should reverse the sense of the comparison.
7117 INSN is the insn. */
7120 output_cbranch (op, label, reversed, insn)
7126 static char string[64];
7127 enum rtx_code code = GET_CODE (op);
7128 rtx cc_reg = XEXP (op, 0);
7129 enum machine_mode mode = GET_MODE (cc_reg);
7130 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7131 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7132 int really_reversed = reversed ^ need_longbranch;
7138 validate_condition_mode (code, mode);
7140 /* Work out which way this really branches. We could use
7141 reverse_condition_maybe_unordered here always but this
7142 makes the resulting assembler clearer. */
7143 if (really_reversed)
7144 code = rs6000_reverse_condition (mode, code);
7148 /* Not all of these are actually distinct opcodes, but
7149 we distinguish them for clarity of the resulting assembler. */
7151 ccode = "ne"; break;
7153 ccode = "eq"; break;
7155 ccode = "ge"; break;
7156 case GT: case GTU: case UNGT:
7157 ccode = "gt"; break;
7159 ccode = "le"; break;
7160 case LT: case LTU: case UNLT:
7161 ccode = "lt"; break;
7162 case UNORDERED: ccode = "un"; break;
7163 case ORDERED: ccode = "nu"; break;
7164 case UNGE: ccode = "nl"; break;
7165 case UNLE: ccode = "ng"; break;
7170 /* Maybe we have a guess as to how likely the branch is.
7171 The old mnemonics don't have a way to specify this information. */
7172 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7173 if (note != NULL_RTX)
7175 /* PROB is the difference from 50%. */
7176 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7178 /* For branches that are very close to 50%, assume not-taken. */
7179 if (abs (prob) > REG_BR_PROB_BASE / 20
7180 && ((prob > 0) ^ need_longbranch))
7189 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7191 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7193 /* We need to escape any '%' characters in the reg_names string.
7194 Assume they'd only be the first character... */
7195 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7197 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7201 /* If the branch distance was too far, we may have to use an
7202 unconditional branch to go the distance. */
7203 if (need_longbranch)
7204 s += sprintf (s, ",$+8\n\tb %s", label);
7206 s += sprintf (s, ",%s", label);
7212 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7213 operands of the last comparison is nonzero/true, FALSE_COND if it
7214 is zero/false. Return 0 if the hardware has no such operation. */
7217 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7223 enum rtx_code code = GET_CODE (op);
7224 rtx op0 = rs6000_compare_op0;
7225 rtx op1 = rs6000_compare_op1;
7227 enum machine_mode mode = GET_MODE (op0);
7230 /* First, work out if the hardware can do this at all, or
7231 if it's too slow... */
7232 /* If the comparison is an integer one, since we only have fsel
7233 it'll be cheaper to use a branch. */
7234 if (! rs6000_compare_fp_p)
7237 /* Eliminate half of the comparisons by switching operands, this
7238 makes the remaining code simpler. */
7239 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7240 || code == LTGT || code == LT)
7242 code = reverse_condition_maybe_unordered (code);
7244 true_cond = false_cond;
7248 /* UNEQ and LTGT take four instructions for a comparison with zero,
7249 it'll probably be faster to use a branch here too. */
7253 if (GET_CODE (op1) == CONST_DOUBLE)
7254 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7256 /* We're going to try to implement comparions by performing
7257 a subtract, then comparing against zero. Unfortunately,
7258 Inf - Inf is NaN which is not zero, and so if we don't
7259 know that the the operand is finite and the comparison
7260 would treat EQ different to UNORDERED, we can't do it. */
7261 if (! flag_unsafe_math_optimizations
7262 && code != GT && code != UNGE
7263 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7264 /* Constructs of the form (a OP b ? a : b) are safe. */
7265 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7266 || (! rtx_equal_p (op0, true_cond)
7267 && ! rtx_equal_p (op1, true_cond))))
7269 /* At this point we know we can use fsel. */
7271 /* Reduce the comparison to a comparison against zero. */
7272 temp = gen_reg_rtx (mode);
7273 emit_insn (gen_rtx_SET (VOIDmode, temp,
7274 gen_rtx_MINUS (mode, op0, op1)));
7276 op1 = CONST0_RTX (mode);
7278 /* If we don't care about NaNs we can reduce some of the comparisons
7279 down to faster ones. */
7280 if (flag_unsafe_math_optimizations)
7286 true_cond = false_cond;
7299 /* Now, reduce everything down to a GE. */
7306 temp = gen_reg_rtx (mode);
7307 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7312 temp = gen_reg_rtx (mode);
7313 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7318 temp = gen_reg_rtx (mode);
7319 emit_insn (gen_rtx_SET (VOIDmode, temp,
7321 gen_rtx_ABS (mode, op0))));
7326 temp = gen_reg_rtx (mode);
7327 emit_insn (gen_rtx_SET (VOIDmode, temp,
7328 gen_rtx_IF_THEN_ELSE (mode,
7329 gen_rtx_GE (VOIDmode,
7331 true_cond, false_cond)));
7333 true_cond = false_cond;
7335 temp = gen_reg_rtx (mode);
7336 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7341 temp = gen_reg_rtx (mode);
7342 emit_insn (gen_rtx_SET (VOIDmode, temp,
7343 gen_rtx_IF_THEN_ELSE (mode,
7344 gen_rtx_GE (VOIDmode,
7346 true_cond, false_cond)));
7348 false_cond = true_cond;
7350 temp = gen_reg_rtx (mode);
7351 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7359 emit_insn (gen_rtx_SET (VOIDmode, dest,
7360 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7361 gen_rtx_GE (VOIDmode,
7363 true_cond, false_cond)));
7368 rs6000_emit_minmax (dest, code, op0, op1)
7374 enum machine_mode mode = GET_MODE (op0);
7376 if (code == SMAX || code == UMAX)
7377 target = emit_conditional_move (dest, GE, op0, op1, mode,
7380 target = emit_conditional_move (dest, GE, op0, op1, mode,
7382 if (target == NULL_RTX)
7385 emit_move_insn (dest, target);
7388 /* This page contains routines that are used to determine what the
7389 function prologue and epilogue code will do and write them out. */
7391 /* Return the first fixed-point register that is required to be
7392 saved. 32 if none. */
7395 first_reg_to_save ()
7399 /* Find lowest numbered live register. */
7400 for (first_reg = 13; first_reg <= 31; first_reg++)
7401 if (regs_ever_live[first_reg]
7402 && (! call_used_regs[first_reg]
7403 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7404 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7405 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7408 if (current_function_profile)
7410 /* AIX must save/restore every register that contains a parameter
7411 before/after the .__mcount call plus an additional register
7412 for the static chain, if needed; use registers from 30 down to 22
7414 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7416 int last_parm_reg, profile_first_reg;
7418 /* Figure out last used parameter register. The proper thing
7419 to do is to walk incoming args of the function. A function
7420 might have live parameter registers even if it has no
7422 for (last_parm_reg = 10;
7423 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7427 /* Calculate first reg for saving parameter registers
7429 Skip reg 31 which may contain the frame pointer. */
7430 profile_first_reg = (33 - last_parm_reg
7431 - (current_function_needs_context ? 1 : 0));
7433 /* Need to skip another reg to account for R31 being PICBASE
7434 (when flag_pic is set) or R30 being used as the frame
7435 pointer (when flag_pic is not set). */
7436 --profile_first_reg;
7438 /* Do not save frame pointer if no parameters needs to be saved. */
7439 if (profile_first_reg == 31)
7440 profile_first_reg = 32;
7442 if (first_reg > profile_first_reg)
7443 first_reg = profile_first_reg;
7446 /* SVR4 may need one register to preserve the static chain. */
7447 else if (current_function_needs_context)
7449 /* Skip reg 31 which may contain the frame pointer. */
7456 if (flag_pic && current_function_uses_pic_offset_table &&
7457 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7458 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7464 /* Similar, for FP regs. */
7467 first_fp_reg_to_save ()
7471 /* Find lowest numbered live register. */
7472 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7473 if (regs_ever_live[first_reg])
7479 /* Similar, for AltiVec regs. */
7482 first_altivec_reg_to_save ()
7486 /* Stack frame remains as is unless we are in AltiVec ABI. */
7487 if (! TARGET_ALTIVEC_ABI)
7488 return LAST_ALTIVEC_REGNO + 1;
7490 /* Find lowest numbered live register. */
7491 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7492 if (regs_ever_live[i])
7498 /* Return a 32-bit mask of the AltiVec registers we need to set in
7499 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7500 the 32-bit word is 0. */
7503 compute_vrsave_mask ()
7505 unsigned int i, mask = 0;
7507 /* First, find out if we use _any_ altivec registers. */
7508 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7509 if (regs_ever_live[i])
7510 mask |= ALTIVEC_REG_BIT (i);
7515 /* Next, add all registers that are call-clobbered. We do this
7516 because post-reload register optimizers such as regrename_optimize
7517 may choose to use them. They never change the register class
7518 chosen by reload, so cannot create new uses of altivec registers
7519 if there were none before, so the early exit above is safe. */
7520 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7521 altivec registers not saved in the mask, which might well make the
7522 adjustments below more effective in eliding the save/restore of
7523 VRSAVE in small functions. */
7524 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7525 if (call_used_regs[i])
7526 mask |= ALTIVEC_REG_BIT (i);
7528 /* Next, remove the argument registers from the set. These must
7529 be in the VRSAVE mask set by the caller, so we don't need to add
7530 them in again. More importantly, the mask we compute here is
7531 used to generate CLOBBERs in the set_vrsave insn, and we do not
7532 wish the argument registers to die. */
7533 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7534 mask &= ~ALTIVEC_REG_BIT (i);
7536 /* Similarly, remove the return value from the set. */
7539 diddle_return_value (is_altivec_return_reg, &yes);
7541 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7548 is_altivec_return_reg (reg, xyes)
7552 bool *yes = (bool *) xyes;
7553 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7558 /* Calculate the stack information for the current function. This is
7559 complicated by having two separate calling sequences, the AIX calling
7560 sequence and the V.4 calling sequence.
7562 AIX (and Darwin/Mac OS X) stack frames look like:
7564 SP----> +---------------------------------------+
7565 | back chain to caller | 0 0
7566 +---------------------------------------+
7567 | saved CR | 4 8 (8-11)
7568 +---------------------------------------+
7570 +---------------------------------------+
7571 | reserved for compilers | 12 24
7572 +---------------------------------------+
7573 | reserved for binders | 16 32
7574 +---------------------------------------+
7575 | saved TOC pointer | 20 40
7576 +---------------------------------------+
7577 | Parameter save area (P) | 24 48
7578 +---------------------------------------+
7579 | Alloca space (A) | 24+P etc.
7580 +---------------------------------------+
7581 | Local variable space (L) | 24+P+A
7582 +---------------------------------------+
7583 | Float/int conversion temporary (X) | 24+P+A+L
7584 +---------------------------------------+
7585 | Save area for AltiVec registers (W) | 24+P+A+L+X
7586 +---------------------------------------+
7587 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7588 +---------------------------------------+
7589 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7590 +---------------------------------------+
7591 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7592 +---------------------------------------+
7593 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7594 +---------------------------------------+
7595 old SP->| back chain to caller's caller |
7596 +---------------------------------------+
7598 The required alignment for AIX configurations is two words (i.e., 8
7602 V.4 stack frames look like:
7604 SP----> +---------------------------------------+
7605 | back chain to caller | 0
7606 +---------------------------------------+
7607 | caller's saved LR | 4
7608 +---------------------------------------+
7609 | Parameter save area (P) | 8
7610 +---------------------------------------+
7611 | Alloca space (A) | 8+P
7612 +---------------------------------------+
7613 | Varargs save area (V) | 8+P+A
7614 +---------------------------------------+
7615 | Local variable space (L) | 8+P+A+V
7616 +---------------------------------------+
7617 | Float/int conversion temporary (X) | 8+P+A+V+L
7618 +---------------------------------------+
7619 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7620 +---------------------------------------+
7621 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7622 +---------------------------------------+
7623 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7624 +---------------------------------------+
7625 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7626 +---------------------------------------+
7627 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7628 +---------------------------------------+
7629 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7630 +---------------------------------------+
7631 old SP->| back chain to caller's caller |
7632 +---------------------------------------+
7634 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7635 given. (But note below and in sysv4.h that we require only 8 and
7636 may round up the size of our stack frame anyways. The historical
7637 reason is early versions of powerpc-linux which didn't properly
7638 align the stack at program startup. A happy side-effect is that
7639 -mno-eabi libraries can be used with -meabi programs.)
7641 The EABI configuration defaults to the V.4 layout, unless
7642 -mcall-aix is used, in which case the AIX layout is used. However,
7643 the stack alignment requirements may differ. If -mno-eabi is not
7644 given, the required stack alignment is 8 bytes; if -mno-eabi is
7645 given, the required alignment is 16 bytes. (But see V.4 comment
7648 #ifndef ABI_STACK_BOUNDARY
7649 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7653 rs6000_stack_info ()
7655 static rs6000_stack_t info, zero_info;
7656 rs6000_stack_t *info_ptr = &info;
7657 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7658 enum rs6000_abi abi;
7662 /* Zero all fields portably. */
7665 /* Select which calling sequence. */
7666 info_ptr->abi = abi = DEFAULT_ABI;
7668 /* Calculate which registers need to be saved & save area size. */
7669 info_ptr->first_gp_reg_save = first_reg_to_save ();
7670 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7671 even if it currently looks like we won't. */
7672 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7673 || (flag_pic == 1 && abi == ABI_V4)
7674 || (flag_pic && abi == ABI_DARWIN))
7675 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7676 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7678 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7680 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7681 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7683 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7684 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7685 - info_ptr->first_altivec_reg_save);
7687 /* Does this function call anything? */
7688 info_ptr->calls_p = (! current_function_is_leaf
7689 || cfun->machine->ra_needs_full_frame);
7691 /* Determine if we need to save the link register. */
7692 if (rs6000_ra_ever_killed ()
7693 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7694 #ifdef TARGET_RELOCATABLE
7695 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7697 || (info_ptr->first_fp_reg_save != 64
7698 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7699 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7700 || (abi == ABI_V4 && current_function_calls_alloca)
7701 || (DEFAULT_ABI == ABI_DARWIN
7703 && current_function_uses_pic_offset_table)
7704 || info_ptr->calls_p)
7706 info_ptr->lr_save_p = 1;
7707 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7710 /* Determine if we need to save the condition code registers. */
7711 if (regs_ever_live[CR2_REGNO]
7712 || regs_ever_live[CR3_REGNO]
7713 || regs_ever_live[CR4_REGNO])
7715 info_ptr->cr_save_p = 1;
7717 info_ptr->cr_size = reg_size;
7720 /* If the current function calls __builtin_eh_return, then we need
7721 to allocate stack space for registers that will hold data for
7722 the exception handler. */
7723 if (current_function_calls_eh_return)
7726 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7728 ehrd_size = i * UNITS_PER_WORD;
7733 /* Determine various sizes. */
7734 info_ptr->reg_size = reg_size;
7735 info_ptr->fixed_size = RS6000_SAVE_AREA;
7736 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7737 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7738 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7741 if (TARGET_ALTIVEC_ABI)
7743 info_ptr->vrsave_mask = compute_vrsave_mask ();
7744 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7748 info_ptr->vrsave_mask = 0;
7749 info_ptr->vrsave_size = 0;
7752 /* Calculate the offsets. */
7760 case ABI_AIX_NODESC:
7762 info_ptr->fp_save_offset = - info_ptr->fp_size;
7763 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7765 if (TARGET_ALTIVEC_ABI)
7767 info_ptr->vrsave_save_offset
7768 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7770 /* Align stack so vector save area is on a quadword boundary. */
7771 if (info_ptr->altivec_size != 0)
7772 info_ptr->altivec_padding_size
7773 = 16 - (-info_ptr->vrsave_save_offset % 16);
7775 info_ptr->altivec_padding_size = 0;
7777 info_ptr->altivec_save_offset
7778 = info_ptr->vrsave_save_offset
7779 - info_ptr->altivec_padding_size
7780 - info_ptr->altivec_size;
7782 /* Adjust for AltiVec case. */
7783 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7786 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7787 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7788 info_ptr->lr_save_offset = 2*reg_size;
7792 info_ptr->fp_save_offset = - info_ptr->fp_size;
7793 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7794 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7796 if (TARGET_ALTIVEC_ABI)
7798 info_ptr->vrsave_save_offset
7799 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7801 /* Align stack so vector save area is on a quadword boundary. */
7802 if (info_ptr->altivec_size != 0)
7803 info_ptr->altivec_padding_size
7804 = 16 - (-info_ptr->vrsave_save_offset % 16);
7806 info_ptr->altivec_padding_size = 0;
7808 info_ptr->altivec_save_offset
7809 = info_ptr->vrsave_save_offset
7810 - info_ptr->altivec_padding_size
7811 - info_ptr->altivec_size;
7813 /* Adjust for AltiVec case. */
7814 info_ptr->toc_save_offset
7815 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7818 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7819 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7820 info_ptr->lr_save_offset = reg_size;
7824 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7826 + info_ptr->altivec_size
7827 + info_ptr->altivec_padding_size
7828 + info_ptr->vrsave_size
7832 + info_ptr->vrsave_size
7833 + info_ptr->toc_size,
7834 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7837 total_raw_size = (info_ptr->vars_size
7838 + info_ptr->parm_size
7839 + info_ptr->save_size
7840 + info_ptr->varargs_size
7841 + info_ptr->fixed_size);
7843 info_ptr->total_size =
7844 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7846 /* Determine if we need to allocate any stack frame:
7848 For AIX we need to push the stack if a frame pointer is needed
7849 (because the stack might be dynamically adjusted), if we are
7850 debugging, if we make calls, or if the sum of fp_save, gp_save,
7851 and local variables are more than the space needed to save all
7852 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7853 + 18*8 = 288 (GPR13 reserved).
7855 For V.4 we don't have the stack cushion that AIX uses, but assume
7856 that the debugger can handle stackless frames. */
7858 if (info_ptr->calls_p)
7859 info_ptr->push_p = 1;
7861 else if (abi == ABI_V4)
7862 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7865 info_ptr->push_p = (frame_pointer_needed
7866 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7867 || ((total_raw_size - info_ptr->fixed_size)
7868 > (TARGET_32BIT ? 220 : 288)));
7870 /* Zero offsets if we're not saving those registers. */
7871 if (info_ptr->fp_size == 0)
7872 info_ptr->fp_save_offset = 0;
7874 if (info_ptr->gp_size == 0)
7875 info_ptr->gp_save_offset = 0;
7877 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7878 info_ptr->altivec_save_offset = 0;
7880 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7881 info_ptr->vrsave_save_offset = 0;
7883 if (! info_ptr->lr_save_p)
7884 info_ptr->lr_save_offset = 0;
7886 if (! info_ptr->cr_save_p)
7887 info_ptr->cr_save_offset = 0;
7889 if (! info_ptr->toc_save_p)
7890 info_ptr->toc_save_offset = 0;
7896 debug_stack_info (info)
7897 rs6000_stack_t *info;
7899 const char *abi_string;
7902 info = rs6000_stack_info ();
7904 fprintf (stderr, "\nStack information for function %s:\n",
7905 ((current_function_decl && DECL_NAME (current_function_decl))
7906 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7911 default: abi_string = "Unknown"; break;
7912 case ABI_NONE: abi_string = "NONE"; break;
7914 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7915 case ABI_DARWIN: abi_string = "Darwin"; break;
7916 case ABI_V4: abi_string = "V.4"; break;
7919 fprintf (stderr, "\tABI = %5s\n", abi_string);
7921 if (TARGET_ALTIVEC_ABI)
7922 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7924 if (info->first_gp_reg_save != 32)
7925 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7927 if (info->first_fp_reg_save != 64)
7928 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7930 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7931 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7932 info->first_altivec_reg_save);
7934 if (info->lr_save_p)
7935 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7937 if (info->cr_save_p)
7938 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7940 if (info->toc_save_p)
7941 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7943 if (info->vrsave_mask)
7944 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7947 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7950 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7952 if (info->gp_save_offset)
7953 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7955 if (info->fp_save_offset)
7956 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7958 if (info->altivec_save_offset)
7959 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7960 info->altivec_save_offset);
7962 if (info->vrsave_save_offset)
7963 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7964 info->vrsave_save_offset);
7966 if (info->lr_save_offset)
7967 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7969 if (info->cr_save_offset)
7970 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7972 if (info->toc_save_offset)
7973 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7975 if (info->varargs_save_offset)
7976 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7978 if (info->total_size)
7979 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7981 if (info->varargs_size)
7982 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7984 if (info->vars_size)
7985 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7987 if (info->parm_size)
7988 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7990 if (info->fixed_size)
7991 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7994 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7997 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7999 if (info->altivec_size)
8000 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8002 if (info->vrsave_size)
8003 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8005 if (info->altivec_padding_size)
8006 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8007 info->altivec_padding_size);
8010 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8013 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8016 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8018 if (info->save_size)
8019 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8021 if (info->reg_size != 4)
8022 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8024 fprintf (stderr, "\n");
8028 rs6000_return_addr (count, frame)
8032 /* Currently we don't optimize very well between prolog and body
8033 code and for PIC code the code can be actually quite bad, so
8034 don't try to be too clever here. */
8035 if (count != 0 || flag_pic != 0)
8037 cfun->machine->ra_needs_full_frame = 1;
8044 plus_constant (copy_to_reg
8045 (gen_rtx_MEM (Pmode,
8046 memory_address (Pmode, frame))),
8047 RETURN_ADDRESS_OFFSET)));
8050 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8054 rs6000_ra_ever_killed ()
8058 #ifdef ASM_OUTPUT_MI_THUNK
8059 if (current_function_is_thunk)
8062 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8063 || cfun->machine->ra_needs_full_frame)
8064 return regs_ever_live[LINK_REGISTER_REGNUM];
8066 push_topmost_sequence ();
8068 pop_topmost_sequence ();
8070 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8074 /* Add a REG_MAYBE_DEAD note to the insn. */
8076 rs6000_maybe_dead (insn)
8079 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8084 /* Emit instructions needed to load the TOC register.
8085 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8086 a constant pool; or for SVR4 -fpic. */
8089 rs6000_emit_load_toc_table (fromprolog)
8093 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8095 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8097 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8099 rtx temp = (fromprolog
8100 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8101 : gen_reg_rtx (Pmode));
8102 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8103 rs6000_maybe_dead (emit_move_insn (dest, temp));
8105 else if (flag_pic == 2)
8108 rtx tempLR = (fromprolog
8109 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8110 : gen_reg_rtx (Pmode));
8111 rtx temp0 = (fromprolog
8112 ? gen_rtx_REG (Pmode, 0)
8113 : gen_reg_rtx (Pmode));
8116 /* possibly create the toc section */
8117 if (! toc_initialized)
8120 function_section (current_function_decl);
8127 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8128 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8130 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8131 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8133 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8135 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8136 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8143 static int reload_toc_labelno = 0;
8145 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8147 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8148 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8150 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8153 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8154 rs6000_maybe_dead (emit_move_insn (temp0,
8155 gen_rtx_MEM (Pmode, dest)));
8157 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8159 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8161 /* This is for AIX code running in non-PIC ELF. */
8164 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8165 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8167 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8168 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8176 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8178 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8183 get_TOC_alias_set ()
8185 static int set = -1;
8187 set = new_alias_set ();
8191 /* This retuns nonzero if the current function uses the TOC. This is
8192 determined by the presence of (unspec ... 7), which is generated by
8193 the various load_toc_* patterns. */
8200 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8203 rtx pat = PATTERN (insn);
8206 if (GET_CODE (pat) == PARALLEL)
8207 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8208 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8209 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8216 create_TOC_reference (symbol)
8219 return gen_rtx_PLUS (Pmode,
8220 gen_rtx_REG (Pmode, TOC_REGISTER),
8221 gen_rtx_CONST (Pmode,
8222 gen_rtx_MINUS (Pmode, symbol,
8223 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8227 /* __throw will restore its own return address to be the same as the
8228 return address of the function that the throw is being made to.
8229 This is unfortunate, because we want to check the original
8230 return address to see if we need to restore the TOC.
8231 So we have to squirrel it away here.
8232 This is used only in compiling __throw and __rethrow.
8234 Most of this code should be removed by CSE. */
8235 static rtx insn_after_throw;
8237 /* This does the saving... */
8239 rs6000_aix_emit_builtin_unwind_init ()
8242 rtx stack_top = gen_reg_rtx (Pmode);
8243 rtx opcode_addr = gen_reg_rtx (Pmode);
8245 insn_after_throw = gen_reg_rtx (SImode);
8247 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8248 emit_move_insn (stack_top, mem);
8250 mem = gen_rtx_MEM (Pmode,
8251 gen_rtx_PLUS (Pmode, stack_top,
8252 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8253 emit_move_insn (opcode_addr, mem);
8254 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8257 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8258 in _eh.o). Only used on AIX.
8260 The idea is that on AIX, function calls look like this:
8261 bl somefunction-trampoline
8265 somefunction-trampoline:
8267 ... load function address in the count register ...
8269 or like this, if the linker determines that this is not a cross-module call
8270 and so the TOC need not be restored:
8273 or like this, if the compiler could determine that this is not a
8276 now, the tricky bit here is that register 2 is saved and restored
8277 by the _linker_, so we can't readily generate debugging information
8278 for it. So we need to go back up the call chain looking at the
8279 insns at return addresses to see which calls saved the TOC register
8280 and so see where it gets restored from.
8282 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8283 just before the actual epilogue.
8285 On the bright side, this incurs no space or time overhead unless an
8286 exception is thrown, except for the extra code in libgcc.a.
8288 The parameter STACKSIZE is a register containing (at runtime)
8289 the amount to be popped off the stack in addition to the stack frame
8290 of this routine (which will be __throw or __rethrow, and so is
8291 guaranteed to have a stack frame). */
8294 rs6000_emit_eh_toc_restore (stacksize)
8298 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8299 rtx tocompare = gen_reg_rtx (SImode);
8300 rtx opcode = gen_reg_rtx (SImode);
8301 rtx opcode_addr = gen_reg_rtx (Pmode);
8303 rtx loop_start = gen_label_rtx ();
8304 rtx no_toc_restore_needed = gen_label_rtx ();
8305 rtx loop_exit = gen_label_rtx ();
8307 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8308 set_mem_alias_set (mem, rs6000_sr_alias_set);
8309 emit_move_insn (bottom_of_stack, mem);
8311 top_of_stack = expand_binop (Pmode, add_optab,
8312 bottom_of_stack, stacksize,
8313 NULL_RTX, 1, OPTAB_WIDEN);
8315 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8316 : 0xE8410028, SImode));
8318 if (insn_after_throw == NULL_RTX)
8320 emit_move_insn (opcode, insn_after_throw);
8322 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8323 emit_label (loop_start);
8325 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8326 SImode, NULL_RTX, NULL_RTX,
8327 no_toc_restore_needed);
8329 mem = gen_rtx_MEM (Pmode,
8330 gen_rtx_PLUS (Pmode, bottom_of_stack,
8331 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8332 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8334 emit_label (no_toc_restore_needed);
8335 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8336 Pmode, NULL_RTX, NULL_RTX,
8339 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8340 set_mem_alias_set (mem, rs6000_sr_alias_set);
8341 emit_move_insn (bottom_of_stack, mem);
8343 mem = gen_rtx_MEM (Pmode,
8344 gen_rtx_PLUS (Pmode, bottom_of_stack,
8345 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8346 emit_move_insn (opcode_addr, mem);
8347 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8349 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8350 emit_jump (loop_start);
8351 emit_note (NULL, NOTE_INSN_LOOP_END);
8352 emit_label (loop_exit);
8354 #endif /* TARGET_AIX */
8356 /* This ties together stack memory (MEM with an alias set of
8357 rs6000_sr_alias_set) and the change to the stack pointer. */
8360 rs6000_emit_stack_tie ()
8362 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8364 set_mem_alias_set (mem, rs6000_sr_alias_set);
8365 emit_insn (gen_stack_tie (mem));
8368 /* Emit the correct code for allocating stack space, as insns.
8369 If COPY_R12, make sure a copy of the old frame is left in r12.
8370 The generated code may use hard register 0 as a temporary. */
8373 rs6000_emit_allocate_stack (size, copy_r12)
8378 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8379 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8380 rtx todec = GEN_INT (-size);
8382 if (current_function_limit_stack)
8384 if (REG_P (stack_limit_rtx)
8385 && REGNO (stack_limit_rtx) > 1
8386 && REGNO (stack_limit_rtx) <= 31)
8388 emit_insn (Pmode == SImode
8389 ? gen_addsi3 (tmp_reg,
8392 : gen_adddi3 (tmp_reg,
8396 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8399 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8401 && DEFAULT_ABI == ABI_V4)
8403 rtx toload = gen_rtx_CONST (VOIDmode,
8404 gen_rtx_PLUS (Pmode,
8408 emit_insn (gen_elf_high (tmp_reg, toload));
8409 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8410 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8414 warning ("stack limit expression is not supported");
8417 if (copy_r12 || ! TARGET_UPDATE)
8418 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8424 /* Need a note here so that try_split doesn't get confused. */
8425 if (get_last_insn() == NULL_RTX)
8426 emit_note (0, NOTE_INSN_DELETED);
8427 insn = emit_move_insn (tmp_reg, todec);
8428 try_split (PATTERN (insn), insn, 0);
8432 if (Pmode == SImode)
8433 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8436 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8441 if (Pmode == SImode)
8442 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8444 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8445 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8446 gen_rtx_REG (Pmode, 12));
8449 RTX_FRAME_RELATED_P (insn) = 1;
8451 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8452 gen_rtx_SET (VOIDmode, stack_reg,
8453 gen_rtx_PLUS (Pmode, stack_reg,
8458 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8461 (mem (plus (blah) (regXX)))
8465 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8468 altivec_frame_fixup (insn, reg, val)
8474 real = copy_rtx (PATTERN (insn));
8476 real = replace_rtx (real, reg, GEN_INT (val));
8478 RTX_FRAME_RELATED_P (insn) = 1;
8479 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8484 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8485 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8486 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8487 deduce these equivalences by itself so it wasn't necessary to hold
8488 its hand so much. */
8491 rs6000_frame_related (insn, reg, val, reg2, rreg)
8500 real = copy_rtx (PATTERN (insn));
8502 real = replace_rtx (real, reg,
8503 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8504 STACK_POINTER_REGNUM),
8507 /* We expect that 'real' is either a SET or a PARALLEL containing
8508 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8509 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8511 if (GET_CODE (real) == SET)
8515 temp = simplify_rtx (SET_SRC (set));
8517 SET_SRC (set) = temp;
8518 temp = simplify_rtx (SET_DEST (set));
8520 SET_DEST (set) = temp;
8521 if (GET_CODE (SET_DEST (set)) == MEM)
8523 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8525 XEXP (SET_DEST (set), 0) = temp;
8528 else if (GET_CODE (real) == PARALLEL)
8531 for (i = 0; i < XVECLEN (real, 0); i++)
8532 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8534 rtx set = XVECEXP (real, 0, i);
8536 temp = simplify_rtx (SET_SRC (set));
8538 SET_SRC (set) = temp;
8539 temp = simplify_rtx (SET_DEST (set));
8541 SET_DEST (set) = temp;
8542 if (GET_CODE (SET_DEST (set)) == MEM)
8544 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8546 XEXP (SET_DEST (set), 0) = temp;
8548 RTX_FRAME_RELATED_P (set) = 1;
8554 if (reg2 != NULL_RTX)
8555 real = replace_rtx (real, reg2, rreg);
8557 RTX_FRAME_RELATED_P (insn) = 1;
8558 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8563 /* Returns an insn that has a vrsave set operation with the
8564 appropriate CLOBBERs. */
8567 generate_set_vrsave (reg, info, epiloguep)
8569 rs6000_stack_t *info;
8573 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8574 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8577 = gen_rtx_SET (VOIDmode,
8579 gen_rtx_UNSPEC_VOLATILE (SImode,
8580 gen_rtvec (2, reg, vrsave),
8585 /* We need to clobber the registers in the mask so the scheduler
8586 does not move sets to VRSAVE before sets of AltiVec registers.
8588 However, if the function receives nonlocal gotos, reload will set
8589 all call saved registers live. We will end up with:
8591 (set (reg 999) (mem))
8592 (parallel [ (set (reg vrsave) (unspec blah))
8593 (clobber (reg 999))])
8595 The clobber will cause the store into reg 999 to be dead, and
8596 flow will attempt to delete an epilogue insn. In this case, we
8597 need an unspec use/set of the register. */
8599 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8600 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8602 if (!epiloguep || call_used_regs [i])
8603 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8604 gen_rtx_REG (V4SImode, i));
8607 rtx reg = gen_rtx_REG (V4SImode, i);
8610 = gen_rtx_SET (VOIDmode,
8612 gen_rtx_UNSPEC (V4SImode,
8613 gen_rtvec (1, reg), 27));
8617 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8619 for (i = 0; i < nclobs; ++i)
8620 XVECEXP (insn, 0, i) = clobs[i];
8625 /* Emit function prologue as insns. */
8628 rs6000_emit_prologue ()
8630 rs6000_stack_t *info = rs6000_stack_info ();
8631 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8632 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8633 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8634 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8635 rtx frame_reg_rtx = sp_reg_rtx;
8636 rtx cr_save_rtx = NULL;
8638 int saving_FPRs_inline;
8639 int using_store_multiple;
8640 HOST_WIDE_INT sp_offset = 0;
8642 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8643 && info->first_gp_reg_save < 31);
8644 saving_FPRs_inline = (info->first_fp_reg_save == 64
8645 || FP_SAVE_INLINE (info->first_fp_reg_save));
8647 /* For V.4, update stack before we do any saving and set back pointer. */
8648 if (info->push_p && DEFAULT_ABI == ABI_V4)
8650 if (info->total_size < 32767)
8651 sp_offset = info->total_size;
8653 frame_reg_rtx = frame_ptr_rtx;
8654 rs6000_emit_allocate_stack (info->total_size,
8655 (frame_reg_rtx != sp_reg_rtx
8658 || info->first_fp_reg_save < 64
8659 || info->first_gp_reg_save < 32
8661 if (frame_reg_rtx != sp_reg_rtx)
8662 rs6000_emit_stack_tie ();
8665 /* Save AltiVec registers if needed. */
8666 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8670 /* There should be a non inline version of this, for when we
8671 are saving lots of vector registers. */
8672 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8673 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8675 rtx areg, savereg, mem;
8678 offset = info->altivec_save_offset + sp_offset
8679 + 16 * (i - info->first_altivec_reg_save);
8681 savereg = gen_rtx_REG (V4SImode, i);
8683 areg = gen_rtx_REG (Pmode, 0);
8684 emit_move_insn (areg, GEN_INT (offset));
8686 /* AltiVec addressing mode is [reg+reg]. */
8687 mem = gen_rtx_MEM (V4SImode,
8688 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8690 set_mem_alias_set (mem, rs6000_sr_alias_set);
8692 insn = emit_move_insn (mem, savereg);
8694 altivec_frame_fixup (insn, areg, offset);
8698 /* VRSAVE is a bit vector representing which AltiVec registers
8699 are used. The OS uses this to determine which vector
8700 registers to save on a context switch. We need to save
8701 VRSAVE on the stack frame, add whatever AltiVec registers we
8702 used in this function, and do the corresponding magic in the
8705 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8707 rtx reg, mem, vrsave;
8710 /* Get VRSAVE onto a GPR. */
8711 reg = gen_rtx_REG (SImode, 12);
8712 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8714 emit_insn (gen_get_vrsave_internal (reg));
8716 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8719 offset = info->vrsave_save_offset + sp_offset;
8721 = gen_rtx_MEM (SImode,
8722 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8723 set_mem_alias_set (mem, rs6000_sr_alias_set);
8724 insn = emit_move_insn (mem, reg);
8726 /* Include the registers in the mask. */
8727 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8729 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8732 /* If we use the link register, get it into r0. */
8733 if (info->lr_save_p)
8734 emit_move_insn (gen_rtx_REG (Pmode, 0),
8735 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8737 /* If we need to save CR, put it into r12. */
8738 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8740 cr_save_rtx = gen_rtx_REG (SImode, 12);
8741 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8744 /* Do any required saving of fpr's. If only one or two to save, do
8745 it ourselves. Otherwise, call function. */
8746 if (saving_FPRs_inline)
8749 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8750 if ((regs_ever_live[info->first_fp_reg_save+i]
8751 && ! call_used_regs[info->first_fp_reg_save+i]))
8754 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8755 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8756 GEN_INT (info->fp_save_offset
8759 mem = gen_rtx_MEM (DFmode, addr);
8760 set_mem_alias_set (mem, rs6000_sr_alias_set);
8762 insn = emit_move_insn (mem, reg);
8763 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8764 NULL_RTX, NULL_RTX);
8767 else if (info->first_fp_reg_save != 64)
8771 const char *alloc_rname;
8773 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8775 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8777 LINK_REGISTER_REGNUM));
8778 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8779 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8780 alloc_rname = ggc_strdup (rname);
8781 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8782 gen_rtx_SYMBOL_REF (Pmode,
8784 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8787 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8788 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8789 GEN_INT (info->fp_save_offset
8790 + sp_offset + 8*i));
8791 mem = gen_rtx_MEM (DFmode, addr);
8792 set_mem_alias_set (mem, rs6000_sr_alias_set);
8794 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8796 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8797 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8798 NULL_RTX, NULL_RTX);
8801 /* Save GPRs. This is done as a PARALLEL if we are using
8802 the store-multiple instructions. */
8803 if (using_store_multiple)
8807 p = rtvec_alloc (32 - info->first_gp_reg_save);
8808 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8809 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8812 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8813 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8814 GEN_INT (info->gp_save_offset
8817 mem = gen_rtx_MEM (reg_mode, addr);
8818 set_mem_alias_set (mem, rs6000_sr_alias_set);
8820 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8822 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8823 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8824 NULL_RTX, NULL_RTX);
8829 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8830 if ((regs_ever_live[info->first_gp_reg_save+i]
8831 && ! call_used_regs[info->first_gp_reg_save+i])
8832 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8833 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8834 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8837 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8838 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8839 GEN_INT (info->gp_save_offset
8842 mem = gen_rtx_MEM (reg_mode, addr);
8843 set_mem_alias_set (mem, rs6000_sr_alias_set);
8845 insn = emit_move_insn (mem, reg);
8846 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8847 NULL_RTX, NULL_RTX);
8851 /* ??? There's no need to emit actual instructions here, but it's the
8852 easiest way to get the frame unwind information emitted. */
8853 if (current_function_calls_eh_return)
8855 unsigned int i, regno;
8861 regno = EH_RETURN_DATA_REGNO (i);
8862 if (regno == INVALID_REGNUM)
8865 reg = gen_rtx_REG (reg_mode, regno);
8866 addr = plus_constant (frame_reg_rtx,
8867 info->ehrd_offset + sp_offset
8868 + reg_size * (int) i);
8869 mem = gen_rtx_MEM (reg_mode, addr);
8870 set_mem_alias_set (mem, rs6000_sr_alias_set);
8872 insn = emit_move_insn (mem, reg);
8873 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8874 NULL_RTX, NULL_RTX);
8878 /* Save lr if we used it. */
8879 if (info->lr_save_p)
8881 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8882 GEN_INT (info->lr_save_offset + sp_offset));
8883 rtx reg = gen_rtx_REG (Pmode, 0);
8884 rtx mem = gen_rtx_MEM (Pmode, addr);
8885 /* This should not be of rs6000_sr_alias_set, because of
8886 __builtin_return_address. */
8888 insn = emit_move_insn (mem, reg);
8889 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8890 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8893 /* Save CR if we use any that must be preserved. */
8894 if (info->cr_save_p)
8896 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8897 GEN_INT (info->cr_save_offset + sp_offset));
8898 rtx mem = gen_rtx_MEM (SImode, addr);
8900 set_mem_alias_set (mem, rs6000_sr_alias_set);
8902 /* If r12 was used to hold the original sp, copy cr into r0 now
8904 if (REGNO (frame_reg_rtx) == 12)
8906 cr_save_rtx = gen_rtx_REG (SImode, 0);
8907 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8909 insn = emit_move_insn (mem, cr_save_rtx);
8911 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8912 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8913 OK. All we have to do is specify that _one_ condition code
8914 register is saved in this stack slot. The thrower's epilogue
8915 will then restore all the call-saved registers.
8916 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8917 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8918 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8921 /* Update stack and set back pointer unless this is V.4,
8922 for which it was done previously. */
8923 if (info->push_p && DEFAULT_ABI != ABI_V4)
8924 rs6000_emit_allocate_stack (info->total_size, FALSE);
8926 /* Set frame pointer, if needed. */
8927 if (frame_pointer_needed)
8929 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8931 RTX_FRAME_RELATED_P (insn) = 1;
8934 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8935 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8936 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8937 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
8939 /* If emit_load_toc_table will use the link register, we need to save
8940 it. We use R11 for this purpose because emit_load_toc_table
8941 can use register 0. This allows us to use a plain 'blr' to return
8942 from the procedure more often. */
8943 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8944 && ! info->lr_save_p
8945 && EXIT_BLOCK_PTR->pred != NULL);
8946 if (save_LR_around_toc_setup)
8947 emit_move_insn (gen_rtx_REG (Pmode, 11),
8948 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8950 rs6000_emit_load_toc_table (TRUE);
8952 if (save_LR_around_toc_setup)
8953 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8954 gen_rtx_REG (Pmode, 11));
8957 if (DEFAULT_ABI == ABI_DARWIN
8958 && flag_pic && current_function_uses_pic_offset_table)
8960 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8962 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8965 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
8966 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8970 /* Write function prologue. */
8973 rs6000_output_function_prologue (file, size)
8975 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8977 rs6000_stack_t *info = rs6000_stack_info ();
8979 if (TARGET_DEBUG_STACK)
8980 debug_stack_info (info);
8982 /* Write .extern for any function we will call to save and restore
8984 if (info->first_fp_reg_save < 64
8985 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8986 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8987 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8988 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8991 /* Write .extern for AIX common mode routines, if needed. */
8992 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8994 fputs ("\t.extern __mulh\n", file);
8995 fputs ("\t.extern __mull\n", file);
8996 fputs ("\t.extern __divss\n", file);
8997 fputs ("\t.extern __divus\n", file);
8998 fputs ("\t.extern __quoss\n", file);
8999 fputs ("\t.extern __quous\n", file);
9000 common_mode_defined = 1;
9003 if (! HAVE_prologue)
9007 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9008 the "toplevel" insn chain. */
9009 emit_note (0, NOTE_INSN_DELETED);
9010 rs6000_emit_prologue ();
9011 emit_note (0, NOTE_INSN_DELETED);
9013 if (TARGET_DEBUG_STACK)
9014 debug_rtx_list (get_insns (), 100);
9015 final (get_insns (), file, FALSE, FALSE);
9019 rs6000_pic_labelno++;
9022 /* Emit function epilogue as insns.
9024 At present, dwarf2out_frame_debug_expr doesn't understand
9025 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9026 anywhere in the epilogue. Most of the insns below would in any case
9027 need special notes to explain where r11 is in relation to the stack. */
9030 rs6000_emit_epilogue (sibcall)
9033 rs6000_stack_t *info;
9034 int restoring_FPRs_inline;
9035 int using_load_multiple;
9036 int using_mfcr_multiple;
9037 int use_backchain_to_restore_sp;
9039 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9040 rtx frame_reg_rtx = sp_reg_rtx;
9041 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9042 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9045 info = rs6000_stack_info ();
9046 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9047 && info->first_gp_reg_save < 31);
9048 restoring_FPRs_inline = (sibcall
9049 || current_function_calls_eh_return
9050 || info->first_fp_reg_save == 64
9051 || FP_SAVE_INLINE (info->first_fp_reg_save));
9052 use_backchain_to_restore_sp = (frame_pointer_needed
9053 || current_function_calls_alloca
9054 || info->total_size > 32767);
9055 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9056 || rs6000_cpu == PROCESSOR_PPC603
9057 || rs6000_cpu == PROCESSOR_PPC750
9060 /* If we have a frame pointer, a call to alloca, or a large stack
9061 frame, restore the old stack pointer using the backchain. Otherwise,
9062 we know what size to update it with. */
9063 if (use_backchain_to_restore_sp)
9065 /* Under V.4, don't reset the stack pointer until after we're done
9066 loading the saved registers. */
9067 if (DEFAULT_ABI == ABI_V4)
9068 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9070 emit_move_insn (frame_reg_rtx,
9071 gen_rtx_MEM (Pmode, sp_reg_rtx));
9074 else if (info->push_p)
9076 if (DEFAULT_ABI == ABI_V4)
9077 sp_offset = info->total_size;
9080 emit_insn (TARGET_32BIT
9081 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9082 GEN_INT (info->total_size))
9083 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9084 GEN_INT (info->total_size)));
9088 /* Restore AltiVec registers if needed. */
9089 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9093 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9094 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9096 rtx addr, areg, mem;
9098 areg = gen_rtx_REG (Pmode, 0);
9100 (areg, GEN_INT (info->altivec_save_offset
9102 + 16 * (i - info->first_altivec_reg_save)));
9104 /* AltiVec addressing mode is [reg+reg]. */
9105 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9106 mem = gen_rtx_MEM (V4SImode, addr);
9107 set_mem_alias_set (mem, rs6000_sr_alias_set);
9109 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9113 /* Restore VRSAVE if needed. */
9114 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9118 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9119 GEN_INT (info->vrsave_save_offset + sp_offset));
9120 mem = gen_rtx_MEM (SImode, addr);
9121 set_mem_alias_set (mem, rs6000_sr_alias_set);
9122 reg = gen_rtx_REG (SImode, 12);
9123 emit_move_insn (reg, mem);
9125 emit_insn (generate_set_vrsave (reg, info, 1));
9128 /* Get the old lr if we saved it. */
9129 if (info->lr_save_p)
9131 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9132 GEN_INT (info->lr_save_offset + sp_offset));
9133 rtx mem = gen_rtx_MEM (Pmode, addr);
9135 set_mem_alias_set (mem, rs6000_sr_alias_set);
9137 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9140 /* Get the old cr if we saved it. */
9141 if (info->cr_save_p)
9143 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9144 GEN_INT (info->cr_save_offset + sp_offset));
9145 rtx mem = gen_rtx_MEM (SImode, addr);
9147 set_mem_alias_set (mem, rs6000_sr_alias_set);
9149 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9152 /* Set LR here to try to overlap restores below. */
9153 if (info->lr_save_p)
9154 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9155 gen_rtx_REG (Pmode, 0));
9157 /* Load exception handler data registers, if needed. */
9158 if (current_function_calls_eh_return)
9160 unsigned int i, regno;
9166 regno = EH_RETURN_DATA_REGNO (i);
9167 if (regno == INVALID_REGNUM)
9170 addr = plus_constant (frame_reg_rtx,
9171 info->ehrd_offset + sp_offset
9172 + reg_size * (int) i);
9173 mem = gen_rtx_MEM (reg_mode, addr);
9174 set_mem_alias_set (mem, rs6000_sr_alias_set);
9176 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9180 /* Restore GPRs. This is done as a PARALLEL if we are using
9181 the load-multiple instructions. */
9182 if (using_load_multiple)
9185 p = rtvec_alloc (32 - info->first_gp_reg_save);
9186 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9188 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9189 GEN_INT (info->gp_save_offset
9192 rtx mem = gen_rtx_MEM (reg_mode, addr);
9194 set_mem_alias_set (mem, rs6000_sr_alias_set);
9197 gen_rtx_SET (VOIDmode,
9198 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9201 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9204 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9205 if ((regs_ever_live[info->first_gp_reg_save+i]
9206 && ! call_used_regs[info->first_gp_reg_save+i])
9207 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9208 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9209 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9211 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9212 GEN_INT (info->gp_save_offset
9215 rtx mem = gen_rtx_MEM (reg_mode, addr);
9217 set_mem_alias_set (mem, rs6000_sr_alias_set);
9219 emit_move_insn (gen_rtx_REG (reg_mode,
9220 info->first_gp_reg_save + i),
9224 /* Restore fpr's if we need to do it without calling a function. */
9225 if (restoring_FPRs_inline)
9226 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9227 if ((regs_ever_live[info->first_fp_reg_save+i]
9228 && ! call_used_regs[info->first_fp_reg_save+i]))
9231 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9232 GEN_INT (info->fp_save_offset
9235 mem = gen_rtx_MEM (DFmode, addr);
9236 set_mem_alias_set (mem, rs6000_sr_alias_set);
9238 emit_move_insn (gen_rtx_REG (DFmode,
9239 info->first_fp_reg_save + i),
9243 /* If we saved cr, restore it here. Just those that were used. */
9244 if (info->cr_save_p)
9246 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9249 if (using_mfcr_multiple)
9251 for (i = 0; i < 8; i++)
9252 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9258 if (using_mfcr_multiple && count > 1)
9263 p = rtvec_alloc (count);
9266 for (i = 0; i < 8; i++)
9267 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9269 rtvec r = rtvec_alloc (2);
9270 RTVEC_ELT (r, 0) = r12_rtx;
9271 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9272 RTVEC_ELT (p, ndx) =
9273 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9274 gen_rtx_UNSPEC (CCmode, r, 20));
9277 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9282 for (i = 0; i < 8; i++)
9283 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9285 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9291 /* If this is V.4, unwind the stack pointer after all of the loads
9292 have been done. We need to emit a block here so that sched
9293 doesn't decide to move the sp change before the register restores
9294 (which may not have any obvious dependency on the stack). This
9295 doesn't hurt performance, because there is no scheduling that can
9296 be done after this point. */
9297 if (DEFAULT_ABI == ABI_V4)
9299 if (frame_reg_rtx != sp_reg_rtx)
9300 rs6000_emit_stack_tie ();
9302 if (use_backchain_to_restore_sp)
9304 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9306 else if (sp_offset != 0)
9308 emit_insn (Pmode == SImode
9309 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9310 GEN_INT (sp_offset))
9311 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9312 GEN_INT (sp_offset)));
9316 if (current_function_calls_eh_return)
9318 rtx sa = EH_RETURN_STACKADJ_RTX;
9319 emit_insn (Pmode == SImode
9320 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9321 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9327 if (! restoring_FPRs_inline)
9328 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9330 p = rtvec_alloc (2);
9332 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9333 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9335 LINK_REGISTER_REGNUM));
9337 /* If we have to restore more than two FP registers, branch to the
9338 restore function. It will return to our caller. */
9339 if (! restoring_FPRs_inline)
9343 const char *alloc_rname;
9345 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9346 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9347 alloc_rname = ggc_strdup (rname);
9348 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9349 gen_rtx_SYMBOL_REF (Pmode,
9352 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9355 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9356 GEN_INT (info->fp_save_offset + 8*i));
9357 mem = gen_rtx_MEM (DFmode, addr);
9358 set_mem_alias_set (mem, rs6000_sr_alias_set);
9360 RTVEC_ELT (p, i+3) =
9361 gen_rtx_SET (VOIDmode,
9362 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9367 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9371 /* Write function epilogue. */
9374 rs6000_output_function_epilogue (file, size)
9376 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9378 rs6000_stack_t *info = rs6000_stack_info ();
9379 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9381 if (! HAVE_epilogue)
9383 rtx insn = get_last_insn ();
9384 /* If the last insn was a BARRIER, we don't have to write anything except
9386 if (GET_CODE (insn) == NOTE)
9387 insn = prev_nonnote_insn (insn);
9388 if (insn == 0 || GET_CODE (insn) != BARRIER)
9390 /* This is slightly ugly, but at least we don't have two
9391 copies of the epilogue-emitting code. */
9394 /* A NOTE_INSN_DELETED is supposed to be at the start
9395 and end of the "toplevel" insn chain. */
9396 emit_note (0, NOTE_INSN_DELETED);
9397 rs6000_emit_epilogue (FALSE);
9398 emit_note (0, NOTE_INSN_DELETED);
9400 if (TARGET_DEBUG_STACK)
9401 debug_rtx_list (get_insns (), 100);
9402 final (get_insns (), file, FALSE, FALSE);
9407 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9410 We don't output a traceback table if -finhibit-size-directive was
9411 used. The documentation for -finhibit-size-directive reads
9412 ``don't output a @code{.size} assembler directive, or anything
9413 else that would cause trouble if the function is split in the
9414 middle, and the two halves are placed at locations far apart in
9415 memory.'' The traceback table has this property, since it
9416 includes the offset from the start of the function to the
9417 traceback table itself.
9419 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9420 different traceback table. */
9421 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9423 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9424 const char *language_string = lang_hooks.name;
9425 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9428 while (*fname == '.') /* V.4 encodes . in the name */
9431 /* Need label immediately before tbtab, so we can compute its offset
9432 from the function start. */
9435 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9436 ASM_OUTPUT_LABEL (file, fname);
9438 /* The .tbtab pseudo-op can only be used for the first eight
9439 expressions, since it can't handle the possibly variable
9440 length fields that follow. However, if you omit the optional
9441 fields, the assembler outputs zeros for all optional fields
9442 anyways, giving each variable length field is minimum length
9443 (as defined in sys/debug.h). Thus we can not use the .tbtab
9444 pseudo-op at all. */
9446 /* An all-zero word flags the start of the tbtab, for debuggers
9447 that have to find it by searching forward from the entry
9448 point or from the current pc. */
9449 fputs ("\t.long 0\n", file);
9451 /* Tbtab format type. Use format type 0. */
9452 fputs ("\t.byte 0,", file);
9454 /* Language type. Unfortunately, there doesn't seem to be any
9455 official way to get this info, so we use language_string. C
9456 is 0. C++ is 9. No number defined for Obj-C, so use the
9457 value for C for now. There is no official value for Java,
9458 although IBM appears to be using 13. There is no official value
9459 for Chill, so we've chosen 44 pseudo-randomly. */
9460 if (! strcmp (language_string, "GNU C")
9461 || ! strcmp (language_string, "GNU Objective-C"))
9463 else if (! strcmp (language_string, "GNU F77"))
9465 else if (! strcmp (language_string, "GNU Ada"))
9467 else if (! strcmp (language_string, "GNU Pascal"))
9469 else if (! strcmp (language_string, "GNU C++"))
9471 else if (! strcmp (language_string, "GNU Java"))
9473 else if (! strcmp (language_string, "GNU CHILL"))
9477 fprintf (file, "%d,", i);
9479 /* 8 single bit fields: global linkage (not set for C extern linkage,
9480 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9481 from start of procedure stored in tbtab, internal function, function
9482 has controlled storage, function has no toc, function uses fp,
9483 function logs/aborts fp operations. */
9484 /* Assume that fp operations are used if any fp reg must be saved. */
9485 fprintf (file, "%d,",
9486 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9488 /* 6 bitfields: function is interrupt handler, name present in
9489 proc table, function calls alloca, on condition directives
9490 (controls stack walks, 3 bits), saves condition reg, saves
9492 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9493 set up as a frame pointer, even when there is no alloca call. */
9494 fprintf (file, "%d,",
9495 ((optional_tbtab << 6)
9496 | ((optional_tbtab & frame_pointer_needed) << 5)
9497 | (info->cr_save_p << 1)
9498 | (info->lr_save_p)));
9500 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9502 fprintf (file, "%d,",
9503 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9505 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9506 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9510 /* Compute the parameter info from the function decl argument
9513 int next_parm_info_bit = 31;
9515 for (decl = DECL_ARGUMENTS (current_function_decl);
9516 decl; decl = TREE_CHAIN (decl))
9518 rtx parameter = DECL_INCOMING_RTL (decl);
9519 enum machine_mode mode = GET_MODE (parameter);
9521 if (GET_CODE (parameter) == REG)
9523 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9531 else if (mode == DFmode)
9536 /* If only one bit will fit, don't or in this entry. */
9537 if (next_parm_info_bit > 0)
9538 parm_info |= (bits << (next_parm_info_bit - 1));
9539 next_parm_info_bit -= 2;
9543 fixed_parms += ((GET_MODE_SIZE (mode)
9544 + (UNITS_PER_WORD - 1))
9546 next_parm_info_bit -= 1;
9552 /* Number of fixed point parameters. */
9553 /* This is actually the number of words of fixed point parameters; thus
9554 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9555 fprintf (file, "%d,", fixed_parms);
9557 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9559 /* This is actually the number of fp registers that hold parameters;
9560 and thus the maximum value is 13. */
9561 /* Set parameters on stack bit if parameters are not in their original
9562 registers, regardless of whether they are on the stack? Xlc
9563 seems to set the bit when not optimizing. */
9564 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9566 if (! optional_tbtab)
9569 /* Optional fields follow. Some are variable length. */
9571 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9573 /* There is an entry for each parameter in a register, in the order that
9574 they occur in the parameter list. Any intervening arguments on the
9575 stack are ignored. If the list overflows a long (max possible length
9576 34 bits) then completely leave off all elements that don't fit. */
9577 /* Only emit this long if there was at least one parameter. */
9578 if (fixed_parms || float_parms)
9579 fprintf (file, "\t.long %d\n", parm_info);
9581 /* Offset from start of code to tb table. */
9582 fputs ("\t.long ", file);
9583 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9585 RS6000_OUTPUT_BASENAME (file, fname);
9587 assemble_name (file, fname);
9591 RS6000_OUTPUT_BASENAME (file, fname);
9593 assemble_name (file, fname);
9597 /* Interrupt handler mask. */
9598 /* Omit this long, since we never set the interrupt handler bit
9601 /* Number of CTL (controlled storage) anchors. */
9602 /* Omit this long, since the has_ctl bit is never set above. */
9604 /* Displacement into stack of each CTL anchor. */
9605 /* Omit this list of longs, because there are no CTL anchors. */
9607 /* Length of function name. */
9608 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9610 /* Function name. */
9611 assemble_string (fname, strlen (fname));
9613 /* Register for alloca automatic storage; this is always reg 31.
9614 Only emit this if the alloca bit was set above. */
9615 if (frame_pointer_needed)
9616 fputs ("\t.byte 31\n", file);
9618 fputs ("\t.align 2\n", file);
9623 /* A C compound statement that outputs the assembler code for a thunk
9624 function, used to implement C++ virtual function calls with
9625 multiple inheritance. The thunk acts as a wrapper around a virtual
9626 function, adjusting the implicit object parameter before handing
9627 control off to the real function.
9629 First, emit code to add the integer DELTA to the location that
9630 contains the incoming first argument. Assume that this argument
9631 contains a pointer, and is the one used to pass the `this' pointer
9632 in C++. This is the incoming argument *before* the function
9633 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9634 values of all other incoming arguments.
9636 After the addition, emit code to jump to FUNCTION, which is a
9637 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9638 not touch the return address. Hence returning from FUNCTION will
9639 return to whoever called the current `thunk'.
9641 The effect must be as if FUNCTION had been called directly with the
9642 adjusted first argument. This macro is responsible for emitting
9643 all of the code for a thunk function; output_function_prologue()
9644 and output_function_epilogue() are not invoked.
9646 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9647 been extracted from it.) It might possibly be useful on some
9648 targets, but probably not.
9650 If you do not define this macro, the target-independent code in the
9651 C++ frontend will generate a less efficient heavyweight thunk that
9652 calls FUNCTION instead of jumping to it. The generic approach does
9653 not support varargs. */
9656 output_mi_thunk (file, thunk_fndecl, delta, function)
9658 tree thunk_fndecl ATTRIBUTE_UNUSED;
9662 const char *this_reg =
9663 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9666 const char *r0 = reg_names[0];
9667 const char *toc = reg_names[2];
9668 const char *schain = reg_names[11];
9669 const char *r12 = reg_names[12];
9671 static int labelno = 0;
9673 /* Small constants that can be done by one add instruction. */
9674 if (delta >= -32768 && delta <= 32767)
9676 if (! TARGET_NEW_MNEMONICS)
9677 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9679 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9682 /* Large constants that can be done by one addis instruction. */
9683 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9684 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9687 /* 32-bit constants that can be done by an add and addis instruction. */
9688 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9690 /* Break into two pieces, propagating the sign bit from the low
9691 word to the upper word. */
9692 int delta_high = delta >> 16;
9693 int delta_low = delta & 0xffff;
9694 if ((delta_low & 0x8000) != 0)
9697 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9700 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9703 if (! TARGET_NEW_MNEMONICS)
9704 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9706 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9709 /* 64-bit constants, fixme */
9713 /* Get the prefix in front of the names. */
9714 switch (DEFAULT_ABI)
9724 case ABI_AIX_NODESC:
9729 /* If the function is compiled in this module, jump to it directly.
9730 Otherwise, load up its address and jump to it. */
9732 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9734 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9735 && ! lookup_attribute ("longcall",
9736 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9738 fprintf (file, "\tb %s", prefix);
9739 assemble_name (file, fname);
9740 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9746 switch (DEFAULT_ABI)
9752 /* Set up a TOC entry for the function. */
9753 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9755 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9758 if (TARGET_MINIMAL_TOC)
9759 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9762 fputs ("\t.tc ", file);
9763 assemble_name (file, fname);
9764 fputs ("[TC],", file);
9766 assemble_name (file, fname);
9769 if (TARGET_MINIMAL_TOC)
9770 asm_fprintf (file, (TARGET_32BIT)
9771 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9772 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9773 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9774 assemble_name (file, buf);
9775 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9776 fputs ("-(.LCTOC1)", file);
9777 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9779 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9783 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9786 asm_fprintf (file, "\tmtctr %s\n", r0);
9788 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9791 asm_fprintf (file, "\tbctr\n");
9794 case ABI_AIX_NODESC:
9796 fprintf (file, "\tb %s", prefix);
9797 assemble_name (file, fname);
9798 if (flag_pic) fputs ("@plt", file);
9804 fprintf (file, "\tb %s", prefix);
9805 if (flag_pic && !machopic_name_defined_p (fname))
9806 assemble_name (file, machopic_stub_name (fname));
9808 assemble_name (file, fname);
9817 /* A quick summary of the various types of 'constant-pool tables'
9820 Target Flags Name One table per
9821 AIX (none) AIX TOC object file
9822 AIX -mfull-toc AIX TOC object file
9823 AIX -mminimal-toc AIX minimal TOC translation unit
9824 SVR4/EABI (none) SVR4 SDATA object file
9825 SVR4/EABI -fpic SVR4 pic object file
9826 SVR4/EABI -fPIC SVR4 PIC translation unit
9827 SVR4/EABI -mrelocatable EABI TOC function
9828 SVR4/EABI -maix AIX TOC object file
9829 SVR4/EABI -maix -mminimal-toc
9830 AIX minimal TOC translation unit
9832 Name Reg. Set by entries contains:
9833 made by addrs? fp? sum?
9835 AIX TOC 2 crt0 as Y option option
9836 AIX minimal TOC 30 prolog gcc Y Y option
9837 SVR4 SDATA 13 crt0 gcc N Y N
9838 SVR4 pic 30 prolog ld Y not yet N
9839 SVR4 PIC 30 prolog gcc Y option option
9840 EABI TOC 30 prolog gcc Y option option
9844 /* Hash table stuff for keeping track of TOC entries. */
9846 struct toc_hash_struct
9848 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9849 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9851 enum machine_mode key_mode;
9855 static htab_t toc_hash_table;
9857 /* Hash functions for the hash table. */
9860 rs6000_hash_constant (k)
9863 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9864 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9865 int flen = strlen (format);
9868 if (GET_CODE (k) == LABEL_REF)
9869 return result * 1231 + X0INT (XEXP (k, 0), 3);
9871 if (GET_CODE (k) == CONST_DOUBLE)
9873 else if (GET_CODE (k) == CODE_LABEL)
9878 for (; fidx < flen; fidx++)
9879 switch (format[fidx])
9884 const char *str = XSTR (k, fidx);
9886 result = result * 613 + len;
9887 for (i = 0; i < len; i++)
9888 result = result * 613 + (unsigned) str[i];
9893 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9897 result = result * 613 + (unsigned) XINT (k, fidx);
9900 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9901 result = result * 613 + (unsigned) XWINT (k, fidx);
9905 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9906 result = result * 613 + (unsigned) (XWINT (k, fidx)
9917 toc_hash_function (hash_entry)
9918 const void * hash_entry;
9920 const struct toc_hash_struct *thc =
9921 (const struct toc_hash_struct *) hash_entry;
9922 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9925 /* Compare H1 and H2 for equivalence. */
9928 toc_hash_eq (h1, h2)
9932 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9933 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9935 if (((const struct toc_hash_struct *) h1)->key_mode
9936 != ((const struct toc_hash_struct *) h2)->key_mode)
9939 /* Gotcha: One of these const_doubles will be in memory.
9940 The other may be on the constant-pool chain.
9941 So rtx_equal_p will think they are different... */
9944 if (GET_CODE (r1) != GET_CODE (r2)
9945 || GET_MODE (r1) != GET_MODE (r2))
9947 if (GET_CODE (r1) == CONST_DOUBLE)
9949 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9951 for (i = 1; i < format_len; i++)
9952 if (XWINT (r1, i) != XWINT (r2, i))
9957 else if (GET_CODE (r1) == LABEL_REF)
9958 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9959 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9961 return rtx_equal_p (r1, r2);
9964 /* Mark the hash table-entry HASH_ENTRY. */
9967 toc_hash_mark_entry (hash_slot, unused)
9969 void * unused ATTRIBUTE_UNUSED;
9971 const struct toc_hash_struct * hash_entry =
9972 *(const struct toc_hash_struct **) hash_slot;
9973 rtx r = hash_entry->key;
9974 ggc_set_mark (hash_entry);
9975 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9976 if (GET_CODE (r) == LABEL_REF)
9979 ggc_set_mark (XEXP (r, 0));
9986 /* Mark all the elements of the TOC hash-table *HT. */
9989 toc_hash_mark_table (vht)
9994 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9997 /* These are the names given by the C++ front-end to vtables, and
9998 vtable-like objects. Ideally, this logic should not be here;
9999 instead, there should be some programmatic way of inquiring as
10000 to whether or not an object is a vtable. */
10002 #define VTABLE_NAME_P(NAME) \
10003 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10004 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10005 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10006 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10009 rs6000_output_symbol_ref (file, x)
10013 /* Currently C++ toc references to vtables can be emitted before it
10014 is decided whether the vtable is public or private. If this is
10015 the case, then the linker will eventually complain that there is
10016 a reference to an unknown section. Thus, for vtables only,
10017 we emit the TOC reference to reference the symbol and not the
10019 const char *name = XSTR (x, 0);
10021 if (VTABLE_NAME_P (name))
10023 RS6000_OUTPUT_BASENAME (file, name);
10026 assemble_name (file, name);
10029 /* Output a TOC entry. We derive the entry name from what is being
10033 output_toc (file, x, labelno, mode)
10037 enum machine_mode mode;
10040 const char *name = buf;
10041 const char *real_name;
10048 /* When the linker won't eliminate them, don't output duplicate
10049 TOC entries (this happens on AIX if there is any kind of TOC,
10050 and on SVR4 under -fPIC or -mrelocatable). */
10053 struct toc_hash_struct *h;
10056 h = ggc_alloc (sizeof (*h));
10058 h->key_mode = mode;
10059 h->labelno = labelno;
10061 found = htab_find_slot (toc_hash_table, h, 1);
10062 if (*found == NULL)
10064 else /* This is indeed a duplicate.
10065 Set this label equal to that label. */
10067 fputs ("\t.set ", file);
10068 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10069 fprintf (file, "%d,", labelno);
10070 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10071 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10077 /* If we're going to put a double constant in the TOC, make sure it's
10078 aligned properly when strict alignment is on. */
10079 if (GET_CODE (x) == CONST_DOUBLE
10080 && STRICT_ALIGNMENT
10081 && GET_MODE_BITSIZE (mode) >= 64
10082 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10083 ASM_OUTPUT_ALIGN (file, 3);
10086 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10088 /* Handle FP constants specially. Note that if we have a minimal
10089 TOC, things we put here aren't actually in the TOC, so we can allow
10091 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10093 REAL_VALUE_TYPE rv;
10096 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10097 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10101 if (TARGET_MINIMAL_TOC)
10102 fputs (DOUBLE_INT_ASM_OP, file);
10104 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10105 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10110 if (TARGET_MINIMAL_TOC)
10111 fputs ("\t.long ", file);
10113 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10114 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10118 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10120 REAL_VALUE_TYPE rv;
10123 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10124 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10128 if (TARGET_MINIMAL_TOC)
10129 fputs (DOUBLE_INT_ASM_OP, file);
10131 fprintf (file, "\t.tc FS_%lx[TC],", l);
10132 fprintf (file, "0x%lx00000000\n", l);
10137 if (TARGET_MINIMAL_TOC)
10138 fputs ("\t.long ", file);
10140 fprintf (file, "\t.tc FS_%lx[TC],", l);
10141 fprintf (file, "0x%lx\n", l);
10145 else if (GET_MODE (x) == VOIDmode
10146 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10148 unsigned HOST_WIDE_INT low;
10149 HOST_WIDE_INT high;
10151 if (GET_CODE (x) == CONST_DOUBLE)
10153 low = CONST_DOUBLE_LOW (x);
10154 high = CONST_DOUBLE_HIGH (x);
10157 #if HOST_BITS_PER_WIDE_INT == 32
10160 high = (low & 0x80000000) ? ~0 : 0;
10164 low = INTVAL (x) & 0xffffffff;
10165 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10169 /* TOC entries are always Pmode-sized, but since this
10170 is a bigendian machine then if we're putting smaller
10171 integer constants in the TOC we have to pad them.
10172 (This is still a win over putting the constants in
10173 a separate constant pool, because then we'd have
10174 to have both a TOC entry _and_ the actual constant.)
10176 For a 32-bit target, CONST_INT values are loaded and shifted
10177 entirely within `low' and can be stored in one TOC entry. */
10179 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10180 abort ();/* It would be easy to make this work, but it doesn't now. */
10182 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10183 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10184 POINTER_SIZE, &low, &high, 0);
10188 if (TARGET_MINIMAL_TOC)
10189 fputs (DOUBLE_INT_ASM_OP, file);
10191 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10192 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10197 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10199 if (TARGET_MINIMAL_TOC)
10200 fputs ("\t.long ", file);
10202 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10203 (long) high, (long) low);
10204 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10208 if (TARGET_MINIMAL_TOC)
10209 fputs ("\t.long ", file);
10211 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10212 fprintf (file, "0x%lx\n", (long) low);
10218 if (GET_CODE (x) == CONST)
10220 if (GET_CODE (XEXP (x, 0)) != PLUS)
10223 base = XEXP (XEXP (x, 0), 0);
10224 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10227 if (GET_CODE (base) == SYMBOL_REF)
10228 name = XSTR (base, 0);
10229 else if (GET_CODE (base) == LABEL_REF)
10230 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10231 else if (GET_CODE (base) == CODE_LABEL)
10232 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10236 STRIP_NAME_ENCODING (real_name, name);
10237 if (TARGET_MINIMAL_TOC)
10238 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10241 fprintf (file, "\t.tc %s", real_name);
10244 fprintf (file, ".N%d", - offset);
10246 fprintf (file, ".P%d", offset);
10248 fputs ("[TC],", file);
10251 /* Currently C++ toc references to vtables can be emitted before it
10252 is decided whether the vtable is public or private. If this is
10253 the case, then the linker will eventually complain that there is
10254 a TOC reference to an unknown section. Thus, for vtables only,
10255 we emit the TOC reference to reference the symbol and not the
10257 if (VTABLE_NAME_P (name))
10259 RS6000_OUTPUT_BASENAME (file, name);
10261 fprintf (file, "%d", offset);
10262 else if (offset > 0)
10263 fprintf (file, "+%d", offset);
10266 output_addr_const (file, x);
10270 /* Output an assembler pseudo-op to write an ASCII string of N characters
10271 starting at P to FILE.
10273 On the RS/6000, we have to do this using the .byte operation and
10274 write out special characters outside the quoted string.
10275 Also, the assembler is broken; very long strings are truncated,
10276 so we must artificially break them up early. */
10279 output_ascii (file, p, n)
10285 int i, count_string;
10286 const char *for_string = "\t.byte \"";
10287 const char *for_decimal = "\t.byte ";
10288 const char *to_close = NULL;
10291 for (i = 0; i < n; i++)
10294 if (c >= ' ' && c < 0177)
10297 fputs (for_string, file);
10300 /* Write two quotes to get one. */
10308 for_decimal = "\"\n\t.byte ";
10312 if (count_string >= 512)
10314 fputs (to_close, file);
10316 for_string = "\t.byte \"";
10317 for_decimal = "\t.byte ";
10325 fputs (for_decimal, file);
10326 fprintf (file, "%d", c);
10328 for_string = "\n\t.byte \"";
10329 for_decimal = ", ";
10335 /* Now close the string if we have written one. Then end the line. */
10337 fputs (to_close, file);
10340 /* Generate a unique section name for FILENAME for a section type
10341 represented by SECTION_DESC. Output goes into BUF.
10343 SECTION_DESC can be any string, as long as it is different for each
10344 possible section type.
10346 We name the section in the same manner as xlc. The name begins with an
10347 underscore followed by the filename (after stripping any leading directory
10348 names) with the last period replaced by the string SECTION_DESC. If
10349 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10353 rs6000_gen_section_name (buf, filename, section_desc)
10355 const char *filename;
10356 const char *section_desc;
10358 const char *q, *after_last_slash, *last_period = 0;
10362 after_last_slash = filename;
10363 for (q = filename; *q; q++)
10366 after_last_slash = q + 1;
10367 else if (*q == '.')
10371 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10372 *buf = (char *) permalloc (len);
10377 for (q = after_last_slash; *q; q++)
10379 if (q == last_period)
10381 strcpy (p, section_desc);
10382 p += strlen (section_desc);
10385 else if (ISALNUM (*q))
10389 if (last_period == 0)
10390 strcpy (p, section_desc);
10395 /* Emit profile function. */
10398 output_profile_hook (labelno)
10401 if (DEFAULT_ABI == ABI_AIX)
10404 const char *label_name;
10409 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10410 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10411 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10413 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10416 else if (DEFAULT_ABI == ABI_DARWIN)
10418 const char *mcount_name = RS6000_MCOUNT;
10419 int caller_addr_regno = LINK_REGISTER_REGNUM;
10421 /* Be conservative and always set this, at least for now. */
10422 current_function_uses_pic_offset_table = 1;
10425 /* For PIC code, set up a stub and collect the caller's address
10426 from r0, which is where the prologue puts it. */
10429 mcount_name = machopic_stub_name (mcount_name);
10430 if (current_function_uses_pic_offset_table)
10431 caller_addr_regno = 0;
10434 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10436 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10440 /* Write function profiler code. */
10443 output_function_profiler (file, labelno)
10449 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10450 switch (DEFAULT_ABI)
10456 case ABI_AIX_NODESC:
10457 fprintf (file, "\tmflr %s\n", reg_names[0]);
10460 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10461 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10462 reg_names[0], reg_names[1]);
10463 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10464 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10465 assemble_name (file, buf);
10466 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10468 else if (flag_pic > 1)
10470 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10471 reg_names[0], reg_names[1]);
10472 /* Now, we need to get the address of the label. */
10473 fputs ("\tbl 1f\n\t.long ", file);
10474 assemble_name (file, buf);
10475 fputs ("-.\n1:", file);
10476 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10477 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10478 reg_names[0], reg_names[11]);
10479 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10480 reg_names[0], reg_names[0], reg_names[11]);
10484 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10485 assemble_name (file, buf);
10486 fputs ("@ha\n", file);
10487 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10488 reg_names[0], reg_names[1]);
10489 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10490 assemble_name (file, buf);
10491 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10494 if (current_function_needs_context)
10495 asm_fprintf (file, "\tmr %s,%s\n",
10496 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10497 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10498 if (current_function_needs_context)
10499 asm_fprintf (file, "\tmr %s,%s\n",
10500 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10505 /* Don't do anything, done in output_profile_hook (). */
10511 /* Adjust the cost of a scheduling dependency. Return the new cost of
10512 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10515 rs6000_adjust_cost (insn, link, dep_insn, cost)
10518 rtx dep_insn ATTRIBUTE_UNUSED;
10521 if (! recog_memoized (insn))
10524 if (REG_NOTE_KIND (link) != 0)
10527 if (REG_NOTE_KIND (link) == 0)
10529 /* Data dependency; DEP_INSN writes a register that INSN reads
10530 some cycles later. */
10531 switch (get_attr_type (insn))
10534 /* Tell the first scheduling pass about the latency between
10535 a mtctr and bctr (and mtlr and br/blr). The first
10536 scheduling pass will not know about this latency since
10537 the mtctr instruction, which has the latency associated
10538 to it, will be generated by reload. */
10539 return TARGET_POWER ? 5 : 4;
10541 /* Leave some extra cycles between a compare and its
10542 dependent branch, to inhibit expensive mispredicts. */
10543 if ((rs6000_cpu_attr == CPU_PPC750
10544 || rs6000_cpu_attr == CPU_PPC7400
10545 || rs6000_cpu_attr == CPU_PPC7450)
10546 && recog_memoized (dep_insn)
10547 && (INSN_CODE (dep_insn) >= 0)
10548 && (get_attr_type (dep_insn) == TYPE_COMPARE
10549 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10550 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10551 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10556 /* Fall out to return default cost. */
10562 /* A C statement (sans semicolon) to update the integer scheduling
10563 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10564 INSN earlier, increase the priority to execute INSN later. Do not
10565 define this macro if you do not need to adjust the scheduling
10566 priorities of insns. */
10569 rs6000_adjust_priority (insn, priority)
10570 rtx insn ATTRIBUTE_UNUSED;
10573 /* On machines (like the 750) which have asymmetric integer units,
10574 where one integer unit can do multiply and divides and the other
10575 can't, reduce the priority of multiply/divide so it is scheduled
10576 before other integer operations. */
10579 if (! INSN_P (insn))
10582 if (GET_CODE (PATTERN (insn)) == USE)
10585 switch (rs6000_cpu_attr) {
10587 switch (get_attr_type (insn))
10594 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10595 priority, priority);
10596 if (priority >= 0 && priority < 0x01000000)
10606 /* Return how many instructions the machine can issue per cycle. */
10609 rs6000_issue_rate ()
10611 switch (rs6000_cpu_attr) {
10612 case CPU_RIOS1: /* ? */
10614 case CPU_PPC601: /* ? */
10633 /* Length in units of the trampoline for entering a nested function. */
10636 rs6000_trampoline_size ()
10640 switch (DEFAULT_ABI)
10646 ret = (TARGET_32BIT) ? 12 : 24;
10651 case ABI_AIX_NODESC:
10652 ret = (TARGET_32BIT) ? 40 : 48;
10659 /* Emit RTL insns to initialize the variable parts of a trampoline.
10660 FNADDR is an RTX for the address of the function's pure code.
10661 CXT is an RTX for the static chain value for the function. */
10664 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10669 enum machine_mode pmode = Pmode;
10670 int regsize = (TARGET_32BIT) ? 4 : 8;
10671 rtx ctx_reg = force_reg (pmode, cxt);
10673 switch (DEFAULT_ABI)
10678 /* Macros to shorten the code expansions below. */
10679 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10680 #define MEM_PLUS(addr,offset) \
10681 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10683 /* Under AIX, just build the 3 word function descriptor */
10686 rtx fn_reg = gen_reg_rtx (pmode);
10687 rtx toc_reg = gen_reg_rtx (pmode);
10688 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10689 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10690 emit_move_insn (MEM_DEREF (addr), fn_reg);
10691 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10692 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10696 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10699 case ABI_AIX_NODESC:
10700 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10701 FALSE, VOIDmode, 4,
10703 GEN_INT (rs6000_trampoline_size ()), SImode,
10713 /* Table of valid machine attributes. */
10715 const struct attribute_spec rs6000_attribute_table[] =
10717 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10718 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10719 { NULL, 0, 0, false, false, false, NULL }
10722 /* Handle a "longcall" attribute; arguments as in struct
10723 attribute_spec.handler. */
10726 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10729 tree args ATTRIBUTE_UNUSED;
10730 int flags ATTRIBUTE_UNUSED;
10731 bool *no_add_attrs;
10733 if (TREE_CODE (*node) != FUNCTION_TYPE
10734 && TREE_CODE (*node) != FIELD_DECL
10735 && TREE_CODE (*node) != TYPE_DECL)
10737 warning ("`%s' attribute only applies to functions",
10738 IDENTIFIER_POINTER (name));
10739 *no_add_attrs = true;
10745 /* Return a reference suitable for calling a function with the
10746 longcall attribute. */
10749 rs6000_longcall_ref (call_ref)
10752 const char *call_name;
10755 if (GET_CODE (call_ref) != SYMBOL_REF)
10758 /* System V adds '.' to the internal name, so skip them. */
10759 call_name = XSTR (call_ref, 0);
10760 if (*call_name == '.')
10762 while (*call_name == '.')
10765 node = get_identifier (call_name);
10766 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10769 return force_reg (Pmode, call_ref);
10773 /* A C statement or statements to switch to the appropriate section
10774 for output of RTX in mode MODE. You can assume that RTX is some
10775 kind of constant in RTL. The argument MODE is redundant except in
10776 the case of a `const_int' rtx. Select the section by calling
10777 `text_section' or one of the alternatives for other sections.
10779 Do not define this macro if you put all constants in the read-only
10782 #ifdef USING_ELFOS_H
10785 rs6000_select_rtx_section (mode, x)
10786 enum machine_mode mode;
10789 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10792 && (GET_CODE (x) == SYMBOL_REF
10793 || GET_CODE (x) == LABEL_REF
10794 || GET_CODE (x) == CONST))
10800 /* A C statement or statements to switch to the appropriate
10801 section for output of DECL. DECL is either a `VAR_DECL' node
10802 or a constant of some sort. RELOC indicates whether forming
10803 the initial value of DECL requires link-time relocations. */
10806 rs6000_select_section (decl, reloc)
10810 int size = int_size_in_bytes (TREE_TYPE (decl));
10813 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10820 needs_sdata = (size > 0
10821 && size <= g_switch_value
10822 && rs6000_sdata != SDATA_NONE
10823 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10825 if (TREE_CODE (decl) == STRING_CST)
10826 readonly = ! flag_writable_strings;
10827 else if (TREE_CODE (decl) == VAR_DECL)
10828 readonly = (! (flag_pic && reloc)
10829 && TREE_READONLY (decl)
10830 && ! TREE_SIDE_EFFECTS (decl)
10831 && DECL_INITIAL (decl)
10832 && DECL_INITIAL (decl) != error_mark_node
10833 && TREE_CONSTANT (DECL_INITIAL (decl)));
10834 else if (TREE_CODE (decl) == CONSTRUCTOR)
10835 readonly = (! (flag_pic && reloc)
10836 && ! TREE_SIDE_EFFECTS (decl)
10837 && TREE_CONSTANT (decl));
10840 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10843 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10846 /* A C statement to build up a unique section name, expressed as a
10847 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10848 RELOC indicates whether the initial value of EXP requires
10849 link-time relocations. If you do not define this macro, GCC will use
10850 the symbol name prefixed by `.' as the section name. Note - this
10851 macro can now be called for uninitialized data items as well as
10852 initialised data and functions. */
10855 rs6000_unique_section (decl, reloc)
10863 const char *prefix;
10865 static const char *const prefixes[7][2] =
10867 { ".rodata.", ".gnu.linkonce.r." },
10868 { ".sdata2.", ".gnu.linkonce.s2." },
10869 { ".data.", ".gnu.linkonce.d." },
10870 { ".sdata.", ".gnu.linkonce.s." },
10871 { ".bss.", ".gnu.linkonce.b." },
10872 { ".sbss.", ".gnu.linkonce.sb." },
10873 { ".text.", ".gnu.linkonce.t." }
10876 if (TREE_CODE (decl) == FUNCTION_DECL)
10885 if (TREE_CODE (decl) == STRING_CST)
10886 readonly = ! flag_writable_strings;
10887 else if (TREE_CODE (decl) == VAR_DECL)
10888 readonly = (! (flag_pic && reloc)
10889 && TREE_READONLY (decl)
10890 && ! TREE_SIDE_EFFECTS (decl)
10891 && TREE_CONSTANT (DECL_INITIAL (decl)));
10893 size = int_size_in_bytes (TREE_TYPE (decl));
10894 needs_sdata = (size > 0
10895 && size <= g_switch_value
10896 && rs6000_sdata != SDATA_NONE
10897 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10899 if (DECL_INITIAL (decl) == 0
10900 || DECL_INITIAL (decl) == error_mark_node)
10902 else if (! readonly)
10909 /* .sdata2 is only for EABI. */
10910 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10916 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10917 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10918 len = strlen (name) + strlen (prefix);
10919 string = alloca (len + 1);
10921 sprintf (string, "%s%s", prefix, name);
10923 DECL_SECTION_NAME (decl) = build_string (len, string);
10927 /* If we are referencing a function that is static or is known to be
10928 in this file, make the SYMBOL_REF special. We can use this to indicate
10929 that we can branch to this function without emitting a no-op after the
10930 call. For real AIX calling sequences, we also replace the
10931 function name with the real name (1 or 2 leading .'s), rather than
10932 the function descriptor name. This saves a lot of overriding code
10933 to read the prefixes. */
10936 rs6000_encode_section_info (decl, first)
10943 if (TREE_CODE (decl) == FUNCTION_DECL)
10945 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10946 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10947 && ! DECL_WEAK (decl))
10948 SYMBOL_REF_FLAG (sym_ref) = 1;
10950 if (DEFAULT_ABI == ABI_AIX)
10952 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10953 size_t len2 = strlen (XSTR (sym_ref, 0));
10954 char *str = alloca (len1 + len2 + 1);
10957 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10959 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10962 else if (rs6000_sdata != SDATA_NONE
10963 && DEFAULT_ABI == ABI_V4
10964 && TREE_CODE (decl) == VAR_DECL)
10966 int size = int_size_in_bytes (TREE_TYPE (decl));
10967 tree section_name = DECL_SECTION_NAME (decl);
10968 const char *name = (char *)0;
10973 if (TREE_CODE (section_name) == STRING_CST)
10975 name = TREE_STRING_POINTER (section_name);
10976 len = TREE_STRING_LENGTH (section_name);
10982 if ((size > 0 && size <= g_switch_value)
10984 && ((len == sizeof (".sdata") - 1
10985 && strcmp (name, ".sdata") == 0)
10986 || (len == sizeof (".sdata2") - 1
10987 && strcmp (name, ".sdata2") == 0)
10988 || (len == sizeof (".sbss") - 1
10989 && strcmp (name, ".sbss") == 0)
10990 || (len == sizeof (".sbss2") - 1
10991 && strcmp (name, ".sbss2") == 0)
10992 || (len == sizeof (".PPC.EMB.sdata0") - 1
10993 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10994 || (len == sizeof (".PPC.EMB.sbss0") - 1
10995 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10997 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10998 size_t len = strlen (XSTR (sym_ref, 0));
10999 char *str = alloca (len + 2);
11002 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11003 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11008 #endif /* USING_ELFOS_H */
11011 /* Return a REG that occurs in ADDR with coefficient 1.
11012 ADDR can be effectively incremented by incrementing REG.
11014 r0 is special and we must not select it as an address
11015 register by this routine since our caller will try to
11016 increment the returned register via an "la" instruction. */
11019 find_addr_reg (addr)
11022 while (GET_CODE (addr) == PLUS)
11024 if (GET_CODE (XEXP (addr, 0)) == REG
11025 && REGNO (XEXP (addr, 0)) != 0)
11026 addr = XEXP (addr, 0);
11027 else if (GET_CODE (XEXP (addr, 1)) == REG
11028 && REGNO (XEXP (addr, 1)) != 0)
11029 addr = XEXP (addr, 1);
11030 else if (CONSTANT_P (XEXP (addr, 0)))
11031 addr = XEXP (addr, 1);
11032 else if (CONSTANT_P (XEXP (addr, 1)))
11033 addr = XEXP (addr, 0);
11037 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11043 rs6000_fatal_bad_address (op)
11046 fatal_insn ("bad address", op);
11049 /* Called to register all of our global variables with the garbage
11053 rs6000_add_gc_roots ()
11055 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11056 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11058 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11059 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11060 toc_hash_mark_table);
11063 machopic_add_gc_roots ();
11070 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11071 reference and a constant. */
11074 symbolic_operand (op)
11077 switch (GET_CODE (op))
11084 return (GET_CODE (op) == SYMBOL_REF ||
11085 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11086 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11087 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11094 #ifdef RS6000_LONG_BRANCH
11096 static tree stub_list = 0;
11098 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11099 procedure calls to the linked list. */
11102 add_compiler_stub (label_name, function_name, line_number)
11104 tree function_name;
11107 tree stub = build_tree_list (function_name, label_name);
11108 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11109 TREE_CHAIN (stub) = stub_list;
11113 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11114 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11115 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11117 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11118 handling procedure calls from the linked list and initializes the
11122 output_compiler_stub ()
11125 char label_buf[256];
11127 tree tmp_stub, stub;
11130 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11132 fprintf (asm_out_file,
11133 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11135 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11136 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11137 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11138 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11140 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11142 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11145 label_buf[0] = '_';
11146 strcpy (label_buf+1,
11147 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11150 strcpy (tmp_buf, "lis r12,hi16(");
11151 strcat (tmp_buf, label_buf);
11152 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11153 strcat (tmp_buf, label_buf);
11154 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11155 output_asm_insn (tmp_buf, 0);
11157 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11158 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11159 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11160 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11166 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11167 already there or not. */
11170 no_previous_def (function_name)
11171 tree function_name;
11174 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11175 if (function_name == STUB_FUNCTION_NAME (stub))
11180 /* GET_PREV_LABEL gets the label name from the previous definition of
11184 get_prev_label (function_name)
11185 tree function_name;
11188 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11189 if (function_name == STUB_FUNCTION_NAME (stub))
11190 return STUB_LABEL_NAME (stub);
11194 /* INSN is either a function call or a millicode call. It may have an
11195 unconditional jump in its delay slot.
11197 CALL_DEST is the routine we are calling. */
11200 output_call (insn, call_dest, operand_number)
11203 int operand_number;
11205 static char buf[256];
11206 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11209 tree funname = get_identifier (XSTR (call_dest, 0));
11211 if (no_previous_def (funname))
11214 rtx label_rtx = gen_label_rtx ();
11215 char *label_buf, temp_buf[256];
11216 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11217 CODE_LABEL_NUMBER (label_rtx));
11218 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11219 labelname = get_identifier (label_buf);
11220 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11222 line_number = NOTE_LINE_NUMBER (insn);
11223 add_compiler_stub (labelname, funname, line_number);
11226 labelname = get_prev_label (funname);
11228 sprintf (buf, "jbsr %%z%d,%.246s",
11229 operand_number, IDENTIFIER_POINTER (labelname));
11234 sprintf (buf, "bl %%z%d", operand_number);
11239 #endif /* RS6000_LONG_BRANCH */
11241 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11243 const char *const symbol_ = (SYMBOL); \
11244 char *buffer_ = (BUF); \
11245 if (symbol_[0] == '"') \
11247 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11249 else if (name_needs_quotes(symbol_)) \
11251 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11255 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11260 /* Generate PIC and indirect symbol stubs. */
11263 machopic_output_stub (file, symb, stub)
11265 const char *symb, *stub;
11267 unsigned int length;
11268 char *symbol_name, *lazy_ptr_name;
11269 char *local_label_0;
11270 static int label = 0;
11272 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11273 STRIP_NAME_ENCODING (symb, symb);
11277 length = strlen (symb);
11278 symbol_name = alloca (length + 32);
11279 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11281 lazy_ptr_name = alloca (length + 32);
11282 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11284 local_label_0 = alloca (length + 32);
11285 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11288 machopic_picsymbol_stub_section ();
11290 machopic_symbol_stub_section ();
11292 fprintf (file, "%s:\n", stub);
11293 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11297 fprintf (file, "\tmflr r0\n");
11298 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11299 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11300 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11301 lazy_ptr_name, local_label_0);
11302 fprintf (file, "\tmtlr r0\n");
11303 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11304 lazy_ptr_name, local_label_0);
11305 fprintf (file, "\tmtctr r12\n");
11306 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11307 lazy_ptr_name, local_label_0);
11308 fprintf (file, "\tbctr\n");
11311 fprintf (file, "non-pure not supported\n");
11313 machopic_lazy_symbol_ptr_section ();
11314 fprintf (file, "%s:\n", lazy_ptr_name);
11315 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11316 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11319 /* Legitimize PIC addresses. If the address is already
11320 position-independent, we return ORIG. Newly generated
11321 position-independent addresses go into a reg. This is REG if non
11322 zero, otherwise we allocate register(s) as necessary. */
11324 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11327 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11329 enum machine_mode mode;
11334 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11335 reg = gen_reg_rtx (Pmode);
11337 if (GET_CODE (orig) == CONST)
11339 if (GET_CODE (XEXP (orig, 0)) == PLUS
11340 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11343 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11346 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11349 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11355 if (GET_CODE (offset) == CONST_INT)
11357 if (SMALL_INT (offset))
11358 return plus_constant (base, INTVAL (offset));
11359 else if (! reload_in_progress && ! reload_completed)
11360 offset = force_reg (Pmode, offset);
11363 rtx mem = force_const_mem (Pmode, orig);
11364 return machopic_legitimize_pic_address (mem, Pmode, reg);
11367 return gen_rtx (PLUS, Pmode, base, offset);
11370 /* Fall back on generic machopic code. */
11371 return machopic_legitimize_pic_address (orig, mode, reg);
11374 /* This is just a placeholder to make linking work without having to
11375 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11376 ever needed for Darwin (not too likely!) this would have to get a
11377 real definition. */
11384 #endif /* TARGET_MACHO */
11387 static unsigned int
11388 rs6000_elf_section_type_flags (decl, name, reloc)
11393 unsigned int flags = default_section_type_flags (decl, name, reloc);
11395 if (TARGET_RELOCATABLE)
11396 flags |= SECTION_WRITE;
11401 /* Record an element in the table of global constructors. SYMBOL is
11402 a SYMBOL_REF of the function to be called; PRIORITY is a number
11403 between 0 and MAX_INIT_PRIORITY.
11405 This differs from default_named_section_asm_out_constructor in
11406 that we have special handling for -mrelocatable. */
11409 rs6000_elf_asm_out_constructor (symbol, priority)
11413 const char *section = ".ctors";
11416 if (priority != DEFAULT_INIT_PRIORITY)
11418 sprintf (buf, ".ctors.%.5u",
11419 /* Invert the numbering so the linker puts us in the proper
11420 order; constructors are run from right to left, and the
11421 linker sorts in increasing order. */
11422 MAX_INIT_PRIORITY - priority);
11426 named_section_flags (section, SECTION_WRITE);
11427 assemble_align (POINTER_SIZE);
11429 if (TARGET_RELOCATABLE)
11431 fputs ("\t.long (", asm_out_file);
11432 output_addr_const (asm_out_file, symbol);
11433 fputs (")@fixup\n", asm_out_file);
11436 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11440 rs6000_elf_asm_out_destructor (symbol, priority)
11444 const char *section = ".dtors";
11447 if (priority != DEFAULT_INIT_PRIORITY)
11449 sprintf (buf, ".dtors.%.5u",
11450 /* Invert the numbering so the linker puts us in the proper
11451 order; constructors are run from right to left, and the
11452 linker sorts in increasing order. */
11453 MAX_INIT_PRIORITY - priority);
11457 named_section_flags (section, SECTION_WRITE);
11458 assemble_align (POINTER_SIZE);
11460 if (TARGET_RELOCATABLE)
11462 fputs ("\t.long (", asm_out_file);
11463 output_addr_const (asm_out_file, symbol);
11464 fputs (")@fixup\n", asm_out_file);
11467 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11471 #ifdef OBJECT_FORMAT_COFF
11473 xcoff_asm_named_section (name, flags)
11475 unsigned int flags ATTRIBUTE_UNUSED;
11477 fprintf (asm_out_file, "\t.csect %s\n", name);