1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 static int easy_vector_constant PARAMS ((rtx));
179 /* Default register names. */
180 char rs6000_reg_names[][8] =
182 "0", "1", "2", "3", "4", "5", "6", "7",
183 "8", "9", "10", "11", "12", "13", "14", "15",
184 "16", "17", "18", "19", "20", "21", "22", "23",
185 "24", "25", "26", "27", "28", "29", "30", "31",
186 "0", "1", "2", "3", "4", "5", "6", "7",
187 "8", "9", "10", "11", "12", "13", "14", "15",
188 "16", "17", "18", "19", "20", "21", "22", "23",
189 "24", "25", "26", "27", "28", "29", "30", "31",
190 "mq", "lr", "ctr","ap",
191 "0", "1", "2", "3", "4", "5", "6", "7",
193 /* AltiVec registers. */
194 "0", "1", "2", "3", "4", "5", "6", "7",
195 "8", "9", "10", "11", "12", "13", "14", "15",
196 "16", "17", "18", "19", "20", "21", "22", "23",
197 "24", "25", "26", "27", "28", "29", "30", "31",
201 #ifdef TARGET_REGNAMES
202 static const char alt_reg_names[][8] =
204 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
205 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
206 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
207 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
208 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
209 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
210 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
211 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
212 "mq", "lr", "ctr", "ap",
213 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
215 /* AltiVec registers. */
216 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
217 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
218 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
219 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
224 #ifndef MASK_STRICT_ALIGN
225 #define MASK_STRICT_ALIGN 0
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
256 /* This hook deals with fixups for relocatable code and DI-mode objects
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
287 struct gcc_target targetm = TARGET_INITIALIZER;
289 /* Override command line options. Mostly we process the processor
290 type and sometimes adjust other TARGET_ options. */
293 rs6000_override_options (default_cpu)
294 const char *default_cpu;
297 struct rs6000_cpu_select *ptr;
299 /* Simplify the entries below by making a mask for any POWER
300 variant and any PowerPC variant. */
302 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304 | MASK_PPC_GFXOPT | MASK_POWERPC64)
305 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
309 const char *const name; /* Canonical processor name. */
310 const enum processor_type processor; /* Processor type enum value. */
311 const int target_enable; /* Target flags to enable. */
312 const int target_disable; /* Target flags to disable. */
313 } const processor_target_table[]
314 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
315 POWER_MASKS | POWERPC_MASKS},
316 {"power", PROCESSOR_POWER,
317 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
318 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
319 {"power2", PROCESSOR_POWER,
320 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
321 POWERPC_MASKS | MASK_NEW_MNEMONICS},
322 {"power3", PROCESSOR_PPC630,
323 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
324 POWER_MASKS | MASK_PPC_GPOPT},
325 {"powerpc", PROCESSOR_POWERPC,
326 MASK_POWERPC | MASK_NEW_MNEMONICS,
327 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
328 {"powerpc64", PROCESSOR_POWERPC64,
329 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
330 POWER_MASKS | POWERPC_OPT_MASKS},
331 {"rios", PROCESSOR_RIOS1,
332 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
333 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
334 {"rios1", PROCESSOR_RIOS1,
335 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
336 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
337 {"rsc", PROCESSOR_PPC601,
338 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
339 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
340 {"rsc1", PROCESSOR_PPC601,
341 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
342 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
343 {"rios2", PROCESSOR_RIOS2,
344 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
345 POWERPC_MASKS | MASK_NEW_MNEMONICS},
346 {"rs64a", PROCESSOR_RS64A,
347 MASK_POWERPC | MASK_NEW_MNEMONICS,
348 POWER_MASKS | POWERPC_OPT_MASKS},
349 {"401", PROCESSOR_PPC403,
350 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
351 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
352 {"403", PROCESSOR_PPC403,
353 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
354 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
355 {"405", PROCESSOR_PPC405,
356 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
357 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
358 {"505", PROCESSOR_MPCCORE,
359 MASK_POWERPC | MASK_NEW_MNEMONICS,
360 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
361 {"601", PROCESSOR_PPC601,
362 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
363 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
364 {"602", PROCESSOR_PPC603,
365 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
366 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
367 {"603", PROCESSOR_PPC603,
368 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
369 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
370 {"603e", PROCESSOR_PPC603,
371 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
372 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
373 {"ec603e", PROCESSOR_PPC603,
374 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
375 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
376 {"604", PROCESSOR_PPC604,
377 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
378 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
379 {"604e", PROCESSOR_PPC604e,
380 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
381 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
382 {"620", PROCESSOR_PPC620,
383 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
384 POWER_MASKS | MASK_PPC_GPOPT},
385 {"630", PROCESSOR_PPC630,
386 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
387 POWER_MASKS | MASK_PPC_GPOPT},
388 {"740", PROCESSOR_PPC750,
389 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
390 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
391 {"750", PROCESSOR_PPC750,
392 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
393 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
394 {"7400", PROCESSOR_PPC7400,
395 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
396 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
397 {"7450", PROCESSOR_PPC7450,
398 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
399 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
400 {"801", PROCESSOR_MPCCORE,
401 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
402 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
403 {"821", PROCESSOR_MPCCORE,
404 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
405 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
406 {"823", PROCESSOR_MPCCORE,
407 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
408 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
409 {"860", PROCESSOR_MPCCORE,
410 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
411 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
413 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
415 /* Save current -mmultiple/-mno-multiple status. */
416 int multiple = TARGET_MULTIPLE;
417 /* Save current -mstring/-mno-string status. */
418 int string = TARGET_STRING;
420 /* Identify the processor type. */
421 rs6000_select[0].string = default_cpu;
422 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
424 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
426 ptr = &rs6000_select[i];
427 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
429 for (j = 0; j < ptt_size; j++)
430 if (! strcmp (ptr->string, processor_target_table[j].name))
433 rs6000_cpu = processor_target_table[j].processor;
437 target_flags |= processor_target_table[j].target_enable;
438 target_flags &= ~processor_target_table[j].target_disable;
444 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
448 /* If we are optimizing big endian systems for space, use the store
449 multiple instructions. */
450 if (BYTES_BIG_ENDIAN && optimize_size)
451 target_flags |= MASK_MULTIPLE;
453 /* If -mmultiple or -mno-multiple was explicitly used, don't
454 override with the processor default */
455 if (TARGET_MULTIPLE_SET)
456 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
458 /* If -mstring or -mno-string was explicitly used, don't override
459 with the processor default. */
460 if (TARGET_STRING_SET)
461 target_flags = (target_flags & ~MASK_STRING) | string;
463 /* Don't allow -mmultiple or -mstring on little endian systems
464 unless the cpu is a 750, because the hardware doesn't support the
465 instructions used in little endian mode, and causes an alignment
466 trap. The 750 does not cause an alignment trap (except when the
467 target is unaligned). */
469 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
473 target_flags &= ~MASK_MULTIPLE;
474 if (TARGET_MULTIPLE_SET)
475 warning ("-mmultiple is not supported on little endian systems");
480 target_flags &= ~MASK_STRING;
481 if (TARGET_STRING_SET)
482 warning ("-mstring is not supported on little endian systems");
486 if (flag_pic && DEFAULT_ABI == ABI_AIX && extra_warnings)
488 warning ("-f%s ignored (all code is position independent)",
489 (flag_pic > 1) ? "PIC" : "pic");
493 #ifdef XCOFF_DEBUGGING_INFO
494 if (flag_function_sections && (write_symbols != NO_DEBUG)
495 && DEFAULT_ABI == ABI_AIX)
497 warning ("-ffunction-sections disabled on AIX when debugging");
498 flag_function_sections = 0;
501 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
503 warning ("-fdata-sections not supported on AIX");
504 flag_data_sections = 0;
508 /* Set debug flags */
509 if (rs6000_debug_name)
511 if (! strcmp (rs6000_debug_name, "all"))
512 rs6000_debug_stack = rs6000_debug_arg = 1;
513 else if (! strcmp (rs6000_debug_name, "stack"))
514 rs6000_debug_stack = 1;
515 else if (! strcmp (rs6000_debug_name, "arg"))
516 rs6000_debug_arg = 1;
518 error ("unknown -mdebug-%s switch", rs6000_debug_name);
521 /* Set size of long double */
522 rs6000_long_double_type_size = 64;
523 if (rs6000_long_double_size_string)
526 int size = strtol (rs6000_long_double_size_string, &tail, 10);
527 if (*tail != '\0' || (size != 64 && size != 128))
528 error ("Unknown switch -mlong-double-%s",
529 rs6000_long_double_size_string);
531 rs6000_long_double_type_size = size;
534 /* Handle -mabi= options. */
535 rs6000_parse_abi_options ();
537 #ifdef TARGET_REGNAMES
538 /* If the user desires alternate register names, copy in the
539 alternate names now. */
541 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
544 #ifdef SUBTARGET_OVERRIDE_OPTIONS
545 SUBTARGET_OVERRIDE_OPTIONS;
547 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
548 SUBSUBTARGET_OVERRIDE_OPTIONS;
551 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
552 If -maix-struct-return or -msvr4-struct-return was explicitly
553 used, don't override with the ABI default. */
554 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
556 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
557 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
559 target_flags |= MASK_AIX_STRUCT_RET;
562 /* Register global variables with the garbage collector. */
563 rs6000_add_gc_roots ();
565 /* Allocate an alias set for register saves & restores from stack. */
566 rs6000_sr_alias_set = new_alias_set ();
569 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
571 /* We can only guarantee the availability of DI pseudo-ops when
572 assembling for 64-bit targets. */
575 targetm.asm_out.aligned_op.di = NULL;
576 targetm.asm_out.unaligned_op.di = NULL;
579 /* Arrange to save and restore machine status around nested functions. */
580 init_machine_status = rs6000_init_machine_status;
581 free_machine_status = rs6000_free_machine_status;
584 /* Handle -mabi= options. */
586 rs6000_parse_abi_options ()
588 if (rs6000_abi_string == 0)
590 else if (! strcmp (rs6000_abi_string, "altivec"))
591 rs6000_altivec_abi = 1;
592 else if (! strcmp (rs6000_abi_string, "no-altivec"))
593 rs6000_altivec_abi = 0;
595 error ("unknown ABI specified: '%s'", rs6000_abi_string);
599 optimization_options (level, size)
600 int level ATTRIBUTE_UNUSED;
601 int size ATTRIBUTE_UNUSED;
605 /* Do anything needed at the start of the asm file. */
608 rs6000_file_start (file, default_cpu)
610 const char *default_cpu;
614 const char *start = buffer;
615 struct rs6000_cpu_select *ptr;
617 if (flag_verbose_asm)
619 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
620 rs6000_select[0].string = default_cpu;
622 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
624 ptr = &rs6000_select[i];
625 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
627 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
633 switch (rs6000_sdata)
635 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
636 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
637 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
638 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
641 if (rs6000_sdata && g_switch_value)
643 fprintf (file, "%s -G %d", start, g_switch_value);
654 /* Create a CONST_DOUBLE from a string. */
657 rs6000_float_const (string, mode)
659 enum machine_mode mode;
661 REAL_VALUE_TYPE value;
662 value = REAL_VALUE_ATOF (string, mode);
663 return immed_real_const_1 (value, mode);
666 /* Return non-zero if this function is known to have a null epilogue. */
671 if (reload_completed)
673 rs6000_stack_t *info = rs6000_stack_info ();
675 if (info->first_gp_reg_save == 32
676 && info->first_fp_reg_save == 64
677 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
680 && info->vrsave_mask == 0
688 /* Returns 1 always. */
691 any_operand (op, mode)
692 rtx op ATTRIBUTE_UNUSED;
693 enum machine_mode mode ATTRIBUTE_UNUSED;
698 /* Returns 1 if op is the count register. */
700 count_register_operand (op, mode)
702 enum machine_mode mode ATTRIBUTE_UNUSED;
704 if (GET_CODE (op) != REG)
707 if (REGNO (op) == COUNT_REGISTER_REGNUM)
710 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
716 /* Returns 1 if op is an altivec register. */
718 altivec_register_operand (op, mode)
720 enum machine_mode mode ATTRIBUTE_UNUSED;
723 return (register_operand (op, mode)
724 && (GET_CODE (op) != REG
725 || REGNO (op) > FIRST_PSEUDO_REGISTER
726 || ALTIVEC_REGNO_P (REGNO (op))));
730 xer_operand (op, mode)
732 enum machine_mode mode ATTRIBUTE_UNUSED;
734 if (GET_CODE (op) != REG)
737 if (XER_REGNO_P (REGNO (op)))
743 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
744 by such constants completes more quickly. */
747 s8bit_cint_operand (op, mode)
749 enum machine_mode mode ATTRIBUTE_UNUSED;
751 return ( GET_CODE (op) == CONST_INT
752 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
755 /* Return 1 if OP is a constant that can fit in a D field. */
758 short_cint_operand (op, mode)
760 enum machine_mode mode ATTRIBUTE_UNUSED;
762 return (GET_CODE (op) == CONST_INT
763 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
766 /* Similar for an unsigned D field. */
769 u_short_cint_operand (op, mode)
771 enum machine_mode mode ATTRIBUTE_UNUSED;
773 return (GET_CODE (op) == CONST_INT
774 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
777 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
780 non_short_cint_operand (op, mode)
782 enum machine_mode mode ATTRIBUTE_UNUSED;
784 return (GET_CODE (op) == CONST_INT
785 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
788 /* Returns 1 if OP is a CONST_INT that is a positive value
789 and an exact power of 2. */
792 exact_log2_cint_operand (op, mode)
794 enum machine_mode mode ATTRIBUTE_UNUSED;
796 return (GET_CODE (op) == CONST_INT
798 && exact_log2 (INTVAL (op)) >= 0);
801 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
805 gpc_reg_operand (op, mode)
807 enum machine_mode mode;
809 return (register_operand (op, mode)
810 && (GET_CODE (op) != REG
811 || (REGNO (op) >= ARG_POINTER_REGNUM
812 && !XER_REGNO_P (REGNO (op)))
813 || REGNO (op) < MQ_REGNO));
816 /* Returns 1 if OP is either a pseudo-register or a register denoting a
820 cc_reg_operand (op, mode)
822 enum machine_mode mode;
824 return (register_operand (op, mode)
825 && (GET_CODE (op) != REG
826 || REGNO (op) >= FIRST_PSEUDO_REGISTER
827 || CR_REGNO_P (REGNO (op))));
830 /* Returns 1 if OP is either a pseudo-register or a register denoting a
831 CR field that isn't CR0. */
834 cc_reg_not_cr0_operand (op, mode)
836 enum machine_mode mode;
838 return (register_operand (op, mode)
839 && (GET_CODE (op) != REG
840 || REGNO (op) >= FIRST_PSEUDO_REGISTER
841 || CR_REGNO_NOT_CR0_P (REGNO (op))));
844 /* Returns 1 if OP is either a constant integer valid for a D-field or
845 a non-special register. If a register, it must be in the proper
846 mode unless MODE is VOIDmode. */
849 reg_or_short_operand (op, mode)
851 enum machine_mode mode;
853 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
856 /* Similar, except check if the negation of the constant would be
857 valid for a D-field. */
860 reg_or_neg_short_operand (op, mode)
862 enum machine_mode mode;
864 if (GET_CODE (op) == CONST_INT)
865 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
867 return gpc_reg_operand (op, mode);
870 /* Returns 1 if OP is either a constant integer valid for a DS-field or
871 a non-special register. If a register, it must be in the proper
872 mode unless MODE is VOIDmode. */
875 reg_or_aligned_short_operand (op, mode)
877 enum machine_mode mode;
879 if (gpc_reg_operand (op, mode))
881 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
888 /* Return 1 if the operand is either a register or an integer whose
889 high-order 16 bits are zero. */
892 reg_or_u_short_operand (op, mode)
894 enum machine_mode mode;
896 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
899 /* Return 1 is the operand is either a non-special register or ANY
903 reg_or_cint_operand (op, mode)
905 enum machine_mode mode;
907 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
910 /* Return 1 is the operand is either a non-special register or ANY
911 32-bit signed constant integer. */
914 reg_or_arith_cint_operand (op, mode)
916 enum machine_mode mode;
918 return (gpc_reg_operand (op, mode)
919 || (GET_CODE (op) == CONST_INT
920 #if HOST_BITS_PER_WIDE_INT != 32
921 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
922 < (unsigned HOST_WIDE_INT) 0x100000000ll)
927 /* Return 1 is the operand is either a non-special register or a 32-bit
928 signed constant integer valid for 64-bit addition. */
931 reg_or_add_cint64_operand (op, mode)
933 enum machine_mode mode;
935 return (gpc_reg_operand (op, mode)
936 || (GET_CODE (op) == CONST_INT
937 #if HOST_BITS_PER_WIDE_INT == 32
938 && INTVAL (op) < 0x7fff8000
940 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
946 /* Return 1 is the operand is either a non-special register or a 32-bit
947 signed constant integer valid for 64-bit subtraction. */
950 reg_or_sub_cint64_operand (op, mode)
952 enum machine_mode mode;
954 return (gpc_reg_operand (op, mode)
955 || (GET_CODE (op) == CONST_INT
956 #if HOST_BITS_PER_WIDE_INT == 32
957 && (- INTVAL (op)) < 0x7fff8000
959 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
965 /* Return 1 is the operand is either a non-special register or ANY
966 32-bit unsigned constant integer. */
969 reg_or_logical_cint_operand (op, mode)
971 enum machine_mode mode;
973 if (GET_CODE (op) == CONST_INT)
975 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
977 if (GET_MODE_BITSIZE (mode) <= 32)
984 return ((INTVAL (op) & GET_MODE_MASK (mode)
985 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
987 else if (GET_CODE (op) == CONST_DOUBLE)
989 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
993 return CONST_DOUBLE_HIGH (op) == 0;
996 return gpc_reg_operand (op, mode);
999 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1002 got_operand (op, mode)
1004 enum machine_mode mode ATTRIBUTE_UNUSED;
1006 return (GET_CODE (op) == SYMBOL_REF
1007 || GET_CODE (op) == CONST
1008 || GET_CODE (op) == LABEL_REF);
1011 /* Return 1 if the operand is a simple references that can be loaded via
1012 the GOT (labels involving addition aren't allowed). */
1015 got_no_const_operand (op, mode)
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1019 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1022 /* Return the number of instructions it takes to form a constant in an
1023 integer register. */
1026 num_insns_constant_wide (value)
1027 HOST_WIDE_INT value;
1029 /* signed constant loadable with {cal|addi} */
1030 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1033 /* constant loadable with {cau|addis} */
1034 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1037 #if HOST_BITS_PER_WIDE_INT == 64
1038 else if (TARGET_POWERPC64)
1040 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1041 HOST_WIDE_INT high = value >> 31;
1043 if (high == 0 || high == -1)
1049 return num_insns_constant_wide (high) + 1;
1051 return (num_insns_constant_wide (high)
1052 + num_insns_constant_wide (low) + 1);
1061 num_insns_constant (op, mode)
1063 enum machine_mode mode;
1065 if (GET_CODE (op) == CONST_INT)
1067 #if HOST_BITS_PER_WIDE_INT == 64
1068 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1069 && mask64_operand (op, mode))
1073 return num_insns_constant_wide (INTVAL (op));
1076 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1081 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1082 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1083 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1086 else if (GET_CODE (op) == CONST_DOUBLE)
1092 int endian = (WORDS_BIG_ENDIAN == 0);
1094 if (mode == VOIDmode || mode == DImode)
1096 high = CONST_DOUBLE_HIGH (op);
1097 low = CONST_DOUBLE_LOW (op);
1101 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1102 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1104 low = l[1 - endian];
1108 return (num_insns_constant_wide (low)
1109 + num_insns_constant_wide (high));
1113 if (high == 0 && low >= 0)
1114 return num_insns_constant_wide (low);
1116 else if (high == -1 && low < 0)
1117 return num_insns_constant_wide (low);
1119 else if (mask64_operand (op, mode))
1123 return num_insns_constant_wide (high) + 1;
1126 return (num_insns_constant_wide (high)
1127 + num_insns_constant_wide (low) + 1);
1135 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1136 register with one instruction per word. We only do this if we can
1137 safely read CONST_DOUBLE_{LOW,HIGH}. */
1140 easy_fp_constant (op, mode)
1142 enum machine_mode mode;
1144 if (GET_CODE (op) != CONST_DOUBLE
1145 || GET_MODE (op) != mode
1146 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1149 /* Consider all constants with -msoft-float to be easy. */
1150 if (TARGET_SOFT_FLOAT && mode != DImode)
1153 /* If we are using V.4 style PIC, consider all constants to be hard. */
1154 if (flag_pic && DEFAULT_ABI == ABI_V4)
1157 #ifdef TARGET_RELOCATABLE
1158 /* Similarly if we are using -mrelocatable, consider all constants
1160 if (TARGET_RELOCATABLE)
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1170 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1172 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1173 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1176 else if (mode == SFmode)
1181 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1182 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1184 return num_insns_constant_wide (l) == 1;
1187 else if (mode == DImode)
1188 return ((TARGET_POWERPC64
1189 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1190 || (num_insns_constant (op, DImode) <= 2));
1192 else if (mode == SImode)
1198 /* Return 1 if the operand is a CONST_INT and can be put into a
1199 register with one instruction. */
1202 easy_vector_constant (op)
1208 if (GET_CODE (op) != CONST_VECTOR)
1211 units = CONST_VECTOR_NUNITS (op);
1213 /* We can generate 0 easily. Look for that. */
1214 for (i = 0; i < units; ++i)
1216 elt = CONST_VECTOR_ELT (op, i);
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
1222 if (GET_CODE (elt) == CONST_INT && INTVAL (elt) != 0)
1225 if (GET_CODE (elt) == CONST_DOUBLE
1226 && (CONST_DOUBLE_LOW (elt) != 0
1227 || CONST_DOUBLE_HIGH (elt) != 0))
1231 /* We could probably generate a few other constants trivially, but
1232 gcc doesn't generate them yet. FIXME later. */
1236 /* Return 1 if the operand is the constant 0. This works for scalars
1237 as well as vectors. */
1239 zero_constant (op, mode)
1241 enum machine_mode mode;
1243 return op == CONST0_RTX (mode);
1246 /* Return 1 if the operand is 0.0. */
1248 zero_fp_constant (op, mode)
1250 enum machine_mode mode;
1252 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1255 /* Return 1 if the operand is in volatile memory. Note that during
1256 the RTL generation phase, memory_operand does not return TRUE for
1257 volatile memory references. So this function allows us to
1258 recognize volatile references where its safe. */
1261 volatile_mem_operand (op, mode)
1263 enum machine_mode mode;
1265 if (GET_CODE (op) != MEM)
1268 if (!MEM_VOLATILE_P (op))
1271 if (mode != GET_MODE (op))
1274 if (reload_completed)
1275 return memory_operand (op, mode);
1277 if (reload_in_progress)
1278 return strict_memory_address_p (mode, XEXP (op, 0));
1280 return memory_address_p (mode, XEXP (op, 0));
1283 /* Return 1 if the operand is an offsettable memory operand. */
1286 offsettable_mem_operand (op, mode)
1288 enum machine_mode mode;
1290 return ((GET_CODE (op) == MEM)
1291 && offsettable_address_p (reload_completed || reload_in_progress,
1292 mode, XEXP (op, 0)));
1295 /* Return 1 if the operand is either an easy FP constant (see above) or
1299 mem_or_easy_const_operand (op, mode)
1301 enum machine_mode mode;
1303 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1306 /* Return 1 if the operand is either a non-special register or an item
1307 that can be used as the operand of a `mode' add insn. */
1310 add_operand (op, mode)
1312 enum machine_mode mode;
1314 if (GET_CODE (op) == CONST_INT)
1315 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1316 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1318 return gpc_reg_operand (op, mode);
1321 /* Return 1 if OP is a constant but not a valid add_operand. */
1324 non_add_cint_operand (op, mode)
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1328 return (GET_CODE (op) == CONST_INT
1329 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1330 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1333 /* Return 1 if the operand is a non-special register or a constant that
1334 can be used as the operand of an OR or XOR insn on the RS/6000. */
1337 logical_operand (op, mode)
1339 enum machine_mode mode;
1341 HOST_WIDE_INT opl, oph;
1343 if (gpc_reg_operand (op, mode))
1346 if (GET_CODE (op) == CONST_INT)
1348 opl = INTVAL (op) & GET_MODE_MASK (mode);
1350 #if HOST_BITS_PER_WIDE_INT <= 32
1351 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1355 else if (GET_CODE (op) == CONST_DOUBLE)
1357 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1360 opl = CONST_DOUBLE_LOW (op);
1361 oph = CONST_DOUBLE_HIGH (op);
1368 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1369 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1372 /* Return 1 if C is a constant that is not a logical operand (as
1373 above), but could be split into one. */
1376 non_logical_cint_operand (op, mode)
1378 enum machine_mode mode;
1380 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1381 && ! logical_operand (op, mode)
1382 && reg_or_logical_cint_operand (op, mode));
1385 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1386 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1387 Reject all ones and all zeros, since these should have been optimized
1388 away and confuse the making of MB and ME. */
1391 mask_operand (op, mode)
1393 enum machine_mode mode ATTRIBUTE_UNUSED;
1395 HOST_WIDE_INT c, lsb;
1397 if (GET_CODE (op) != CONST_INT)
1402 /* Fail in 64-bit mode if the mask wraps around because the upper
1403 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1404 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1407 /* We don't change the number of transitions by inverting,
1408 so make sure we start with the LS bit zero. */
1412 /* Reject all zeros or all ones. */
1416 /* Find the first transition. */
1419 /* Invert to look for a second transition. */
1422 /* Erase first transition. */
1425 /* Find the second transition (if any). */
1428 /* Match if all the bits above are 1's (or c is zero). */
1432 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1433 It is if there are no more than one 1->0 or 0->1 transitions.
1434 Reject all ones and all zeros, since these should have been optimized
1435 away and confuse the making of MB and ME. */
1438 mask64_operand (op, mode)
1440 enum machine_mode mode;
1442 if (GET_CODE (op) == CONST_INT)
1444 HOST_WIDE_INT c, lsb;
1446 /* We don't change the number of transitions by inverting,
1447 so make sure we start with the LS bit zero. */
1452 /* Reject all zeros or all ones. */
1456 /* Find the transition, and check that all bits above are 1's. */
1460 else if (GET_CODE (op) == CONST_DOUBLE
1461 && (mode == VOIDmode || mode == DImode))
1463 HOST_WIDE_INT low, high, lsb;
1465 if (HOST_BITS_PER_WIDE_INT < 64)
1466 high = CONST_DOUBLE_HIGH (op);
1468 low = CONST_DOUBLE_LOW (op);
1471 if (HOST_BITS_PER_WIDE_INT < 64)
1478 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1482 return high == -lsb;
1486 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1492 /* Return 1 if the operand is either a non-special register or a constant
1493 that can be used as the operand of a PowerPC64 logical AND insn. */
1496 and64_operand (op, mode)
1498 enum machine_mode mode;
1500 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1501 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1503 return (logical_operand (op, mode) || mask64_operand (op, mode));
1506 /* Return 1 if the operand is either a non-special register or a
1507 constant that can be used as the operand of an RS/6000 logical AND insn. */
1510 and_operand (op, mode)
1512 enum machine_mode mode;
1514 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1515 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1517 return (logical_operand (op, mode) || mask_operand (op, mode));
1520 /* Return 1 if the operand is a general register or memory operand. */
1523 reg_or_mem_operand (op, mode)
1525 enum machine_mode mode;
1527 return (gpc_reg_operand (op, mode)
1528 || memory_operand (op, mode)
1529 || volatile_mem_operand (op, mode));
1532 /* Return 1 if the operand is a general register or memory operand without
1533 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1537 lwa_operand (op, mode)
1539 enum machine_mode mode;
1543 if (reload_completed && GET_CODE (inner) == SUBREG)
1544 inner = SUBREG_REG (inner);
1546 return gpc_reg_operand (inner, mode)
1547 || (memory_operand (inner, mode)
1548 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1549 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1550 && (GET_CODE (XEXP (inner, 0)) != PLUS
1551 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1552 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1555 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1556 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1560 call_operand (op, mode)
1562 enum machine_mode mode;
1564 if (mode != VOIDmode && GET_MODE (op) != mode)
1567 return (GET_CODE (op) == SYMBOL_REF
1568 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1571 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1572 this file and the function is not weakly defined. */
1575 current_file_function_operand (op, mode)
1577 enum machine_mode mode ATTRIBUTE_UNUSED;
1579 return (GET_CODE (op) == SYMBOL_REF
1580 && (SYMBOL_REF_FLAG (op)
1581 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1582 && ! DECL_WEAK (current_function_decl))));
1585 /* Return 1 if this operand is a valid input for a move insn. */
1588 input_operand (op, mode)
1590 enum machine_mode mode;
1592 /* Memory is always valid. */
1593 if (memory_operand (op, mode))
1596 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1597 if (GET_CODE (op) == CONSTANT_P_RTX)
1600 /* For floating-point, easy constants are valid. */
1601 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1603 && easy_fp_constant (op, mode))
1606 /* Allow any integer constant. */
1607 if (GET_MODE_CLASS (mode) == MODE_INT
1608 && (GET_CODE (op) == CONST_INT
1609 || GET_CODE (op) == CONST_DOUBLE))
1612 /* For floating-point or multi-word mode, the only remaining valid type
1614 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1615 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1616 return register_operand (op, mode);
1618 /* The only cases left are integral modes one word or smaller (we
1619 do not get called for MODE_CC values). These can be in any
1621 if (register_operand (op, mode))
1624 /* A SYMBOL_REF referring to the TOC is valid. */
1625 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1628 /* A constant pool expression (relative to the TOC) is valid */
1629 if (TOC_RELATIVE_EXPR_P (op))
1632 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1634 if (DEFAULT_ABI == ABI_V4
1635 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1636 && small_data_operand (op, Pmode))
1642 /* Return 1 for an operand in small memory on V.4/eabi. */
1645 small_data_operand (op, mode)
1646 rtx op ATTRIBUTE_UNUSED;
1647 enum machine_mode mode ATTRIBUTE_UNUSED;
1652 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1655 if (DEFAULT_ABI != ABI_V4)
1658 if (GET_CODE (op) == SYMBOL_REF)
1661 else if (GET_CODE (op) != CONST
1662 || GET_CODE (XEXP (op, 0)) != PLUS
1663 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1664 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1669 rtx sum = XEXP (op, 0);
1670 HOST_WIDE_INT summand;
1672 /* We have to be careful here, because it is the referenced address
1673 that must be 32k from _SDA_BASE_, not just the symbol. */
1674 summand = INTVAL (XEXP (sum, 1));
1675 if (summand < 0 || summand > g_switch_value)
1678 sym_ref = XEXP (sum, 0);
1681 if (*XSTR (sym_ref, 0) != '@')
1692 constant_pool_expr_1 (op, have_sym, have_toc)
1697 switch (GET_CODE(op))
1700 if (CONSTANT_POOL_ADDRESS_P (op))
1702 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1710 else if (! strcmp (XSTR (op, 0), toc_label_name))
1719 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1720 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1722 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1731 constant_pool_expr_p (op)
1736 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1740 toc_relative_expr_p (op)
1745 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1748 /* Try machine-dependent ways of modifying an illegitimate address
1749 to be legitimate. If we find one, return the new, valid address.
1750 This is used from only one place: `memory_address' in explow.c.
1752 OLDX is the address as it was before break_out_memory_refs was
1753 called. In some cases it is useful to look at this to decide what
1756 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1758 It is always safe for this function to do nothing. It exists to
1759 recognize opportunities to optimize the output.
1761 On RS/6000, first check for the sum of a register with a constant
1762 integer that is out of range. If so, generate code to add the
1763 constant with the low-order 16 bits masked to the register and force
1764 this result into another register (this can be done with `cau').
1765 Then generate an address of REG+(CONST&0xffff), allowing for the
1766 possibility of bit 16 being a one.
1768 Then check for the sum of a register and something not constant, try to
1769 load the other things into a register and return the sum. */
1771 rs6000_legitimize_address (x, oldx, mode)
1773 rtx oldx ATTRIBUTE_UNUSED;
1774 enum machine_mode mode;
1776 if (GET_CODE (x) == PLUS
1777 && GET_CODE (XEXP (x, 0)) == REG
1778 && GET_CODE (XEXP (x, 1)) == CONST_INT
1779 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1781 HOST_WIDE_INT high_int, low_int;
1783 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1784 high_int = INTVAL (XEXP (x, 1)) - low_int;
1785 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1786 GEN_INT (high_int)), 0);
1787 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1789 else if (GET_CODE (x) == PLUS
1790 && GET_CODE (XEXP (x, 0)) == REG
1791 && GET_CODE (XEXP (x, 1)) != CONST_INT
1792 && GET_MODE_NUNITS (mode) == 1
1793 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1794 && (TARGET_POWERPC64 || mode != DImode)
1797 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1798 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1800 else if (ALTIVEC_VECTOR_MODE (mode))
1804 /* Make sure both operands are registers. */
1805 if (GET_CODE (x) == PLUS)
1806 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1807 force_reg (Pmode, XEXP (x, 1)));
1809 reg = force_reg (Pmode, x);
1812 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1813 && GET_CODE (x) != CONST_INT
1814 && GET_CODE (x) != CONST_DOUBLE
1816 && GET_MODE_NUNITS (mode) == 1
1817 && (GET_MODE_BITSIZE (mode) <= 32
1818 || (TARGET_HARD_FLOAT && mode == DFmode)))
1820 rtx reg = gen_reg_rtx (Pmode);
1821 emit_insn (gen_elf_high (reg, (x)));
1822 return gen_rtx_LO_SUM (Pmode, reg, (x));
1824 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1826 && GET_CODE (x) != CONST_INT
1827 && GET_CODE (x) != CONST_DOUBLE
1829 && (TARGET_HARD_FLOAT || mode != DFmode)
1833 rtx reg = gen_reg_rtx (Pmode);
1834 emit_insn (gen_macho_high (reg, (x)));
1835 return gen_rtx_LO_SUM (Pmode, reg, (x));
1838 && CONSTANT_POOL_EXPR_P (x)
1839 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1841 return create_TOC_reference (x);
1847 /* The convention appears to be to define this wherever it is used.
1848 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1849 is now used here. */
1850 #ifndef REG_MODE_OK_FOR_BASE_P
1851 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1854 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1855 replace the input X, or the original X if no replacement is called for.
1856 The output parameter *WIN is 1 if the calling macro should goto WIN,
1859 For RS/6000, we wish to handle large displacements off a base
1860 register by splitting the addend across an addiu/addis and the mem insn.
1861 This cuts number of extra insns needed from 3 to 1.
1863 On Darwin, we use this to generate code for floating point constants.
1864 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1865 The Darwin code is inside #if TARGET_MACHO because only then is
1866 machopic_function_base_name() defined. */
1868 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1870 enum machine_mode mode;
1873 int ind_levels ATTRIBUTE_UNUSED;
1876 /* We must recognize output that we have already generated ourselves. */
1877 if (GET_CODE (x) == PLUS
1878 && GET_CODE (XEXP (x, 0)) == PLUS
1879 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1880 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1881 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1883 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1884 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1885 opnum, (enum reload_type)type);
1891 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1892 && GET_CODE (x) == LO_SUM
1893 && GET_CODE (XEXP (x, 0)) == PLUS
1894 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1895 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1896 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1897 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1898 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1899 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1900 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1902 /* Result of previous invocation of this function on Darwin
1903 floating point constant. */
1904 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1905 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1906 opnum, (enum reload_type)type);
1911 if (GET_CODE (x) == PLUS
1912 && GET_CODE (XEXP (x, 0)) == REG
1913 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1914 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1915 && GET_CODE (XEXP (x, 1)) == CONST_INT
1916 && !ALTIVEC_VECTOR_MODE (mode))
1918 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1919 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1921 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1923 /* Check for 32-bit overflow. */
1924 if (high + low != val)
1930 /* Reload the high part into a base reg; leave the low part
1931 in the mem directly. */
1933 x = gen_rtx_PLUS (GET_MODE (x),
1934 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
1938 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1939 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1940 opnum, (enum reload_type)type);
1945 if (GET_CODE (x) == SYMBOL_REF
1946 && DEFAULT_ABI == ABI_DARWIN
1947 && !ALTIVEC_VECTOR_MODE (mode)
1950 /* Darwin load of floating point constant. */
1951 rtx offset = gen_rtx (CONST, Pmode,
1952 gen_rtx (MINUS, Pmode, x,
1953 gen_rtx (SYMBOL_REF, Pmode,
1954 machopic_function_base_name ())));
1955 x = gen_rtx (LO_SUM, GET_MODE (x),
1956 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
1957 gen_rtx (HIGH, Pmode, offset)), offset);
1958 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1959 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1960 opnum, (enum reload_type)type);
1966 && CONSTANT_POOL_EXPR_P (x)
1967 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
1969 (x) = create_TOC_reference (x);
1977 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1978 that is a valid memory address for an instruction.
1979 The MODE argument is the machine mode for the MEM expression
1980 that wants to use this address.
1982 On the RS/6000, there are four valid address: a SYMBOL_REF that
1983 refers to a constant pool entry of an address (or the sum of it
1984 plus a constant), a short (16-bit signed) constant plus a register,
1985 the sum of two registers, or a register indirect, possibly with an
1986 auto-increment. For DFmode and DImode with an constant plus register,
1987 we must ensure that both words are addressable or PowerPC64 with offset
1990 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1991 32-bit DImode, TImode), indexed addressing cannot be used because
1992 adjacent memory cells are accessed by adding word-sized offsets
1993 during assembly output. */
1995 rs6000_legitimate_address (mode, x, reg_ok_strict)
1996 enum machine_mode mode;
2000 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2002 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2004 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2006 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2008 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2010 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2012 && GET_CODE (x) == PLUS
2013 && GET_CODE (XEXP (x, 0)) == REG
2014 && XEXP (x, 0) == virtual_stack_vars_rtx
2015 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2017 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2020 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2021 && (TARGET_POWERPC64 || mode != DImode)
2022 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2024 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2029 /* Try to output insns to set TARGET equal to the constant C if it can
2030 be done in less than N insns. Do all computations in MODE.
2031 Returns the place where the output has been placed if it can be
2032 done and the insns have been emitted. If it would take more than N
2033 insns, zero is returned and no insns and emitted. */
2036 rs6000_emit_set_const (dest, mode, source, n)
2038 enum machine_mode mode;
2039 int n ATTRIBUTE_UNUSED;
2041 HOST_WIDE_INT c0, c1;
2043 if (mode == QImode || mode == HImode || mode == SImode)
2046 dest = gen_reg_rtx (mode);
2047 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2051 if (GET_CODE (source) == CONST_INT)
2053 c0 = INTVAL (source);
2056 else if (GET_CODE (source) == CONST_DOUBLE)
2058 #if HOST_BITS_PER_WIDE_INT >= 64
2059 c0 = CONST_DOUBLE_LOW (source);
2062 c0 = CONST_DOUBLE_LOW (source);
2063 c1 = CONST_DOUBLE_HIGH (source);
2069 return rs6000_emit_set_long_const (dest, c0, c1);
2072 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2073 fall back to a straight forward decomposition. We do this to avoid
2074 exponential run times encountered when looking for longer sequences
2075 with rs6000_emit_set_const. */
2077 rs6000_emit_set_long_const (dest, c1, c2)
2079 HOST_WIDE_INT c1, c2;
2081 if (!TARGET_POWERPC64)
2083 rtx operand1, operand2;
2085 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2087 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2089 emit_move_insn (operand1, GEN_INT (c1));
2090 emit_move_insn (operand2, GEN_INT (c2));
2094 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2097 ud2 = (c1 & 0xffff0000) >> 16;
2098 #if HOST_BITS_PER_WIDE_INT >= 64
2102 ud4 = (c2 & 0xffff0000) >> 16;
2104 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2105 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2108 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2110 emit_move_insn (dest, GEN_INT (ud1));
2113 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2114 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2117 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2120 emit_move_insn (dest, GEN_INT (ud2 << 16));
2122 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2124 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2125 || (ud4 == 0 && ! (ud3 & 0x8000)))
2128 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2131 emit_move_insn (dest, GEN_INT (ud3 << 16));
2134 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2135 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2137 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2142 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2145 emit_move_insn (dest, GEN_INT (ud4 << 16));
2148 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2150 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2152 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2153 GEN_INT (ud2 << 16)));
2155 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2161 /* Emit a move from SOURCE to DEST in mode MODE. */
2163 rs6000_emit_move (dest, source, mode)
2166 enum machine_mode mode;
2170 operands[1] = source;
2172 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2173 if (GET_CODE (operands[1]) == CONST_DOUBLE
2174 && ! FLOAT_MODE_P (mode)
2175 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2177 /* FIXME. This should never happen. */
2178 /* Since it seems that it does, do the safe thing and convert
2180 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2182 if (GET_CODE (operands[1]) == CONST_DOUBLE
2183 && ! FLOAT_MODE_P (mode)
2184 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2185 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2186 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2187 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2190 /* Check if GCC is setting up a block move that will end up using FP
2191 registers as temporaries. We must make sure this is acceptable. */
2192 if (GET_CODE (operands[0]) == MEM
2193 && GET_CODE (operands[1]) == MEM
2195 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2196 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2197 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2198 ? 32 : MEM_ALIGN (operands[0])))
2199 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2201 : MEM_ALIGN (operands[1]))))
2202 && ! MEM_VOLATILE_P (operands [0])
2203 && ! MEM_VOLATILE_P (operands [1]))
2205 emit_move_insn (adjust_address (operands[0], SImode, 0),
2206 adjust_address (operands[1], SImode, 0));
2207 emit_move_insn (adjust_address (operands[0], SImode, 4),
2208 adjust_address (operands[1], SImode, 4));
2212 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2213 operands[1] = force_reg (mode, operands[1]);
2215 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2216 && GET_CODE (operands[0]) == MEM)
2220 if (reload_in_progress || reload_completed)
2221 regnum = true_regnum (operands[1]);
2222 else if (GET_CODE (operands[1]) == REG)
2223 regnum = REGNO (operands[1]);
2227 /* If operands[1] is a register, on POWER it may have
2228 double-precision data in it, so truncate it to single
2230 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2233 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2234 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2235 operands[1] = newreg;
2239 /* Handle the case where reload calls us with an invalid address;
2240 and the case of CONSTANT_P_RTX. */
2241 if (!VECTOR_MODE_P (mode)
2242 && (! general_operand (operands[1], mode)
2243 || ! nonimmediate_operand (operands[0], mode)
2244 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2246 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2250 /* FIXME: In the long term, this switch statement should go away
2251 and be replaced by a sequence of tests based on things like
2257 if (CONSTANT_P (operands[1])
2258 && GET_CODE (operands[1]) != CONST_INT)
2259 operands[1] = force_const_mem (mode, operands[1]);
2265 if (CONSTANT_P (operands[1])
2266 && ! easy_fp_constant (operands[1], mode))
2267 operands[1] = force_const_mem (mode, operands[1]);
2274 if (CONSTANT_P (operands[1])
2275 && !easy_vector_constant (operands[1]))
2276 operands[1] = force_const_mem (mode, operands[1]);
2281 /* Use default pattern for address of ELF small data */
2284 && DEFAULT_ABI == ABI_V4
2285 && (GET_CODE (operands[1]) == SYMBOL_REF
2286 || GET_CODE (operands[1]) == CONST)
2287 && small_data_operand (operands[1], mode))
2289 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2293 if (DEFAULT_ABI == ABI_V4
2294 && mode == Pmode && mode == SImode
2295 && flag_pic == 1 && got_operand (operands[1], mode))
2297 emit_insn (gen_movsi_got (operands[0], operands[1]));
2301 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2302 && TARGET_NO_TOC && ! flag_pic
2304 && CONSTANT_P (operands[1])
2305 && GET_CODE (operands[1]) != HIGH
2306 && GET_CODE (operands[1]) != CONST_INT)
2308 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2310 /* If this is a function address on -mcall-aixdesc,
2311 convert it to the address of the descriptor. */
2312 if (DEFAULT_ABI == ABI_AIX
2313 && GET_CODE (operands[1]) == SYMBOL_REF
2314 && XSTR (operands[1], 0)[0] == '.')
2316 const char *name = XSTR (operands[1], 0);
2318 while (*name == '.')
2320 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2321 CONSTANT_POOL_ADDRESS_P (new_ref)
2322 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2323 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2324 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2325 operands[1] = new_ref;
2328 if (DEFAULT_ABI == ABI_DARWIN)
2330 emit_insn (gen_macho_high (target, operands[1]));
2331 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2335 emit_insn (gen_elf_high (target, operands[1]));
2336 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2340 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2341 and we have put it in the TOC, we just need to make a TOC-relative
2344 && GET_CODE (operands[1]) == SYMBOL_REF
2345 && CONSTANT_POOL_EXPR_P (operands[1])
2346 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2347 get_pool_mode (operands[1])))
2349 operands[1] = create_TOC_reference (operands[1]);
2351 else if (mode == Pmode
2352 && CONSTANT_P (operands[1])
2353 && ((GET_CODE (operands[1]) != CONST_INT
2354 && ! easy_fp_constant (operands[1], mode))
2355 || (GET_CODE (operands[1]) == CONST_INT
2356 && num_insns_constant (operands[1], mode) > 2)
2357 || (GET_CODE (operands[0]) == REG
2358 && FP_REGNO_P (REGNO (operands[0]))))
2359 && GET_CODE (operands[1]) != HIGH
2360 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2361 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2363 /* Emit a USE operation so that the constant isn't deleted if
2364 expensive optimizations are turned on because nobody
2365 references it. This should only be done for operands that
2366 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2367 This should not be done for operands that contain LABEL_REFs.
2368 For now, we just handle the obvious case. */
2369 if (GET_CODE (operands[1]) != LABEL_REF)
2370 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2373 /* Darwin uses a special PIC legitimizer. */
2374 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2377 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2379 if (operands[0] != operands[1])
2380 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2385 /* If we are to limit the number of things we put in the TOC and
2386 this is a symbol plus a constant we can add in one insn,
2387 just put the symbol in the TOC and add the constant. Don't do
2388 this if reload is in progress. */
2389 if (GET_CODE (operands[1]) == CONST
2390 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2391 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2392 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2393 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2394 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2395 && ! side_effects_p (operands[0]))
2398 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2399 rtx other = XEXP (XEXP (operands[1], 0), 1);
2401 sym = force_reg (mode, sym);
2403 emit_insn (gen_addsi3 (operands[0], sym, other));
2405 emit_insn (gen_adddi3 (operands[0], sym, other));
2409 operands[1] = force_const_mem (mode, operands[1]);
2412 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2413 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2414 get_pool_constant (XEXP (operands[1], 0)),
2415 get_pool_mode (XEXP (operands[1], 0))))
2418 = gen_rtx_MEM (mode,
2419 create_TOC_reference (XEXP (operands[1], 0)));
2420 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2421 RTX_UNCHANGING_P (operands[1]) = 1;
2427 if (GET_CODE (operands[0]) == MEM
2428 && GET_CODE (XEXP (operands[0], 0)) != REG
2429 && ! reload_in_progress)
2431 = replace_equiv_address (operands[0],
2432 copy_addr_to_reg (XEXP (operands[0], 0)));
2434 if (GET_CODE (operands[1]) == MEM
2435 && GET_CODE (XEXP (operands[1], 0)) != REG
2436 && ! reload_in_progress)
2438 = replace_equiv_address (operands[1],
2439 copy_addr_to_reg (XEXP (operands[1], 0)));
2446 /* Above, we may have called force_const_mem which may have returned
2447 an invalid address. If we can, fix this up; otherwise, reload will
2448 have to deal with it. */
2449 if (GET_CODE (operands[1]) == MEM
2450 && ! memory_address_p (mode, XEXP (operands[1], 0))
2451 && ! reload_in_progress)
2452 operands[1] = adjust_address (operands[1], mode, 0);
2454 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2458 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2459 for a call to a function whose data type is FNTYPE.
2460 For a library call, FNTYPE is 0.
2462 For incoming args we set the number of arguments in the prototype large
2463 so we never return a PARALLEL. */
2466 init_cumulative_args (cum, fntype, libname, incoming)
2467 CUMULATIVE_ARGS *cum;
2469 rtx libname ATTRIBUTE_UNUSED;
2472 static CUMULATIVE_ARGS zero_cumulative;
2474 *cum = zero_cumulative;
2476 cum->fregno = FP_ARG_MIN_REG;
2477 cum->vregno = ALTIVEC_ARG_MIN_REG;
2478 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2479 cum->call_cookie = CALL_NORMAL;
2480 cum->sysv_gregno = GP_ARG_MIN_REG;
2483 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2485 else if (cum->prototype)
2486 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2487 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2488 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2491 cum->nargs_prototype = 0;
2493 cum->orig_nargs = cum->nargs_prototype;
2495 /* Check for longcall's */
2496 if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
2497 cum->call_cookie = CALL_LONG;
2499 if (TARGET_DEBUG_ARG)
2501 fprintf (stderr, "\ninit_cumulative_args:");
2504 tree ret_type = TREE_TYPE (fntype);
2505 fprintf (stderr, " ret code = %s,",
2506 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2509 if (cum->call_cookie & CALL_LONG)
2510 fprintf (stderr, " longcall,");
2512 fprintf (stderr, " proto = %d, nargs = %d\n",
2513 cum->prototype, cum->nargs_prototype);
2517 /* If defined, a C expression which determines whether, and in which
2518 direction, to pad out an argument with extra space. The value
2519 should be of type `enum direction': either `upward' to pad above
2520 the argument, `downward' to pad below, or `none' to inhibit
2523 For the AIX ABI structs are always stored left shifted in their
2527 function_arg_padding (mode, type)
2528 enum machine_mode mode;
2531 if (type != 0 && AGGREGATE_TYPE_P (type))
2534 /* This is the default definition. */
2535 return (! BYTES_BIG_ENDIAN
2538 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2539 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2540 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2541 ? downward : upward));
2544 /* If defined, a C expression that gives the alignment boundary, in bits,
2545 of an argument with the specified mode and type. If it is not defined,
2546 PARM_BOUNDARY is used for all arguments.
2548 V.4 wants long longs to be double word aligned. */
2551 function_arg_boundary (mode, type)
2552 enum machine_mode mode;
2553 tree type ATTRIBUTE_UNUSED;
2555 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2557 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2560 return PARM_BOUNDARY;
2563 /* Update the data in CUM to advance over an argument
2564 of mode MODE and data type TYPE.
2565 (TYPE is null for libcalls where that information may not be available.) */
2568 function_arg_advance (cum, mode, type, named)
2569 CUMULATIVE_ARGS *cum;
2570 enum machine_mode mode;
2574 cum->nargs_prototype--;
2576 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2578 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2581 cum->words += RS6000_ARG_SIZE (mode, type);
2583 else if (DEFAULT_ABI == ABI_V4)
2585 if (TARGET_HARD_FLOAT
2586 && (mode == SFmode || mode == DFmode))
2588 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2593 cum->words += cum->words & 1;
2594 cum->words += RS6000_ARG_SIZE (mode, type);
2600 int gregno = cum->sysv_gregno;
2602 /* Aggregates and IEEE quad get passed by reference. */
2603 if ((type && AGGREGATE_TYPE_P (type))
2607 n_words = RS6000_ARG_SIZE (mode, type);
2609 /* Long long is put in odd registers. */
2610 if (n_words == 2 && (gregno & 1) == 0)
2613 /* Long long is not split between registers and stack. */
2614 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2616 /* Long long is aligned on the stack. */
2618 cum->words += cum->words & 1;
2619 cum->words += n_words;
2622 /* Note: continuing to accumulate gregno past when we've started
2623 spilling to the stack indicates the fact that we've started
2624 spilling to the stack to expand_builtin_saveregs. */
2625 cum->sysv_gregno = gregno + n_words;
2628 if (TARGET_DEBUG_ARG)
2630 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2631 cum->words, cum->fregno);
2632 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2633 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2634 fprintf (stderr, "mode = %4s, named = %d\n",
2635 GET_MODE_NAME (mode), named);
2640 int align = (TARGET_32BIT && (cum->words & 1) != 0
2641 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2643 cum->words += align + RS6000_ARG_SIZE (mode, type);
2645 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2648 if (TARGET_DEBUG_ARG)
2650 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2651 cum->words, cum->fregno);
2652 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2653 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2654 fprintf (stderr, "named = %d, align = %d\n", named, align);
2659 /* Determine where to put an argument to a function.
2660 Value is zero to push the argument on the stack,
2661 or a hard register in which to store the argument.
2663 MODE is the argument's machine mode.
2664 TYPE is the data type of the argument (as a tree).
2665 This is null for libcalls where that information may
2667 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2668 the preceding args and about the function being called.
2669 NAMED is nonzero if this argument is a named parameter
2670 (otherwise it is an extra parameter matching an ellipsis).
2672 On RS/6000 the first eight words of non-FP are normally in registers
2673 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2674 Under V.4, the first 8 FP args are in registers.
2676 If this is floating-point and no prototype is specified, we use
2677 both an FP and integer register (or possibly FP reg and stack). Library
2678 functions (when TYPE is zero) always have the proper types for args,
2679 so we can pass the FP value just in one register. emit_library_function
2680 doesn't support PARALLEL anyway. */
2683 function_arg (cum, mode, type, named)
2684 CUMULATIVE_ARGS *cum;
2685 enum machine_mode mode;
2689 enum rs6000_abi abi = DEFAULT_ABI;
2691 /* Return a marker to indicate whether CR1 needs to set or clear the
2692 bit that V.4 uses to say fp args were passed in registers.
2693 Assume that we don't need the marker for software floating point,
2694 or compiler generated library calls. */
2695 if (mode == VOIDmode)
2698 && TARGET_HARD_FLOAT
2699 && cum->nargs_prototype < 0
2700 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2702 return GEN_INT (cum->call_cookie
2703 | ((cum->fregno == FP_ARG_MIN_REG)
2704 ? CALL_V4_SET_FP_ARGS
2705 : CALL_V4_CLEAR_FP_ARGS));
2708 return GEN_INT (cum->call_cookie);
2711 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2713 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2714 return gen_rtx_REG (mode, cum->vregno);
2718 else if (abi == ABI_V4)
2720 if (TARGET_HARD_FLOAT
2721 && (mode == SFmode || mode == DFmode))
2723 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2724 return gen_rtx_REG (mode, cum->fregno);
2731 int gregno = cum->sysv_gregno;
2733 /* Aggregates and IEEE quad get passed by reference. */
2734 if ((type && AGGREGATE_TYPE_P (type))
2738 n_words = RS6000_ARG_SIZE (mode, type);
2740 /* Long long is put in odd registers. */
2741 if (n_words == 2 && (gregno & 1) == 0)
2744 /* Long long is not split between registers and stack. */
2745 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2746 return gen_rtx_REG (mode, gregno);
2753 int align = (TARGET_32BIT && (cum->words & 1) != 0
2754 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2755 int align_words = cum->words + align;
2757 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2760 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2763 || ((cum->nargs_prototype > 0)
2764 /* IBM AIX extended its linkage convention definition always
2765 to require FP args after register save area hole on the
2767 && (DEFAULT_ABI != ABI_AIX
2769 || (align_words < GP_ARG_NUM_REG))))
2770 return gen_rtx_REG (mode, cum->fregno);
2772 return gen_rtx_PARALLEL (mode,
2774 gen_rtx_EXPR_LIST (VOIDmode,
2775 ((align_words >= GP_ARG_NUM_REG)
2778 + RS6000_ARG_SIZE (mode, type)
2780 /* If this is partially on the stack, then
2781 we only include the portion actually
2782 in registers here. */
2783 ? gen_rtx_REG (SImode,
2784 GP_ARG_MIN_REG + align_words)
2785 : gen_rtx_REG (mode,
2786 GP_ARG_MIN_REG + align_words))),
2788 gen_rtx_EXPR_LIST (VOIDmode,
2789 gen_rtx_REG (mode, cum->fregno),
2792 else if (align_words < GP_ARG_NUM_REG)
2793 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2799 /* For an arg passed partly in registers and partly in memory,
2800 this is the number of registers used.
2801 For args passed entirely in registers or entirely in memory, zero. */
2804 function_arg_partial_nregs (cum, mode, type, named)
2805 CUMULATIVE_ARGS *cum;
2806 enum machine_mode mode;
2808 int named ATTRIBUTE_UNUSED;
2810 if (DEFAULT_ABI == ABI_V4)
2813 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2814 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2816 if (cum->nargs_prototype >= 0)
2820 if (cum->words < GP_ARG_NUM_REG
2821 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2823 int ret = GP_ARG_NUM_REG - cum->words;
2824 if (ret && TARGET_DEBUG_ARG)
2825 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2833 /* A C expression that indicates when an argument must be passed by
2834 reference. If nonzero for an argument, a copy of that argument is
2835 made in memory and a pointer to the argument is passed instead of
2836 the argument itself. The pointer is passed in whatever way is
2837 appropriate for passing a pointer to that type.
2839 Under V.4, structures and unions are passed by reference. */
2842 function_arg_pass_by_reference (cum, mode, type, named)
2843 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2844 enum machine_mode mode ATTRIBUTE_UNUSED;
2846 int named ATTRIBUTE_UNUSED;
2848 if (DEFAULT_ABI == ABI_V4
2849 && ((type && AGGREGATE_TYPE_P (type))
2852 if (TARGET_DEBUG_ARG)
2853 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2861 /* Perform any needed actions needed for a function that is receiving a
2862 variable number of arguments.
2866 MODE and TYPE are the mode and type of the current parameter.
2868 PRETEND_SIZE is a variable that should be set to the amount of stack
2869 that must be pushed by the prolog to pretend that our caller pushed
2872 Normally, this macro will push all remaining incoming registers on the
2873 stack and set PRETEND_SIZE to the length of the registers pushed. */
2876 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2877 CUMULATIVE_ARGS *cum;
2878 enum machine_mode mode;
2884 CUMULATIVE_ARGS next_cum;
2885 int reg_size = TARGET_32BIT ? 4 : 8;
2886 rtx save_area = NULL_RTX, mem;
2887 int first_reg_offset, set;
2891 fntype = TREE_TYPE (current_function_decl);
2892 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2893 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2894 != void_type_node));
2896 /* For varargs, we do not want to skip the dummy va_dcl argument.
2897 For stdargs, we do want to skip the last named argument. */
2900 function_arg_advance (&next_cum, mode, type, 1);
2902 if (DEFAULT_ABI == ABI_V4)
2904 /* Indicate to allocate space on the stack for varargs save area. */
2905 cfun->machine->sysv_varargs_p = 1;
2907 save_area = plus_constant (virtual_stack_vars_rtx,
2908 - RS6000_VARARGS_SIZE);
2910 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2914 first_reg_offset = next_cum.words;
2915 save_area = virtual_incoming_args_rtx;
2916 cfun->machine->sysv_varargs_p = 0;
2918 if (MUST_PASS_IN_STACK (mode, type))
2919 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2922 set = get_varargs_alias_set ();
2923 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2925 mem = gen_rtx_MEM (BLKmode,
2926 plus_constant (save_area,
2927 first_reg_offset * reg_size)),
2928 set_mem_alias_set (mem, set);
2929 set_mem_align (mem, BITS_PER_WORD);
2932 (GP_ARG_MIN_REG + first_reg_offset, mem,
2933 GP_ARG_NUM_REG - first_reg_offset,
2934 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2936 /* ??? Does ABI_V4 need this at all? */
2937 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2940 /* Save FP registers if needed. */
2941 if (DEFAULT_ABI == ABI_V4
2942 && TARGET_HARD_FLOAT && ! no_rtl
2943 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2945 int fregno = next_cum.fregno;
2946 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2947 rtx lab = gen_label_rtx ();
2948 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2950 emit_jump_insn (gen_rtx_SET (VOIDmode,
2952 gen_rtx_IF_THEN_ELSE (VOIDmode,
2953 gen_rtx_NE (VOIDmode, cr1,
2955 gen_rtx_LABEL_REF (VOIDmode, lab),
2958 while (fregno <= FP_ARG_V4_MAX_REG)
2960 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2961 set_mem_alias_set (mem, set);
2962 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2971 /* Create the va_list data type. */
2974 rs6000_build_va_list ()
2976 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
2978 /* For AIX, prefer 'char *' because that's what the system
2979 header files like. */
2980 if (DEFAULT_ABI != ABI_V4)
2981 return build_pointer_type (char_type_node);
2983 record = make_lang_type (RECORD_TYPE);
2984 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
2986 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
2987 unsigned_char_type_node);
2988 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
2989 unsigned_char_type_node);
2990 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
2992 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
2995 DECL_FIELD_CONTEXT (f_gpr) = record;
2996 DECL_FIELD_CONTEXT (f_fpr) = record;
2997 DECL_FIELD_CONTEXT (f_ovf) = record;
2998 DECL_FIELD_CONTEXT (f_sav) = record;
3000 TREE_CHAIN (record) = type_decl;
3001 TYPE_NAME (record) = type_decl;
3002 TYPE_FIELDS (record) = f_gpr;
3003 TREE_CHAIN (f_gpr) = f_fpr;
3004 TREE_CHAIN (f_fpr) = f_ovf;
3005 TREE_CHAIN (f_ovf) = f_sav;
3007 layout_type (record);
3009 /* The correct type is an array type of one element. */
3010 return build_array_type (record, build_index_type (size_zero_node));
3013 /* Implement va_start. */
3016 rs6000_va_start (stdarg_p, valist, nextarg)
3021 HOST_WIDE_INT words, n_gpr, n_fpr;
3022 tree f_gpr, f_fpr, f_ovf, f_sav;
3023 tree gpr, fpr, ovf, sav, t;
3025 /* Only SVR4 needs something special. */
3026 if (DEFAULT_ABI != ABI_V4)
3028 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3032 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3033 f_fpr = TREE_CHAIN (f_gpr);
3034 f_ovf = TREE_CHAIN (f_fpr);
3035 f_sav = TREE_CHAIN (f_ovf);
3037 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3038 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3039 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3040 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3041 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3043 /* Count number of gp and fp argument registers used. */
3044 words = current_function_args_info.words;
3045 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3046 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3048 if (TARGET_DEBUG_ARG)
3050 fputs ("va_start: words = ", stderr);
3051 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3052 fputs (", n_gpr = ", stderr);
3053 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3054 fputs (", n_fpr = ", stderr);
3055 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3056 putc ('\n', stderr);
3059 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3060 TREE_SIDE_EFFECTS (t) = 1;
3061 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3063 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3064 TREE_SIDE_EFFECTS (t) = 1;
3065 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3067 /* Find the overflow area. */
3068 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3070 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3071 build_int_2 (words * UNITS_PER_WORD, 0));
3072 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3073 TREE_SIDE_EFFECTS (t) = 1;
3074 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3076 /* Find the register save area. */
3077 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3078 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3079 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3080 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3081 TREE_SIDE_EFFECTS (t) = 1;
3082 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3085 /* Implement va_arg. */
3088 rs6000_va_arg (valist, type)
3091 tree f_gpr, f_fpr, f_ovf, f_sav;
3092 tree gpr, fpr, ovf, sav, reg, t, u;
3093 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3094 rtx lab_false, lab_over, addr_rtx, r;
3096 if (DEFAULT_ABI != ABI_V4)
3097 return std_expand_builtin_va_arg (valist, type);
3099 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3100 f_fpr = TREE_CHAIN (f_gpr);
3101 f_ovf = TREE_CHAIN (f_fpr);
3102 f_sav = TREE_CHAIN (f_ovf);
3104 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3105 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3106 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3107 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3108 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3110 size = int_size_in_bytes (type);
3111 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3113 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3115 /* Aggregates and long doubles are passed by reference. */
3121 size = rsize = UNITS_PER_WORD;
3123 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3125 /* FP args go in FP registers, if present. */
3134 /* Otherwise into GP registers. */
3142 /* Pull the value out of the saved registers ... */
3144 lab_false = gen_label_rtx ();
3145 lab_over = gen_label_rtx ();
3146 addr_rtx = gen_reg_rtx (Pmode);
3148 /* Vectors never go in registers. */
3149 if (TREE_CODE (type) != VECTOR_TYPE)
3151 TREE_THIS_VOLATILE (reg) = 1;
3152 emit_cmp_and_jump_insns
3153 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3154 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3157 /* Long long is aligned in the registers. */
3160 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3161 build_int_2 (n_reg - 1, 0));
3162 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3163 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3164 TREE_SIDE_EFFECTS (u) = 1;
3165 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3169 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3173 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3174 build_int_2 (n_reg, 0));
3175 TREE_SIDE_EFFECTS (u) = 1;
3177 u = build1 (CONVERT_EXPR, integer_type_node, u);
3178 TREE_SIDE_EFFECTS (u) = 1;
3180 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3181 TREE_SIDE_EFFECTS (u) = 1;
3183 t = build (PLUS_EXPR, ptr_type_node, t, u);
3184 TREE_SIDE_EFFECTS (t) = 1;
3186 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3188 emit_move_insn (addr_rtx, r);
3190 emit_jump_insn (gen_jump (lab_over));
3194 emit_label (lab_false);
3196 /* ... otherwise out of the overflow area. */
3198 /* Make sure we don't find reg 7 for the next int arg.
3200 All AltiVec vectors go in the overflow area. So in the AltiVec
3201 case we need to get the vectors from the overflow area, but
3202 remember where the GPRs and FPRs are. */
3203 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3205 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3206 TREE_SIDE_EFFECTS (t) = 1;
3207 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3210 /* Care for on-stack alignment if needed. */
3217 /* Vectors are 16 byte aligned. */
3218 if (TREE_CODE (type) == VECTOR_TYPE)
3223 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3224 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3228 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3230 emit_move_insn (addr_rtx, r);
3232 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3233 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3234 TREE_SIDE_EFFECTS (t) = 1;
3235 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3237 emit_label (lab_over);
3241 r = gen_rtx_MEM (Pmode, addr_rtx);
3242 set_mem_alias_set (r, get_varargs_alias_set ());
3243 emit_move_insn (addr_rtx, r);
3251 #define def_builtin(MASK, NAME, TYPE, CODE) \
3253 if ((MASK) & target_flags) \
3254 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3257 struct builtin_description
3259 const unsigned int mask;
3260 const enum insn_code icode;
3261 const char *const name;
3262 const enum rs6000_builtins code;
3265 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3267 static const struct builtin_description bdesc_3arg[] =
3269 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3270 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3271 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3272 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3273 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3274 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3275 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3276 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3277 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3278 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3279 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3280 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3281 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3282 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3283 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3284 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3285 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3286 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3287 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3288 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3289 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3290 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3291 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3294 /* DST operations: void foo (void *, const int, const char). */
3296 static const struct builtin_description bdesc_dst[] =
3298 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3299 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3300 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3301 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3304 /* Simple binary operations: VECc = foo (VECa, VECb). */
3306 static const struct builtin_description bdesc_2arg[] =
3308 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3309 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3310 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3311 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3312 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3313 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3314 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3315 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3316 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3317 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3318 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3319 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3320 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3321 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3322 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3323 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3324 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3325 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3326 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3327 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3328 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3329 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3330 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3331 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3332 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3333 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3334 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3335 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3336 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3344 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3345 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3346 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3347 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3348 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3349 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3350 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3357 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3358 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3359 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3360 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3361 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3362 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3363 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3369 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3373 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3374 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3375 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3382 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3390 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3404 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3405 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3406 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3407 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3420 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3423 /* AltiVec predicates. */
3425 struct builtin_description_predicates
3427 const unsigned int mask;
3428 const enum insn_code icode;
3430 const char *const name;
3431 const enum rs6000_builtins code;
3434 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3436 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3445 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3447 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3448 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3451 /* ABS* opreations. */
3453 static const struct builtin_description bdesc_abs[] =
3455 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3456 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3457 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3458 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3464 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3467 static const struct builtin_description bdesc_1arg[] =
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3475 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3476 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3477 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3478 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3489 altivec_expand_unop_builtin (icode, arglist, target)
3490 enum insn_code icode;
3495 tree arg0 = TREE_VALUE (arglist);
3496 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3497 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3498 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3500 /* If we got invalid arguments bail out before generating bad rtl. */
3501 if (arg0 == error_mark_node)
3505 || GET_MODE (target) != tmode
3506 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3507 target = gen_reg_rtx (tmode);
3509 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3510 op0 = copy_to_mode_reg (mode0, op0);
3512 pat = GEN_FCN (icode) (target, op0);
3521 altivec_expand_abs_builtin (icode, arglist, target)
3522 enum insn_code icode;
3526 rtx pat, scratch1, scratch2;
3527 tree arg0 = TREE_VALUE (arglist);
3528 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3529 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3530 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3532 /* If we have invalid arguments, bail out before generating bad rtl. */
3533 if (arg0 == error_mark_node)
3537 || GET_MODE (target) != tmode
3538 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3539 target = gen_reg_rtx (tmode);
3541 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3542 op0 = copy_to_mode_reg (mode0, op0);
3544 scratch1 = gen_reg_rtx (mode0);
3545 scratch2 = gen_reg_rtx (mode0);
3547 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3556 altivec_expand_binop_builtin (icode, arglist, target)
3557 enum insn_code icode;
3562 tree arg0 = TREE_VALUE (arglist);
3563 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3564 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3565 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3566 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3567 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3568 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3570 /* If we got invalid arguments bail out before generating bad rtl. */
3571 if (arg0 == error_mark_node || arg1 == error_mark_node)
3575 || GET_MODE (target) != tmode
3576 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3577 target = gen_reg_rtx (tmode);
3579 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3580 op0 = copy_to_mode_reg (mode0, op0);
3581 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3582 op1 = copy_to_mode_reg (mode1, op1);
3584 pat = GEN_FCN (icode) (target, op0, op1);
3593 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3594 enum insn_code icode;
3600 tree cr6_form = TREE_VALUE (arglist);
3601 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3602 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3603 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3604 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3605 enum machine_mode tmode = SImode;
3606 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3607 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3610 if (TREE_CODE (cr6_form) != INTEGER_CST)
3612 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3616 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3621 /* If we have invalid arguments, bail out before generating bad rtl. */
3622 if (arg0 == error_mark_node || arg1 == error_mark_node)
3626 || GET_MODE (target) != tmode
3627 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3628 target = gen_reg_rtx (tmode);
3630 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3631 op0 = copy_to_mode_reg (mode0, op0);
3632 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3633 op1 = copy_to_mode_reg (mode1, op1);
3635 scratch = gen_reg_rtx (mode0);
3637 pat = GEN_FCN (icode) (scratch, op0, op1,
3638 gen_rtx (SYMBOL_REF, Pmode, opcode));
3643 /* The vec_any* and vec_all* predicates use the same opcodes for two
3644 different operations, but the bits in CR6 will be different
3645 depending on what information we want. So we have to play tricks
3646 with CR6 to get the right bits out.
3648 If you think this is disgusting, look at the specs for the
3649 AltiVec predicates. */
3651 switch (cr6_form_int)
3654 emit_insn (gen_cr6_test_for_zero (target));
3657 emit_insn (gen_cr6_test_for_zero_reverse (target));
3660 emit_insn (gen_cr6_test_for_lt (target));
3663 emit_insn (gen_cr6_test_for_lt_reverse (target));
3666 error ("argument 1 of __builtin_altivec_predicate is out of range");
3674 altivec_expand_stv_builtin (icode, arglist)
3675 enum insn_code icode;
3678 tree arg0 = TREE_VALUE (arglist);
3679 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3680 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3681 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3682 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3683 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3685 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3686 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3687 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3689 /* Invalid arguments. Bail before doing anything stoopid! */
3690 if (arg0 == error_mark_node
3691 || arg1 == error_mark_node
3692 || arg2 == error_mark_node)
3695 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3696 op0 = copy_to_mode_reg (mode2, op0);
3697 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3698 op1 = copy_to_mode_reg (mode0, op1);
3699 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3700 op2 = copy_to_mode_reg (mode1, op2);
3702 pat = GEN_FCN (icode) (op1, op2, op0);
3709 altivec_expand_ternop_builtin (icode, arglist, target)
3710 enum insn_code icode;
3715 tree arg0 = TREE_VALUE (arglist);
3716 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3717 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3718 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3719 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3720 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3721 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3722 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3723 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3724 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3726 /* If we got invalid arguments bail out before generating bad rtl. */
3727 if (arg0 == error_mark_node
3728 || arg1 == error_mark_node
3729 || arg2 == error_mark_node)
3733 || GET_MODE (target) != tmode
3734 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3735 target = gen_reg_rtx (tmode);
3737 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3738 op0 = copy_to_mode_reg (mode0, op0);
3739 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3740 op1 = copy_to_mode_reg (mode1, op1);
3741 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3742 op2 = copy_to_mode_reg (mode2, op2);
3744 pat = GEN_FCN (icode) (target, op0, op1, op2);
3752 altivec_expand_builtin (exp, target)
3756 struct builtin_description *d;
3757 struct builtin_description_predicates *dp;
3759 enum insn_code icode;
3760 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3761 tree arglist = TREE_OPERAND (exp, 1);
3762 tree arg0, arg1, arg2;
3763 rtx op0, op1, op2, pat;
3764 enum machine_mode tmode, mode0, mode1, mode2;
3765 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3769 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3770 icode = CODE_FOR_altivec_lvx_16qi;
3771 arg0 = TREE_VALUE (arglist);
3772 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3773 tmode = insn_data[icode].operand[0].mode;
3774 mode0 = insn_data[icode].operand[1].mode;
3777 || GET_MODE (target) != tmode
3778 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3779 target = gen_reg_rtx (tmode);
3781 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3782 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3784 pat = GEN_FCN (icode) (target, op0);
3790 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3791 icode = CODE_FOR_altivec_lvx_8hi;
3792 arg0 = TREE_VALUE (arglist);
3793 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3794 tmode = insn_data[icode].operand[0].mode;
3795 mode0 = insn_data[icode].operand[1].mode;
3798 || GET_MODE (target) != tmode
3799 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3800 target = gen_reg_rtx (tmode);
3802 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3803 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3805 pat = GEN_FCN (icode) (target, op0);
3811 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3812 icode = CODE_FOR_altivec_lvx_4si;
3813 arg0 = TREE_VALUE (arglist);
3814 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3815 tmode = insn_data[icode].operand[0].mode;
3816 mode0 = insn_data[icode].operand[1].mode;
3819 || GET_MODE (target) != tmode
3820 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3821 target = gen_reg_rtx (tmode);
3823 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3824 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3826 pat = GEN_FCN (icode) (target, op0);
3832 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3833 icode = CODE_FOR_altivec_lvx_4sf;
3834 arg0 = TREE_VALUE (arglist);
3835 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3836 tmode = insn_data[icode].operand[0].mode;
3837 mode0 = insn_data[icode].operand[1].mode;
3840 || GET_MODE (target) != tmode
3841 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3842 target = gen_reg_rtx (tmode);
3844 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3845 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3847 pat = GEN_FCN (icode) (target, op0);
3853 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3854 icode = CODE_FOR_altivec_stvx_16qi;
3855 arg0 = TREE_VALUE (arglist);
3856 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3857 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3858 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3859 mode0 = insn_data[icode].operand[0].mode;
3860 mode1 = insn_data[icode].operand[1].mode;
3862 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3863 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3864 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3865 op1 = copy_to_mode_reg (mode1, op1);
3867 pat = GEN_FCN (icode) (op0, op1);
3872 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3873 icode = CODE_FOR_altivec_stvx_8hi;
3874 arg0 = TREE_VALUE (arglist);
3875 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3876 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3877 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3878 mode0 = insn_data[icode].operand[0].mode;
3879 mode1 = insn_data[icode].operand[1].mode;
3881 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3882 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3883 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3884 op1 = copy_to_mode_reg (mode1, op1);
3886 pat = GEN_FCN (icode) (op0, op1);
3891 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3892 icode = CODE_FOR_altivec_stvx_4si;
3893 arg0 = TREE_VALUE (arglist);
3894 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3895 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3896 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3897 mode0 = insn_data[icode].operand[0].mode;
3898 mode1 = insn_data[icode].operand[1].mode;
3900 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3901 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3902 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3903 op1 = copy_to_mode_reg (mode1, op1);
3905 pat = GEN_FCN (icode) (op0, op1);
3910 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3911 icode = CODE_FOR_altivec_stvx_4sf;
3912 arg0 = TREE_VALUE (arglist);
3913 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3914 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3915 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3916 mode0 = insn_data[icode].operand[0].mode;
3917 mode1 = insn_data[icode].operand[1].mode;
3919 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3920 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3921 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3922 op1 = copy_to_mode_reg (mode1, op1);
3924 pat = GEN_FCN (icode) (op0, op1);
3929 case ALTIVEC_BUILTIN_STVX:
3930 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3931 case ALTIVEC_BUILTIN_STVEBX:
3932 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3933 case ALTIVEC_BUILTIN_STVEHX:
3934 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3935 case ALTIVEC_BUILTIN_STVEWX:
3936 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3937 case ALTIVEC_BUILTIN_STVXL:
3938 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3940 case ALTIVEC_BUILTIN_MFVSCR:
3941 icode = CODE_FOR_altivec_mfvscr;
3942 tmode = insn_data[icode].operand[0].mode;
3945 || GET_MODE (target) != tmode
3946 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3947 target = gen_reg_rtx (tmode);
3949 pat = GEN_FCN (icode) (target);
3955 case ALTIVEC_BUILTIN_MTVSCR:
3956 icode = CODE_FOR_altivec_mtvscr;
3957 arg0 = TREE_VALUE (arglist);
3958 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3959 mode0 = insn_data[icode].operand[0].mode;
3961 /* If we got invalid arguments bail out before generating bad rtl. */
3962 if (arg0 == error_mark_node)
3965 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3966 op0 = copy_to_mode_reg (mode0, op0);
3968 pat = GEN_FCN (icode) (op0);
3973 case ALTIVEC_BUILTIN_DSSALL:
3974 emit_insn (gen_altivec_dssall ());
3977 case ALTIVEC_BUILTIN_DSS:
3978 icode = CODE_FOR_altivec_dss;
3979 arg0 = TREE_VALUE (arglist);
3980 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3981 mode0 = insn_data[icode].operand[0].mode;
3983 /* If we got invalid arguments bail out before generating bad rtl. */
3984 if (arg0 == error_mark_node)
3987 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3988 op0 = copy_to_mode_reg (mode0, op0);
3990 emit_insn (gen_altivec_dss (op0));
3994 /* Handle DST variants. */
3995 d = (struct builtin_description *) bdesc_dst;
3996 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
3997 if (d->code == fcode)
3999 arg0 = TREE_VALUE (arglist);
4000 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4001 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4002 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4003 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4004 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4005 mode0 = insn_data[d->icode].operand[0].mode;
4006 mode1 = insn_data[d->icode].operand[1].mode;
4007 mode2 = insn_data[d->icode].operand[2].mode;
4009 /* Invalid arguments, bail out before generating bad rtl. */
4010 if (arg0 == error_mark_node
4011 || arg1 == error_mark_node
4012 || arg2 == error_mark_node)
4015 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4016 op0 = copy_to_mode_reg (mode0, op0);
4017 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4018 op1 = copy_to_mode_reg (mode1, op1);
4020 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4022 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4026 pat = GEN_FCN (d->icode) (op0, op1, op2);
4033 /* Expand abs* operations. */
4034 d = (struct builtin_description *) bdesc_abs;
4035 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4036 if (d->code == fcode)
4037 return altivec_expand_abs_builtin (d->icode, arglist, target);
4039 /* Handle simple unary operations. */
4040 d = (struct builtin_description *) bdesc_1arg;
4041 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4042 if (d->code == fcode)
4043 return altivec_expand_unop_builtin (d->icode, arglist, target);
4045 /* Handle simple binary operations. */
4046 d = (struct builtin_description *) bdesc_2arg;
4047 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4048 if (d->code == fcode)
4049 return altivec_expand_binop_builtin (d->icode, arglist, target);
4051 /* Expand the AltiVec predicates. */
4052 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4053 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4054 if (dp->code == fcode)
4055 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4057 /* LV* are funky. We initialized them differently. */
4060 case ALTIVEC_BUILTIN_LVSL:
4061 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4063 case ALTIVEC_BUILTIN_LVSR:
4064 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4066 case ALTIVEC_BUILTIN_LVEBX:
4067 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4069 case ALTIVEC_BUILTIN_LVEHX:
4070 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4072 case ALTIVEC_BUILTIN_LVEWX:
4073 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4075 case ALTIVEC_BUILTIN_LVXL:
4076 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4078 case ALTIVEC_BUILTIN_LVX:
4079 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4086 /* Handle simple ternary operations. */
4087 d = (struct builtin_description *) bdesc_3arg;
4088 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4089 if (d->code == fcode)
4090 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4096 /* Expand an expression EXP that calls a built-in function,
4097 with result going to TARGET if that's convenient
4098 (and in mode MODE if that's convenient).
4099 SUBTARGET may be used as the target for computing one of EXP's operands.
4100 IGNORE is nonzero if the value is to be ignored. */
4103 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4106 rtx subtarget ATTRIBUTE_UNUSED;
4107 enum machine_mode mode ATTRIBUTE_UNUSED;
4108 int ignore ATTRIBUTE_UNUSED;
4111 return altivec_expand_builtin (exp, target);
4117 rs6000_init_builtins ()
4120 altivec_init_builtins ();
4124 altivec_init_builtins (void)
4126 struct builtin_description *d;
4127 struct builtin_description_predicates *dp;
4130 tree endlink = void_list_node;
4132 tree pint_type_node = build_pointer_type (integer_type_node);
4133 tree pvoid_type_node = build_pointer_type (void_type_node);
4134 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4135 tree pchar_type_node = build_pointer_type (char_type_node);
4136 tree pfloat_type_node = build_pointer_type (float_type_node);
4138 tree v4sf_ftype_v4sf_v4sf_v16qi
4139 = build_function_type (V4SF_type_node,
4140 tree_cons (NULL_TREE, V4SF_type_node,
4141 tree_cons (NULL_TREE, V4SF_type_node,
4142 tree_cons (NULL_TREE,
4145 tree v4si_ftype_v4si_v4si_v16qi
4146 = build_function_type (V4SI_type_node,
4147 tree_cons (NULL_TREE, V4SI_type_node,
4148 tree_cons (NULL_TREE, V4SI_type_node,
4149 tree_cons (NULL_TREE,
4152 tree v8hi_ftype_v8hi_v8hi_v16qi
4153 = build_function_type (V8HI_type_node,
4154 tree_cons (NULL_TREE, V8HI_type_node,
4155 tree_cons (NULL_TREE, V8HI_type_node,
4156 tree_cons (NULL_TREE,
4159 tree v16qi_ftype_v16qi_v16qi_v16qi
4160 = build_function_type (V16QI_type_node,
4161 tree_cons (NULL_TREE, V16QI_type_node,
4162 tree_cons (NULL_TREE, V16QI_type_node,
4163 tree_cons (NULL_TREE,
4167 /* V4SI foo (char). */
4168 tree v4si_ftype_char
4169 = build_function_type (V4SI_type_node,
4170 tree_cons (NULL_TREE, char_type_node, endlink));
4172 /* V8HI foo (char). */
4173 tree v8hi_ftype_char
4174 = build_function_type (V8HI_type_node,
4175 tree_cons (NULL_TREE, char_type_node, endlink));
4177 /* V16QI foo (char). */
4178 tree v16qi_ftype_char
4179 = build_function_type (V16QI_type_node,
4180 tree_cons (NULL_TREE, char_type_node, endlink));
4181 /* V4SF foo (V4SF). */
4182 tree v4sf_ftype_v4sf
4183 = build_function_type (V4SF_type_node,
4184 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4186 /* V4SI foo (int *). */
4187 tree v4si_ftype_pint
4188 = build_function_type (V4SI_type_node,
4189 tree_cons (NULL_TREE, pint_type_node, endlink));
4190 /* V8HI foo (short *). */
4191 tree v8hi_ftype_pshort
4192 = build_function_type (V8HI_type_node,
4193 tree_cons (NULL_TREE, pshort_type_node, endlink));
4194 /* V16QI foo (char *). */
4195 tree v16qi_ftype_pchar
4196 = build_function_type (V16QI_type_node,
4197 tree_cons (NULL_TREE, pchar_type_node, endlink));
4198 /* V4SF foo (float *). */
4199 tree v4sf_ftype_pfloat
4200 = build_function_type (V4SF_type_node,
4201 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4203 /* V8HI foo (V16QI). */
4204 tree v8hi_ftype_v16qi
4205 = build_function_type (V8HI_type_node,
4206 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4208 /* void foo (void *, int, char/literal). */
4209 tree void_ftype_pvoid_int_char
4210 = build_function_type (void_type_node,
4211 tree_cons (NULL_TREE, pvoid_type_node,
4212 tree_cons (NULL_TREE, integer_type_node,
4213 tree_cons (NULL_TREE,
4217 /* void foo (int *, V4SI). */
4218 tree void_ftype_pint_v4si
4219 = build_function_type (void_type_node,
4220 tree_cons (NULL_TREE, pint_type_node,
4221 tree_cons (NULL_TREE, V4SI_type_node,
4223 /* void foo (short *, V8HI). */
4224 tree void_ftype_pshort_v8hi
4225 = build_function_type (void_type_node,
4226 tree_cons (NULL_TREE, pshort_type_node,
4227 tree_cons (NULL_TREE, V8HI_type_node,
4229 /* void foo (char *, V16QI). */
4230 tree void_ftype_pchar_v16qi
4231 = build_function_type (void_type_node,
4232 tree_cons (NULL_TREE, pchar_type_node,
4233 tree_cons (NULL_TREE, V16QI_type_node,
4235 /* void foo (float *, V4SF). */
4236 tree void_ftype_pfloat_v4sf
4237 = build_function_type (void_type_node,
4238 tree_cons (NULL_TREE, pfloat_type_node,
4239 tree_cons (NULL_TREE, V4SF_type_node,
4242 /* void foo (V4SI). */
4243 tree void_ftype_v4si
4244 = build_function_type (void_type_node,
4245 tree_cons (NULL_TREE, V4SI_type_node,
4248 /* void foo (vint, int, void *). */
4249 tree void_ftype_v4si_int_pvoid
4250 = build_function_type (void_type_node,
4251 tree_cons (NULL_TREE, V4SI_type_node,
4252 tree_cons (NULL_TREE, integer_type_node,
4253 tree_cons (NULL_TREE,
4257 /* void foo (vchar, int, void *). */
4258 tree void_ftype_v16qi_int_pvoid
4259 = build_function_type (void_type_node,
4260 tree_cons (NULL_TREE, V16QI_type_node,
4261 tree_cons (NULL_TREE, integer_type_node,
4262 tree_cons (NULL_TREE,
4266 /* void foo (vshort, int, void *). */
4267 tree void_ftype_v8hi_int_pvoid
4268 = build_function_type (void_type_node,
4269 tree_cons (NULL_TREE, V8HI_type_node,
4270 tree_cons (NULL_TREE, integer_type_node,
4271 tree_cons (NULL_TREE,
4275 /* void foo (char). */
4277 = build_function_type (void_type_node,
4278 tree_cons (NULL_TREE, char_type_node,
4281 /* void foo (void). */
4282 tree void_ftype_void
4283 = build_function_type (void_type_node, void_list_node);
4285 /* vshort foo (void). */
4286 tree v8hi_ftype_void
4287 = build_function_type (V8HI_type_node, void_list_node);
4289 tree v4si_ftype_v4si_v4si
4290 = build_function_type (V4SI_type_node,
4291 tree_cons (NULL_TREE, V4SI_type_node,
4292 tree_cons (NULL_TREE, V4SI_type_node,
4295 /* These are for the unsigned 5 bit literals. */
4297 tree v4sf_ftype_v4si_char
4298 = build_function_type (V4SF_type_node,
4299 tree_cons (NULL_TREE, V4SI_type_node,
4300 tree_cons (NULL_TREE, char_type_node,
4302 tree v4si_ftype_v4sf_char
4303 = build_function_type (V4SI_type_node,
4304 tree_cons (NULL_TREE, V4SF_type_node,
4305 tree_cons (NULL_TREE, char_type_node,
4307 tree v4si_ftype_v4si_char
4308 = build_function_type (V4SI_type_node,
4309 tree_cons (NULL_TREE, V4SI_type_node,
4310 tree_cons (NULL_TREE, char_type_node,
4312 tree v8hi_ftype_v8hi_char
4313 = build_function_type (V8HI_type_node,
4314 tree_cons (NULL_TREE, V8HI_type_node,
4315 tree_cons (NULL_TREE, char_type_node,
4317 tree v16qi_ftype_v16qi_char
4318 = build_function_type (V16QI_type_node,
4319 tree_cons (NULL_TREE, V16QI_type_node,
4320 tree_cons (NULL_TREE, char_type_node,
4323 /* These are for the unsigned 4 bit literals. */
4325 tree v16qi_ftype_v16qi_v16qi_char
4326 = build_function_type (V16QI_type_node,
4327 tree_cons (NULL_TREE, V16QI_type_node,
4328 tree_cons (NULL_TREE, V16QI_type_node,
4329 tree_cons (NULL_TREE,
4333 tree v8hi_ftype_v8hi_v8hi_char
4334 = build_function_type (V8HI_type_node,
4335 tree_cons (NULL_TREE, V8HI_type_node,
4336 tree_cons (NULL_TREE, V8HI_type_node,
4337 tree_cons (NULL_TREE,
4341 tree v4si_ftype_v4si_v4si_char
4342 = build_function_type (V4SI_type_node,
4343 tree_cons (NULL_TREE, V4SI_type_node,
4344 tree_cons (NULL_TREE, V4SI_type_node,
4345 tree_cons (NULL_TREE,
4349 tree v4sf_ftype_v4sf_v4sf_char
4350 = build_function_type (V4SF_type_node,
4351 tree_cons (NULL_TREE, V4SF_type_node,
4352 tree_cons (NULL_TREE, V4SF_type_node,
4353 tree_cons (NULL_TREE,
4357 /* End of 4 bit literals. */
4359 tree v4sf_ftype_v4sf_v4sf
4360 = build_function_type (V4SF_type_node,
4361 tree_cons (NULL_TREE, V4SF_type_node,
4362 tree_cons (NULL_TREE, V4SF_type_node,
4364 tree v4sf_ftype_v4sf_v4sf_v4si
4365 = build_function_type (V4SF_type_node,
4366 tree_cons (NULL_TREE, V4SF_type_node,
4367 tree_cons (NULL_TREE, V4SF_type_node,
4368 tree_cons (NULL_TREE,
4371 tree v4sf_ftype_v4sf_v4sf_v4sf
4372 = build_function_type (V4SF_type_node,
4373 tree_cons (NULL_TREE, V4SF_type_node,
4374 tree_cons (NULL_TREE, V4SF_type_node,
4375 tree_cons (NULL_TREE,
4378 tree v4si_ftype_v4si_v4si_v4si
4379 = build_function_type (V4SI_type_node,
4380 tree_cons (NULL_TREE, V4SI_type_node,
4381 tree_cons (NULL_TREE, V4SI_type_node,
4382 tree_cons (NULL_TREE,
4386 tree v8hi_ftype_v8hi_v8hi
4387 = build_function_type (V8HI_type_node,
4388 tree_cons (NULL_TREE, V8HI_type_node,
4389 tree_cons (NULL_TREE, V8HI_type_node,
4391 tree v8hi_ftype_v8hi_v8hi_v8hi
4392 = build_function_type (V8HI_type_node,
4393 tree_cons (NULL_TREE, V8HI_type_node,
4394 tree_cons (NULL_TREE, V8HI_type_node,
4395 tree_cons (NULL_TREE,
4398 tree v4si_ftype_v8hi_v8hi_v4si
4399 = build_function_type (V4SI_type_node,
4400 tree_cons (NULL_TREE, V8HI_type_node,
4401 tree_cons (NULL_TREE, V8HI_type_node,
4402 tree_cons (NULL_TREE,
4405 tree v4si_ftype_v16qi_v16qi_v4si
4406 = build_function_type (V4SI_type_node,
4407 tree_cons (NULL_TREE, V16QI_type_node,
4408 tree_cons (NULL_TREE, V16QI_type_node,
4409 tree_cons (NULL_TREE,
4413 tree v16qi_ftype_v16qi_v16qi
4414 = build_function_type (V16QI_type_node,
4415 tree_cons (NULL_TREE, V16QI_type_node,
4416 tree_cons (NULL_TREE, V16QI_type_node,
4419 tree v4si_ftype_v4sf_v4sf
4420 = build_function_type (V4SI_type_node,
4421 tree_cons (NULL_TREE, V4SF_type_node,
4422 tree_cons (NULL_TREE, V4SF_type_node,
4425 tree v4si_ftype_v4si
4426 = build_function_type (V4SI_type_node,
4427 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4429 tree v8hi_ftype_v8hi
4430 = build_function_type (V8HI_type_node,
4431 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4433 tree v16qi_ftype_v16qi
4434 = build_function_type (V16QI_type_node,
4435 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4437 tree v8hi_ftype_v16qi_v16qi
4438 = build_function_type (V8HI_type_node,
4439 tree_cons (NULL_TREE, V16QI_type_node,
4440 tree_cons (NULL_TREE, V16QI_type_node,
4443 tree v4si_ftype_v8hi_v8hi
4444 = build_function_type (V4SI_type_node,
4445 tree_cons (NULL_TREE, V8HI_type_node,
4446 tree_cons (NULL_TREE, V8HI_type_node,
4449 tree v8hi_ftype_v4si_v4si
4450 = build_function_type (V8HI_type_node,
4451 tree_cons (NULL_TREE, V4SI_type_node,
4452 tree_cons (NULL_TREE, V4SI_type_node,
4455 tree v16qi_ftype_v8hi_v8hi
4456 = build_function_type (V16QI_type_node,
4457 tree_cons (NULL_TREE, V8HI_type_node,
4458 tree_cons (NULL_TREE, V8HI_type_node,
4461 tree v4si_ftype_v16qi_v4si
4462 = build_function_type (V4SI_type_node,
4463 tree_cons (NULL_TREE, V16QI_type_node,
4464 tree_cons (NULL_TREE, V4SI_type_node,
4467 tree v4si_ftype_v16qi_v16qi
4468 = build_function_type (V4SI_type_node,
4469 tree_cons (NULL_TREE, V16QI_type_node,
4470 tree_cons (NULL_TREE, V16QI_type_node,
4473 tree v4si_ftype_v8hi_v4si
4474 = build_function_type (V4SI_type_node,
4475 tree_cons (NULL_TREE, V8HI_type_node,
4476 tree_cons (NULL_TREE, V4SI_type_node,
4479 tree v4si_ftype_v8hi
4480 = build_function_type (V4SI_type_node,
4481 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4483 tree int_ftype_v4si_v4si
4484 = build_function_type (integer_type_node,
4485 tree_cons (NULL_TREE, V4SI_type_node,
4486 tree_cons (NULL_TREE, V4SI_type_node,
4489 tree int_ftype_v4sf_v4sf
4490 = build_function_type (integer_type_node,
4491 tree_cons (NULL_TREE, V4SF_type_node,
4492 tree_cons (NULL_TREE, V4SF_type_node,
4495 tree int_ftype_v16qi_v16qi
4496 = build_function_type (integer_type_node,
4497 tree_cons (NULL_TREE, V16QI_type_node,
4498 tree_cons (NULL_TREE, V16QI_type_node,
4501 tree int_ftype_int_v4si_v4si
4502 = build_function_type
4504 tree_cons (NULL_TREE, integer_type_node,
4505 tree_cons (NULL_TREE, V4SI_type_node,
4506 tree_cons (NULL_TREE, V4SI_type_node,
4509 tree int_ftype_int_v4sf_v4sf
4510 = build_function_type
4512 tree_cons (NULL_TREE, integer_type_node,
4513 tree_cons (NULL_TREE, V4SF_type_node,
4514 tree_cons (NULL_TREE, V4SF_type_node,
4517 tree int_ftype_int_v8hi_v8hi
4518 = build_function_type
4520 tree_cons (NULL_TREE, integer_type_node,
4521 tree_cons (NULL_TREE, V8HI_type_node,
4522 tree_cons (NULL_TREE, V8HI_type_node,
4525 tree int_ftype_int_v16qi_v16qi
4526 = build_function_type
4528 tree_cons (NULL_TREE, integer_type_node,
4529 tree_cons (NULL_TREE, V16QI_type_node,
4530 tree_cons (NULL_TREE, V16QI_type_node,
4533 tree v16qi_ftype_int_pvoid
4534 = build_function_type (V16QI_type_node,
4535 tree_cons (NULL_TREE, integer_type_node,
4536 tree_cons (NULL_TREE, pvoid_type_node,
4539 tree v4si_ftype_int_pvoid
4540 = build_function_type (V4SI_type_node,
4541 tree_cons (NULL_TREE, integer_type_node,
4542 tree_cons (NULL_TREE, pvoid_type_node,
4545 tree v8hi_ftype_int_pvoid
4546 = build_function_type (V8HI_type_node,
4547 tree_cons (NULL_TREE, integer_type_node,
4548 tree_cons (NULL_TREE, pvoid_type_node,
4551 tree int_ftype_v8hi_v8hi
4552 = build_function_type (integer_type_node,
4553 tree_cons (NULL_TREE, V8HI_type_node,
4554 tree_cons (NULL_TREE, V8HI_type_node,
4557 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4558 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4559 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4560 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4561 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4562 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4563 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4564 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4565 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4566 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4567 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4568 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4569 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4570 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4571 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4572 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4573 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4574 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4575 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4576 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4577 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4578 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4579 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4580 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4582 /* Add the simple ternary operators. */
4583 d = (struct builtin_description *) bdesc_3arg;
4584 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4587 enum machine_mode mode0, mode1, mode2, mode3;
4593 mode0 = insn_data[d->icode].operand[0].mode;
4594 mode1 = insn_data[d->icode].operand[1].mode;
4595 mode2 = insn_data[d->icode].operand[2].mode;
4596 mode3 = insn_data[d->icode].operand[3].mode;
4598 /* When all four are of the same mode. */
4599 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4604 type = v4si_ftype_v4si_v4si_v4si;
4607 type = v4sf_ftype_v4sf_v4sf_v4sf;
4610 type = v8hi_ftype_v8hi_v8hi_v8hi;
4613 type = v16qi_ftype_v16qi_v16qi_v16qi;
4619 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4624 type = v4si_ftype_v4si_v4si_v16qi;
4627 type = v4sf_ftype_v4sf_v4sf_v16qi;
4630 type = v8hi_ftype_v8hi_v8hi_v16qi;
4633 type = v16qi_ftype_v16qi_v16qi_v16qi;
4639 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4640 && mode3 == V4SImode)
4641 type = v4si_ftype_v16qi_v16qi_v4si;
4642 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4643 && mode3 == V4SImode)
4644 type = v4si_ftype_v8hi_v8hi_v4si;
4645 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4646 && mode3 == V4SImode)
4647 type = v4sf_ftype_v4sf_v4sf_v4si;
4649 /* vchar, vchar, vchar, 4 bit literal. */
4650 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4652 type = v16qi_ftype_v16qi_v16qi_char;
4654 /* vshort, vshort, vshort, 4 bit literal. */
4655 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4657 type = v8hi_ftype_v8hi_v8hi_char;
4659 /* vint, vint, vint, 4 bit literal. */
4660 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4662 type = v4si_ftype_v4si_v4si_char;
4664 /* vfloat, vfloat, vfloat, 4 bit literal. */
4665 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4667 type = v4sf_ftype_v4sf_v4sf_char;
4672 def_builtin (d->mask, d->name, type, d->code);
4675 /* Add the DST variants. */
4676 d = (struct builtin_description *) bdesc_dst;
4677 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4678 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4680 /* Initialize the predicates. */
4681 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4682 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4684 enum machine_mode mode1;
4687 mode1 = insn_data[dp->icode].operand[1].mode;
4692 type = int_ftype_int_v4si_v4si;
4695 type = int_ftype_int_v8hi_v8hi;
4698 type = int_ftype_int_v16qi_v16qi;
4701 type = int_ftype_int_v4sf_v4sf;
4707 def_builtin (dp->mask, dp->name, type, dp->code);
4710 /* Add the simple binary operators. */
4711 d = (struct builtin_description *) bdesc_2arg;
4712 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4714 enum machine_mode mode0, mode1, mode2;
4720 mode0 = insn_data[d->icode].operand[0].mode;
4721 mode1 = insn_data[d->icode].operand[1].mode;
4722 mode2 = insn_data[d->icode].operand[2].mode;
4724 /* When all three operands are of the same mode. */
4725 if (mode0 == mode1 && mode1 == mode2)
4730 type = v4sf_ftype_v4sf_v4sf;
4733 type = v4si_ftype_v4si_v4si;
4736 type = v16qi_ftype_v16qi_v16qi;
4739 type = v8hi_ftype_v8hi_v8hi;
4746 /* A few other combos we really don't want to do manually. */
4748 /* vint, vfloat, vfloat. */
4749 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4750 type = v4si_ftype_v4sf_v4sf;
4752 /* vshort, vchar, vchar. */
4753 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4754 type = v8hi_ftype_v16qi_v16qi;
4756 /* vint, vshort, vshort. */
4757 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4758 type = v4si_ftype_v8hi_v8hi;
4760 /* vshort, vint, vint. */
4761 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4762 type = v8hi_ftype_v4si_v4si;
4764 /* vchar, vshort, vshort. */
4765 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4766 type = v16qi_ftype_v8hi_v8hi;
4768 /* vint, vchar, vint. */
4769 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4770 type = v4si_ftype_v16qi_v4si;
4772 /* vint, vchar, vchar. */
4773 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4774 type = v4si_ftype_v16qi_v16qi;
4776 /* vint, vshort, vint. */
4777 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4778 type = v4si_ftype_v8hi_v4si;
4780 /* vint, vint, 5 bit literal. */
4781 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4782 type = v4si_ftype_v4si_char;
4784 /* vshort, vshort, 5 bit literal. */
4785 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4786 type = v8hi_ftype_v8hi_char;
4788 /* vchar, vchar, 5 bit literal. */
4789 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4790 type = v16qi_ftype_v16qi_char;
4792 /* vfloat, vint, 5 bit literal. */
4793 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4794 type = v4sf_ftype_v4si_char;
4796 /* vint, vfloat, 5 bit literal. */
4797 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4798 type = v4si_ftype_v4sf_char;
4801 else if (mode0 == SImode)
4806 type = int_ftype_v4si_v4si;
4809 type = int_ftype_v4sf_v4sf;
4812 type = int_ftype_v16qi_v16qi;
4815 type = int_ftype_v8hi_v8hi;
4825 def_builtin (d->mask, d->name, type, d->code);
4828 /* Initialize the abs* operators. */
4829 d = (struct builtin_description *) bdesc_abs;
4830 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4832 enum machine_mode mode0;
4835 mode0 = insn_data[d->icode].operand[0].mode;
4840 type = v4si_ftype_v4si;
4843 type = v8hi_ftype_v8hi;
4846 type = v16qi_ftype_v16qi;
4849 type = v4sf_ftype_v4sf;
4855 def_builtin (d->mask, d->name, type, d->code);
4858 /* Add the simple unary operators. */
4859 d = (struct builtin_description *) bdesc_1arg;
4860 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4862 enum machine_mode mode0, mode1;
4868 mode0 = insn_data[d->icode].operand[0].mode;
4869 mode1 = insn_data[d->icode].operand[1].mode;
4871 if (mode0 == V4SImode && mode1 == QImode)
4872 type = v4si_ftype_char;
4873 else if (mode0 == V8HImode && mode1 == QImode)
4874 type = v8hi_ftype_char;
4875 else if (mode0 == V16QImode && mode1 == QImode)
4876 type = v16qi_ftype_char;
4877 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4878 type = v4sf_ftype_v4sf;
4879 else if (mode0 == V8HImode && mode1 == V16QImode)
4880 type = v8hi_ftype_v16qi;
4881 else if (mode0 == V4SImode && mode1 == V8HImode)
4882 type = v4si_ftype_v8hi;
4886 def_builtin (d->mask, d->name, type, d->code);
4891 /* Generate a memory reference for expand_block_move, copying volatile,
4892 and other bits from an original memory reference. */
4895 expand_block_move_mem (mode, addr, orig_mem)
4896 enum machine_mode mode;
4900 rtx mem = gen_rtx_MEM (mode, addr);
4902 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4906 /* Expand a block move operation, and return 1 if successful. Return 0
4907 if we should let the compiler generate normal code.
4909 operands[0] is the destination
4910 operands[1] is the source
4911 operands[2] is the length
4912 operands[3] is the alignment */
4914 #define MAX_MOVE_REG 4
4917 expand_block_move (operands)
4920 rtx orig_dest = operands[0];
4921 rtx orig_src = operands[1];
4922 rtx bytes_rtx = operands[2];
4923 rtx align_rtx = operands[3];
4924 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
4935 rtx stores[MAX_MOVE_REG];
4938 /* If this is not a fixed size move, just call memcpy */
4942 /* If this is not a fixed size alignment, abort */
4943 if (GET_CODE (align_rtx) != CONST_INT)
4945 align = INTVAL (align_rtx);
4947 /* Anything to move? */
4948 bytes = INTVAL (bytes_rtx);
4952 /* store_one_arg depends on expand_block_move to handle at least the size of
4953 reg_parm_stack_space. */
4954 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
4957 /* Move the address into scratch registers. */
4958 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
4959 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
4961 if (TARGET_STRING) /* string instructions are available */
4963 for ( ; bytes > 0; bytes -= move_bytes)
4965 if (bytes > 24 /* move up to 32 bytes at a time */
4973 && ! fixed_regs[12])
4975 move_bytes = (bytes > 32) ? 32 : bytes;
4976 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
4979 expand_block_move_mem (BLKmode,
4982 GEN_INT ((move_bytes == 32)
4986 else if (bytes > 16 /* move up to 24 bytes at a time */
4992 && ! fixed_regs[10])
4994 move_bytes = (bytes > 24) ? 24 : bytes;
4995 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
4998 expand_block_move_mem (BLKmode,
5001 GEN_INT (move_bytes),
5004 else if (bytes > 8 /* move up to 16 bytes at a time */
5010 move_bytes = (bytes > 16) ? 16 : bytes;
5011 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5014 expand_block_move_mem (BLKmode,
5017 GEN_INT (move_bytes),
5020 else if (bytes >= 8 && TARGET_POWERPC64
5021 /* 64-bit loads and stores require word-aligned
5023 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5026 tmp_reg = gen_reg_rtx (DImode);
5027 emit_move_insn (tmp_reg,
5028 expand_block_move_mem (DImode,
5029 src_reg, orig_src));
5030 emit_move_insn (expand_block_move_mem (DImode,
5031 dest_reg, orig_dest),
5034 else if (bytes > 4 && !TARGET_POWERPC64)
5035 { /* move up to 8 bytes at a time */
5036 move_bytes = (bytes > 8) ? 8 : bytes;
5037 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5040 expand_block_move_mem (BLKmode,
5043 GEN_INT (move_bytes),
5046 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5047 { /* move 4 bytes */
5049 tmp_reg = gen_reg_rtx (SImode);
5050 emit_move_insn (tmp_reg,
5051 expand_block_move_mem (SImode,
5052 src_reg, orig_src));
5053 emit_move_insn (expand_block_move_mem (SImode,
5054 dest_reg, orig_dest),
5057 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5058 { /* move 2 bytes */
5060 tmp_reg = gen_reg_rtx (HImode);
5061 emit_move_insn (tmp_reg,
5062 expand_block_move_mem (HImode,
5063 src_reg, orig_src));
5064 emit_move_insn (expand_block_move_mem (HImode,
5065 dest_reg, orig_dest),
5068 else if (bytes == 1) /* move 1 byte */
5071 tmp_reg = gen_reg_rtx (QImode);
5072 emit_move_insn (tmp_reg,
5073 expand_block_move_mem (QImode,
5074 src_reg, orig_src));
5075 emit_move_insn (expand_block_move_mem (QImode,
5076 dest_reg, orig_dest),
5080 { /* move up to 4 bytes at a time */
5081 move_bytes = (bytes > 4) ? 4 : bytes;
5082 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5085 expand_block_move_mem (BLKmode,
5088 GEN_INT (move_bytes),
5092 if (bytes > move_bytes)
5094 if (! TARGET_POWERPC64)
5096 emit_insn (gen_addsi3 (src_reg, src_reg,
5097 GEN_INT (move_bytes)));
5098 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5099 GEN_INT (move_bytes)));
5103 emit_insn (gen_adddi3 (src_reg, src_reg,
5104 GEN_INT (move_bytes)));
5105 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5106 GEN_INT (move_bytes)));
5112 else /* string instructions not available */
5114 num_reg = offset = 0;
5115 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5117 /* Calculate the correct offset for src/dest */
5121 dest_addr = dest_reg;
5125 src_addr = plus_constant (src_reg, offset);
5126 dest_addr = plus_constant (dest_reg, offset);
5129 /* Generate the appropriate load and store, saving the stores
5131 if (bytes >= 8 && TARGET_POWERPC64
5132 /* 64-bit loads and stores require word-aligned
5134 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5137 tmp_reg = gen_reg_rtx (DImode);
5138 emit_insn (gen_movdi (tmp_reg,
5139 expand_block_move_mem (DImode,
5142 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5147 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5150 tmp_reg = gen_reg_rtx (SImode);
5151 emit_insn (gen_movsi (tmp_reg,
5152 expand_block_move_mem (SImode,
5155 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5160 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5163 tmp_reg = gen_reg_rtx (HImode);
5164 emit_insn (gen_movhi (tmp_reg,
5165 expand_block_move_mem (HImode,
5168 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5176 tmp_reg = gen_reg_rtx (QImode);
5177 emit_insn (gen_movqi (tmp_reg,
5178 expand_block_move_mem (QImode,
5181 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5187 if (num_reg >= MAX_MOVE_REG)
5189 for (i = 0; i < num_reg; i++)
5190 emit_insn (stores[i]);
5195 for (i = 0; i < num_reg; i++)
5196 emit_insn (stores[i]);
5203 /* Return 1 if OP is a load multiple operation. It is known to be a
5204 PARALLEL and the first section will be tested. */
5207 load_multiple_operation (op, mode)
5209 enum machine_mode mode ATTRIBUTE_UNUSED;
5211 int count = XVECLEN (op, 0);
5212 unsigned int dest_regno;
5216 /* Perform a quick check so we don't blow up below. */
5218 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5219 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5220 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5223 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5224 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5226 for (i = 1; i < count; i++)
5228 rtx elt = XVECEXP (op, 0, i);
5230 if (GET_CODE (elt) != SET
5231 || GET_CODE (SET_DEST (elt)) != REG
5232 || GET_MODE (SET_DEST (elt)) != SImode
5233 || REGNO (SET_DEST (elt)) != dest_regno + i
5234 || GET_CODE (SET_SRC (elt)) != MEM
5235 || GET_MODE (SET_SRC (elt)) != SImode
5236 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5237 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5238 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5239 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5246 /* Similar, but tests for store multiple. Here, the second vector element
5247 is a CLOBBER. It will be tested later. */
5250 store_multiple_operation (op, mode)
5252 enum machine_mode mode ATTRIBUTE_UNUSED;
5254 int count = XVECLEN (op, 0) - 1;
5255 unsigned int src_regno;
5259 /* Perform a quick check so we don't blow up below. */
5261 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5262 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5263 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5266 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5267 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5269 for (i = 1; i < count; i++)
5271 rtx elt = XVECEXP (op, 0, i + 1);
5273 if (GET_CODE (elt) != SET
5274 || GET_CODE (SET_SRC (elt)) != REG
5275 || GET_MODE (SET_SRC (elt)) != SImode
5276 || REGNO (SET_SRC (elt)) != src_regno + i
5277 || GET_CODE (SET_DEST (elt)) != MEM
5278 || GET_MODE (SET_DEST (elt)) != SImode
5279 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5280 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5281 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5282 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5289 /* Return 1 for a parallel vrsave operation. */
5292 vrsave_operation (op, mode)
5294 enum machine_mode mode ATTRIBUTE_UNUSED;
5296 int count = XVECLEN (op, 0);
5297 unsigned int dest_regno, src_regno;
5301 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5302 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5303 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5306 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5307 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5309 if (dest_regno != VRSAVE_REGNO
5310 && src_regno != VRSAVE_REGNO)
5313 for (i = 1; i < count; i++)
5315 rtx elt = XVECEXP (op, 0, i);
5317 if (GET_CODE (elt) != CLOBBER
5318 && GET_CODE (elt) != SET)
5325 /* Return 1 for an PARALLEL suitable for mtcrf. */
5328 mtcrf_operation (op, mode)
5330 enum machine_mode mode ATTRIBUTE_UNUSED;
5332 int count = XVECLEN (op, 0);
5336 /* Perform a quick check so we don't blow up below. */
5338 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5339 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5340 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5342 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5344 if (GET_CODE (src_reg) != REG
5345 || GET_MODE (src_reg) != SImode
5346 || ! INT_REGNO_P (REGNO (src_reg)))
5349 for (i = 0; i < count; i++)
5351 rtx exp = XVECEXP (op, 0, i);
5355 if (GET_CODE (exp) != SET
5356 || GET_CODE (SET_DEST (exp)) != REG
5357 || GET_MODE (SET_DEST (exp)) != CCmode
5358 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5360 unspec = SET_SRC (exp);
5361 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5363 if (GET_CODE (unspec) != UNSPEC
5364 || XINT (unspec, 1) != 20
5365 || XVECLEN (unspec, 0) != 2
5366 || XVECEXP (unspec, 0, 0) != src_reg
5367 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5368 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5374 /* Return 1 for an PARALLEL suitable for lmw. */
5377 lmw_operation (op, mode)
5379 enum machine_mode mode ATTRIBUTE_UNUSED;
5381 int count = XVECLEN (op, 0);
5382 unsigned int dest_regno;
5384 unsigned int base_regno;
5385 HOST_WIDE_INT offset;
5388 /* Perform a quick check so we don't blow up below. */
5390 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5391 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5392 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5395 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5396 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5399 || count != 32 - (int) dest_regno)
5402 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5405 base_regno = REGNO (src_addr);
5406 if (base_regno == 0)
5409 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5411 offset = INTVAL (XEXP (src_addr, 1));
5412 base_regno = REGNO (XEXP (src_addr, 0));
5417 for (i = 0; i < count; i++)
5419 rtx elt = XVECEXP (op, 0, i);
5422 HOST_WIDE_INT newoffset;
5424 if (GET_CODE (elt) != SET
5425 || GET_CODE (SET_DEST (elt)) != REG
5426 || GET_MODE (SET_DEST (elt)) != SImode
5427 || REGNO (SET_DEST (elt)) != dest_regno + i
5428 || GET_CODE (SET_SRC (elt)) != MEM
5429 || GET_MODE (SET_SRC (elt)) != SImode)
5431 newaddr = XEXP (SET_SRC (elt), 0);
5432 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5437 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5439 addr_reg = XEXP (newaddr, 0);
5440 newoffset = INTVAL (XEXP (newaddr, 1));
5444 if (REGNO (addr_reg) != base_regno
5445 || newoffset != offset + 4 * i)
5452 /* Return 1 for an PARALLEL suitable for stmw. */
5455 stmw_operation (op, mode)
5457 enum machine_mode mode ATTRIBUTE_UNUSED;
5459 int count = XVECLEN (op, 0);
5460 unsigned int src_regno;
5462 unsigned int base_regno;
5463 HOST_WIDE_INT offset;
5466 /* Perform a quick check so we don't blow up below. */
5468 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5469 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5470 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5473 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5474 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5477 || count != 32 - (int) src_regno)
5480 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5483 base_regno = REGNO (dest_addr);
5484 if (base_regno == 0)
5487 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5489 offset = INTVAL (XEXP (dest_addr, 1));
5490 base_regno = REGNO (XEXP (dest_addr, 0));
5495 for (i = 0; i < count; i++)
5497 rtx elt = XVECEXP (op, 0, i);
5500 HOST_WIDE_INT newoffset;
5502 if (GET_CODE (elt) != SET
5503 || GET_CODE (SET_SRC (elt)) != REG
5504 || GET_MODE (SET_SRC (elt)) != SImode
5505 || REGNO (SET_SRC (elt)) != src_regno + i
5506 || GET_CODE (SET_DEST (elt)) != MEM
5507 || GET_MODE (SET_DEST (elt)) != SImode)
5509 newaddr = XEXP (SET_DEST (elt), 0);
5510 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5515 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5517 addr_reg = XEXP (newaddr, 0);
5518 newoffset = INTVAL (XEXP (newaddr, 1));
5522 if (REGNO (addr_reg) != base_regno
5523 || newoffset != offset + 4 * i)
5530 /* A validation routine: say whether CODE, a condition code, and MODE
5531 match. The other alternatives either don't make sense or should
5532 never be generated. */
5535 validate_condition_mode (code, mode)
5537 enum machine_mode mode;
5539 if (GET_RTX_CLASS (code) != '<'
5540 || GET_MODE_CLASS (mode) != MODE_CC)
5543 /* These don't make sense. */
5544 if ((code == GT || code == LT || code == GE || code == LE)
5545 && mode == CCUNSmode)
5548 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5549 && mode != CCUNSmode)
5552 if (mode != CCFPmode
5553 && (code == ORDERED || code == UNORDERED
5554 || code == UNEQ || code == LTGT
5555 || code == UNGT || code == UNLT
5556 || code == UNGE || code == UNLE))
5559 /* These should never be generated except for
5560 flag_unsafe_math_optimizations. */
5561 if (mode == CCFPmode
5562 && ! flag_unsafe_math_optimizations
5563 && (code == LE || code == GE
5564 || code == UNEQ || code == LTGT
5565 || code == UNGT || code == UNLT))
5568 /* These are invalid; the information is not there. */
5569 if (mode == CCEQmode
5570 && code != EQ && code != NE)
5574 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5575 We only check the opcode against the mode of the CC value here. */
5578 branch_comparison_operator (op, mode)
5580 enum machine_mode mode ATTRIBUTE_UNUSED;
5582 enum rtx_code code = GET_CODE (op);
5583 enum machine_mode cc_mode;
5585 if (GET_RTX_CLASS (code) != '<')
5588 cc_mode = GET_MODE (XEXP (op, 0));
5589 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5592 validate_condition_mode (code, cc_mode);
5597 /* Return 1 if OP is a comparison operation that is valid for a branch
5598 insn and which is true if the corresponding bit in the CC register
5602 branch_positive_comparison_operator (op, mode)
5604 enum machine_mode mode;
5608 if (! branch_comparison_operator (op, mode))
5611 code = GET_CODE (op);
5612 return (code == EQ || code == LT || code == GT
5613 || code == LTU || code == GTU
5614 || code == UNORDERED);
5617 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5618 We check the opcode against the mode of the CC value and disallow EQ or
5619 NE comparisons for integers. */
5622 scc_comparison_operator (op, mode)
5624 enum machine_mode mode;
5626 enum rtx_code code = GET_CODE (op);
5627 enum machine_mode cc_mode;
5629 if (GET_MODE (op) != mode && mode != VOIDmode)
5632 if (GET_RTX_CLASS (code) != '<')
5635 cc_mode = GET_MODE (XEXP (op, 0));
5636 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5639 validate_condition_mode (code, cc_mode);
5641 if (code == NE && cc_mode != CCFPmode)
5648 trap_comparison_operator (op, mode)
5650 enum machine_mode mode;
5652 if (mode != VOIDmode && mode != GET_MODE (op))
5654 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5658 boolean_operator (op, mode)
5660 enum machine_mode mode ATTRIBUTE_UNUSED;
5662 enum rtx_code code = GET_CODE (op);
5663 return (code == AND || code == IOR || code == XOR);
5667 boolean_or_operator (op, mode)
5669 enum machine_mode mode ATTRIBUTE_UNUSED;
5671 enum rtx_code code = GET_CODE (op);
5672 return (code == IOR || code == XOR);
5676 min_max_operator (op, mode)
5678 enum machine_mode mode ATTRIBUTE_UNUSED;
5680 enum rtx_code code = GET_CODE (op);
5681 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5684 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5685 mask required to convert the result of a rotate insn into a shift
5686 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5689 includes_lshift_p (shiftop, andop)
5693 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5695 shift_mask <<= INTVAL (shiftop);
5697 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5700 /* Similar, but for right shift. */
5703 includes_rshift_p (shiftop, andop)
5707 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5709 shift_mask >>= INTVAL (shiftop);
5711 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5714 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5715 to perform a left shift. It must have exactly SHIFTOP least
5716 signifigant 0's, then one or more 1's, then zero or more 0's. */
5719 includes_rldic_lshift_p (shiftop, andop)
5723 if (GET_CODE (andop) == CONST_INT)
5725 HOST_WIDE_INT c, lsb, shift_mask;
5728 if (c == 0 || c == ~0)
5732 shift_mask <<= INTVAL (shiftop);
5734 /* Find the least signifigant one bit. */
5737 /* It must coincide with the LSB of the shift mask. */
5738 if (-lsb != shift_mask)
5741 /* Invert to look for the next transition (if any). */
5744 /* Remove the low group of ones (originally low group of zeros). */
5747 /* Again find the lsb, and check we have all 1's above. */
5751 else if (GET_CODE (andop) == CONST_DOUBLE
5752 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5754 HOST_WIDE_INT low, high, lsb;
5755 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5757 low = CONST_DOUBLE_LOW (andop);
5758 if (HOST_BITS_PER_WIDE_INT < 64)
5759 high = CONST_DOUBLE_HIGH (andop);
5761 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5762 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5765 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5767 shift_mask_high = ~0;
5768 if (INTVAL (shiftop) > 32)
5769 shift_mask_high <<= INTVAL (shiftop) - 32;
5773 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5780 return high == -lsb;
5783 shift_mask_low = ~0;
5784 shift_mask_low <<= INTVAL (shiftop);
5788 if (-lsb != shift_mask_low)
5791 if (HOST_BITS_PER_WIDE_INT < 64)
5796 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5799 return high == -lsb;
5803 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5809 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5810 to perform a left shift. It must have SHIFTOP or more least
5811 signifigant 0's, with the remainder of the word 1's. */
5814 includes_rldicr_lshift_p (shiftop, andop)
5818 if (GET_CODE (andop) == CONST_INT)
5820 HOST_WIDE_INT c, lsb, shift_mask;
5823 shift_mask <<= INTVAL (shiftop);
5826 /* Find the least signifigant one bit. */
5829 /* It must be covered by the shift mask.
5830 This test also rejects c == 0. */
5831 if ((lsb & shift_mask) == 0)
5834 /* Check we have all 1's above the transition, and reject all 1's. */
5835 return c == -lsb && lsb != 1;
5837 else if (GET_CODE (andop) == CONST_DOUBLE
5838 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5840 HOST_WIDE_INT low, lsb, shift_mask_low;
5842 low = CONST_DOUBLE_LOW (andop);
5844 if (HOST_BITS_PER_WIDE_INT < 64)
5846 HOST_WIDE_INT high, shift_mask_high;
5848 high = CONST_DOUBLE_HIGH (andop);
5852 shift_mask_high = ~0;
5853 if (INTVAL (shiftop) > 32)
5854 shift_mask_high <<= INTVAL (shiftop) - 32;
5858 if ((lsb & shift_mask_high) == 0)
5861 return high == -lsb;
5867 shift_mask_low = ~0;
5868 shift_mask_low <<= INTVAL (shiftop);
5872 if ((lsb & shift_mask_low) == 0)
5875 return low == -lsb && lsb != 1;
5881 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5882 for lfq and stfq insns.
5884 Note reg1 and reg2 *must* be hard registers. To be sure we will
5885 abort if we are passed pseudo registers. */
5888 registers_ok_for_quad_peep (reg1, reg2)
5891 /* We might have been passed a SUBREG. */
5892 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5895 return (REGNO (reg1) == REGNO (reg2) - 1);
5898 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5899 addr1 and addr2 must be in consecutive memory locations
5900 (addr2 == addr1 + 8). */
5903 addrs_ok_for_quad_peep (addr1, addr2)
5910 /* Extract an offset (if used) from the first addr. */
5911 if (GET_CODE (addr1) == PLUS)
5913 /* If not a REG, return zero. */
5914 if (GET_CODE (XEXP (addr1, 0)) != REG)
5918 reg1 = REGNO (XEXP (addr1, 0));
5919 /* The offset must be constant! */
5920 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5922 offset1 = INTVAL (XEXP (addr1, 1));
5925 else if (GET_CODE (addr1) != REG)
5929 reg1 = REGNO (addr1);
5930 /* This was a simple (mem (reg)) expression. Offset is 0. */
5934 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5935 if (GET_CODE (addr2) != PLUS)
5938 if (GET_CODE (XEXP (addr2, 0)) != REG
5939 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5942 if (reg1 != REGNO (XEXP (addr2, 0)))
5945 /* The offset for the second addr must be 8 more than the first addr. */
5946 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5949 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5954 /* Return the register class of a scratch register needed to copy IN into
5955 or out of a register in CLASS in MODE. If it can be done directly,
5956 NO_REGS is returned. */
5959 secondary_reload_class (class, mode, in)
5960 enum reg_class class;
5961 enum machine_mode mode ATTRIBUTE_UNUSED;
5966 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
5968 /* We cannot copy a symbolic operand directly into anything
5969 other than BASE_REGS for TARGET_ELF. So indicate that a
5970 register from BASE_REGS is needed as an intermediate
5973 On Darwin, pic addresses require a load from memory, which
5974 needs a base register. */
5975 if (class != BASE_REGS
5976 && (GET_CODE (in) == SYMBOL_REF
5977 || GET_CODE (in) == HIGH
5978 || GET_CODE (in) == LABEL_REF
5979 || GET_CODE (in) == CONST))
5983 if (GET_CODE (in) == REG)
5986 if (regno >= FIRST_PSEUDO_REGISTER)
5988 regno = true_regnum (in);
5989 if (regno >= FIRST_PSEUDO_REGISTER)
5993 else if (GET_CODE (in) == SUBREG)
5995 regno = true_regnum (in);
5996 if (regno >= FIRST_PSEUDO_REGISTER)
6002 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6004 if (class == GENERAL_REGS || class == BASE_REGS
6005 || (regno >= 0 && INT_REGNO_P (regno)))
6008 /* Constants, memory, and FP registers can go into FP registers. */
6009 if ((regno == -1 || FP_REGNO_P (regno))
6010 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6013 /* Memory, and AltiVec registers can go into AltiVec registers. */
6014 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6015 && class == ALTIVEC_REGS)
6018 /* We can copy among the CR registers. */
6019 if ((class == CR_REGS || class == CR0_REGS)
6020 && regno >= 0 && CR_REGNO_P (regno))
6023 /* Otherwise, we need GENERAL_REGS. */
6024 return GENERAL_REGS;
6027 /* Given a comparison operation, return the bit number in CCR to test. We
6028 know this is a valid comparison.
6030 SCC_P is 1 if this is for an scc. That means that %D will have been
6031 used instead of %C, so the bits will be in different places.
6033 Return -1 if OP isn't a valid comparison for some reason. */
6040 enum rtx_code code = GET_CODE (op);
6041 enum machine_mode cc_mode;
6046 if (GET_RTX_CLASS (code) != '<')
6051 if (GET_CODE (reg) != REG
6052 || ! CR_REGNO_P (REGNO (reg)))
6055 cc_mode = GET_MODE (reg);
6056 cc_regnum = REGNO (reg);
6057 base_bit = 4 * (cc_regnum - CR0_REGNO);
6059 validate_condition_mode (code, cc_mode);
6064 return scc_p ? base_bit + 3 : base_bit + 2;
6066 return base_bit + 2;
6067 case GT: case GTU: case UNLE:
6068 return base_bit + 1;
6069 case LT: case LTU: case UNGE:
6071 case ORDERED: case UNORDERED:
6072 return base_bit + 3;
6075 /* If scc, we will have done a cror to put the bit in the
6076 unordered position. So test that bit. For integer, this is ! LT
6077 unless this is an scc insn. */
6078 return scc_p ? base_bit + 3 : base_bit;
6081 return scc_p ? base_bit + 3 : base_bit + 1;
6088 /* Return the GOT register. */
6091 rs6000_got_register (value)
6092 rtx value ATTRIBUTE_UNUSED;
6094 /* The second flow pass currently (June 1999) can't update
6095 regs_ever_live without disturbing other parts of the compiler, so
6096 update it here to make the prolog/epilogue code happy. */
6097 if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6098 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
6100 current_function_uses_pic_offset_table = 1;
6102 return pic_offset_table_rtx;
6105 /* Functions to init, mark and free struct machine_function.
6106 These will be called, via pointer variables,
6107 from push_function_context and pop_function_context. */
6110 rs6000_init_machine_status (p)
6113 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6117 rs6000_free_machine_status (p)
6120 if (p->machine == NULL)
6128 /* Print an operand. Recognize special options, documented below. */
6131 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6132 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6134 #define SMALL_DATA_RELOC "sda21"
6135 #define SMALL_DATA_REG 0
6139 print_operand (file, x, code)
6147 /* These macros test for integers and extract the low-order bits. */
6149 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6150 && GET_MODE (X) == VOIDmode)
6152 #define INT_LOWPART(X) \
6153 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6158 /* Write out an instruction after the call which may be replaced
6159 with glue code by the loader. This depends on the AIX version. */
6160 asm_fprintf (file, RS6000_CALL_GLUE);
6163 /* %a is output_address. */
6166 /* If X is a constant integer whose low-order 5 bits are zero,
6167 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6168 in the AIX assembler where "sri" with a zero shift count
6169 writes a trash instruction. */
6170 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6177 /* If constant, low-order 16 bits of constant, unsigned.
6178 Otherwise, write normally. */
6180 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6182 print_operand (file, x, 0);
6186 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6187 for 64-bit mask direction. */
6188 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6191 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6195 /* There used to be a comment for 'C' reading "This is an
6196 optional cror needed for certain floating-point
6197 comparisons. Otherwise write nothing." */
6199 /* Similar, except that this is for an scc, so we must be able to
6200 encode the test in a single bit that is one. We do the above
6201 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6202 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6203 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6205 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6207 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6209 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6212 else if (GET_CODE (x) == NE)
6214 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6216 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6217 base_bit + 2, base_bit + 2);
6222 /* X is a CR register. Print the number of the EQ bit of the CR */
6223 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6224 output_operand_lossage ("invalid %%E value");
6226 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6230 /* X is a CR register. Print the shift count needed to move it
6231 to the high-order four bits. */
6232 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6233 output_operand_lossage ("invalid %%f value");
6235 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6239 /* Similar, but print the count for the rotate in the opposite
6241 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6242 output_operand_lossage ("invalid %%F value");
6244 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6248 /* X is a constant integer. If it is negative, print "m",
6249 otherwise print "z". This is to make a aze or ame insn. */
6250 if (GET_CODE (x) != CONST_INT)
6251 output_operand_lossage ("invalid %%G value");
6252 else if (INTVAL (x) >= 0)
6259 /* If constant, output low-order five bits. Otherwise, write
6262 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6264 print_operand (file, x, 0);
6268 /* If constant, output low-order six bits. Otherwise, write
6271 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6273 print_operand (file, x, 0);
6277 /* Print `i' if this is a constant, else nothing. */
6283 /* Write the bit number in CCR for jump. */
6286 output_operand_lossage ("invalid %%j code");
6288 fprintf (file, "%d", i);
6292 /* Similar, but add one for shift count in rlinm for scc and pass
6293 scc flag to `ccr_bit'. */
6296 output_operand_lossage ("invalid %%J code");
6298 /* If we want bit 31, write a shift count of zero, not 32. */
6299 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6303 /* X must be a constant. Write the 1's complement of the
6306 output_operand_lossage ("invalid %%k value");
6308 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6312 /* X must be a symbolic constant on ELF. Write an
6313 expression suitable for an 'addi' that adds in the low 16
6315 if (GET_CODE (x) != CONST)
6317 print_operand_address (file, x);
6322 if (GET_CODE (XEXP (x, 0)) != PLUS
6323 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6324 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6325 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6326 output_operand_lossage ("invalid %%K value");
6327 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6329 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6333 /* %l is output_asm_label. */
6336 /* Write second word of DImode or DFmode reference. Works on register
6337 or non-indexed memory only. */
6338 if (GET_CODE (x) == REG)
6339 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6340 else if (GET_CODE (x) == MEM)
6342 /* Handle possible auto-increment. Since it is pre-increment and
6343 we have already done it, we can just use an offset of word. */
6344 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6345 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6346 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6349 output_address (XEXP (adjust_address_nv (x, SImode,
6353 if (small_data_operand (x, GET_MODE (x)))
6354 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6355 reg_names[SMALL_DATA_REG]);
6360 /* MB value for a mask operand. */
6361 if (! mask_operand (x, SImode))
6362 output_operand_lossage ("invalid %%m value");
6364 val = INT_LOWPART (x);
6366 /* If the high bit is set and the low bit is not, the value is zero.
6367 If the high bit is zero, the value is the first 1 bit we find from
6369 if ((val & 0x80000000) && ((val & 1) == 0))
6374 else if ((val & 0x80000000) == 0)
6376 for (i = 1; i < 32; i++)
6377 if ((val <<= 1) & 0x80000000)
6379 fprintf (file, "%d", i);
6383 /* Otherwise, look for the first 0 bit from the right. The result is its
6384 number plus 1. We know the low-order bit is one. */
6385 for (i = 0; i < 32; i++)
6386 if (((val >>= 1) & 1) == 0)
6389 /* If we ended in ...01, i would be 0. The correct value is 31, so
6391 fprintf (file, "%d", 31 - i);
6395 /* ME value for a mask operand. */
6396 if (! mask_operand (x, SImode))
6397 output_operand_lossage ("invalid %%M value");
6399 val = INT_LOWPART (x);
6401 /* If the low bit is set and the high bit is not, the value is 31.
6402 If the low bit is zero, the value is the first 1 bit we find from
6404 if ((val & 1) && ((val & 0x80000000) == 0))
6409 else if ((val & 1) == 0)
6411 for (i = 0; i < 32; i++)
6412 if ((val >>= 1) & 1)
6415 /* If we had ....10, i would be 0. The result should be
6416 30, so we need 30 - i. */
6417 fprintf (file, "%d", 30 - i);
6421 /* Otherwise, look for the first 0 bit from the left. The result is its
6422 number minus 1. We know the high-order bit is one. */
6423 for (i = 0; i < 32; i++)
6424 if (((val <<= 1) & 0x80000000) == 0)
6427 fprintf (file, "%d", i);
6430 /* %n outputs the negative of its operand. */
6433 /* Write the number of elements in the vector times 4. */
6434 if (GET_CODE (x) != PARALLEL)
6435 output_operand_lossage ("invalid %%N value");
6437 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6441 /* Similar, but subtract 1 first. */
6442 if (GET_CODE (x) != PARALLEL)
6443 output_operand_lossage ("invalid %%O value");
6445 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6449 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6451 || INT_LOWPART (x) < 0
6452 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6453 output_operand_lossage ("invalid %%p value");
6455 fprintf (file, "%d", i);
6459 /* The operand must be an indirect memory reference. The result
6460 is the register number. */
6461 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6462 || REGNO (XEXP (x, 0)) >= 32)
6463 output_operand_lossage ("invalid %%P value");
6465 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6469 /* This outputs the logical code corresponding to a boolean
6470 expression. The expression may have one or both operands
6471 negated (if one, only the first one). For condition register
6472 logical operations, it will also treat the negated
6473 CR codes as NOTs, but not handle NOTs of them. */
6475 const char *const *t = 0;
6477 enum rtx_code code = GET_CODE (x);
6478 static const char * const tbl[3][3] = {
6479 { "and", "andc", "nor" },
6480 { "or", "orc", "nand" },
6481 { "xor", "eqv", "xor" } };
6485 else if (code == IOR)
6487 else if (code == XOR)
6490 output_operand_lossage ("invalid %%q value");
6492 if (GET_CODE (XEXP (x, 0)) != NOT)
6496 if (GET_CODE (XEXP (x, 1)) == NOT)
6507 /* X is a CR register. Print the mask for `mtcrf'. */
6508 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6509 output_operand_lossage ("invalid %%R value");
6511 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6515 /* Low 5 bits of 32 - value */
6517 output_operand_lossage ("invalid %%s value");
6519 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6523 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6524 CONST_INT 32-bit mask is considered sign-extended so any
6525 transition must occur within the CONST_INT, not on the boundary. */
6526 if (! mask64_operand (x, DImode))
6527 output_operand_lossage ("invalid %%S value");
6529 val = INT_LOWPART (x);
6531 if (val & 1) /* Clear Left */
6533 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6534 if (!((val >>= 1) & 1))
6537 #if HOST_BITS_PER_WIDE_INT == 32
6538 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6540 val = CONST_DOUBLE_HIGH (x);
6545 for (i = 32; i < 64; i++)
6546 if (!((val >>= 1) & 1))
6550 /* i = index of last set bit from right
6551 mask begins at 63 - i from left */
6553 output_operand_lossage ("%%S computed all 1's mask");
6555 fprintf (file, "%d", 63 - i);
6558 else /* Clear Right */
6560 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6561 if ((val >>= 1) & 1)
6564 #if HOST_BITS_PER_WIDE_INT == 32
6565 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6567 val = CONST_DOUBLE_HIGH (x);
6569 if (val == (HOST_WIDE_INT) -1)
6572 for (i = 32; i < 64; i++)
6573 if ((val >>= 1) & 1)
6577 /* i = index of last clear bit from right
6578 mask ends at 62 - i from left */
6580 output_operand_lossage ("%%S computed all 0's mask");
6582 fprintf (file, "%d", 62 - i);
6587 /* Print the symbolic name of a branch target register. */
6588 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6589 && REGNO (x) != COUNT_REGISTER_REGNUM))
6590 output_operand_lossage ("invalid %%T value");
6591 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6592 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6594 fputs ("ctr", file);
6598 /* High-order 16 bits of constant for use in unsigned operand. */
6600 output_operand_lossage ("invalid %%u value");
6602 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6603 (INT_LOWPART (x) >> 16) & 0xffff);
6607 /* High-order 16 bits of constant for use in signed operand. */
6609 output_operand_lossage ("invalid %%v value");
6611 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6612 (INT_LOWPART (x) >> 16) & 0xffff);
6616 /* Print `u' if this has an auto-increment or auto-decrement. */
6617 if (GET_CODE (x) == MEM
6618 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6619 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6624 /* Print the trap code for this operand. */
6625 switch (GET_CODE (x))
6628 fputs ("eq", file); /* 4 */
6631 fputs ("ne", file); /* 24 */
6634 fputs ("lt", file); /* 16 */
6637 fputs ("le", file); /* 20 */
6640 fputs ("gt", file); /* 8 */
6643 fputs ("ge", file); /* 12 */
6646 fputs ("llt", file); /* 2 */
6649 fputs ("lle", file); /* 6 */
6652 fputs ("lgt", file); /* 1 */
6655 fputs ("lge", file); /* 5 */
6663 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6666 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6667 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6669 print_operand (file, x, 0);
6673 /* MB value for a PowerPC64 rldic operand. */
6674 val = (GET_CODE (x) == CONST_INT
6675 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6680 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6681 if ((val <<= 1) < 0)
6684 #if HOST_BITS_PER_WIDE_INT == 32
6685 if (GET_CODE (x) == CONST_INT && i >= 0)
6686 i += 32; /* zero-extend high-part was all 0's */
6687 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6689 val = CONST_DOUBLE_LOW (x);
6696 for ( ; i < 64; i++)
6697 if ((val <<= 1) < 0)
6702 fprintf (file, "%d", i + 1);
6706 if (GET_CODE (x) == MEM
6707 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6712 /* Like 'L', for third word of TImode */
6713 if (GET_CODE (x) == REG)
6714 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6715 else if (GET_CODE (x) == MEM)
6717 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6718 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6719 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6721 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6722 if (small_data_operand (x, GET_MODE (x)))
6723 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6724 reg_names[SMALL_DATA_REG]);
6729 /* X is a SYMBOL_REF. Write out the name preceded by a
6730 period and without any trailing data in brackets. Used for function
6731 names. If we are configured for System V (or the embedded ABI) on
6732 the PowerPC, do not emit the period, since those systems do not use
6733 TOCs and the like. */
6734 if (GET_CODE (x) != SYMBOL_REF)
6737 if (XSTR (x, 0)[0] != '.')
6739 switch (DEFAULT_ABI)
6749 case ABI_AIX_NODESC:
6755 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6757 assemble_name (file, XSTR (x, 0));
6762 /* Like 'L', for last word of TImode. */
6763 if (GET_CODE (x) == REG)
6764 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6765 else if (GET_CODE (x) == MEM)
6767 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6768 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6769 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6771 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6772 if (small_data_operand (x, GET_MODE (x)))
6773 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6774 reg_names[SMALL_DATA_REG]);
6778 /* Print AltiVec memory operand. */
6783 if (GET_CODE (x) != MEM)
6788 if (GET_CODE (tmp) == REG)
6789 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6790 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6792 if (REGNO (XEXP (tmp, 0)) == 0)
6793 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6794 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6796 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6797 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6805 if (GET_CODE (x) == REG)
6806 fprintf (file, "%s", reg_names[REGNO (x)]);
6807 else if (GET_CODE (x) == MEM)
6809 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6810 know the width from the mode. */
6811 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6812 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6813 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6814 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6815 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6816 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6818 output_address (XEXP (x, 0));
6821 output_addr_const (file, x);
6825 output_operand_lossage ("invalid %%xn code");
6829 /* Print the address of an operand. */
6832 print_operand_address (file, x)
6836 if (GET_CODE (x) == REG)
6837 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6838 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6839 || GET_CODE (x) == LABEL_REF)
6841 output_addr_const (file, x);
6842 if (small_data_operand (x, GET_MODE (x)))
6843 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6844 reg_names[SMALL_DATA_REG]);
6845 else if (TARGET_TOC)
6848 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6850 if (REGNO (XEXP (x, 0)) == 0)
6851 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6852 reg_names[ REGNO (XEXP (x, 0)) ]);
6854 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6855 reg_names[ REGNO (XEXP (x, 1)) ]);
6857 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6859 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6860 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6863 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6864 && CONSTANT_P (XEXP (x, 1)))
6866 output_addr_const (file, XEXP (x, 1));
6867 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6871 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
6872 && CONSTANT_P (XEXP (x, 1)))
6874 fprintf (file, "lo16(");
6875 output_addr_const (file, XEXP (x, 1));
6876 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6879 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
6881 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
6883 rtx contains_minus = XEXP (x, 1);
6887 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6888 turn it into (sym) for output_addr_const. */
6889 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
6890 contains_minus = XEXP (contains_minus, 0);
6892 minus = XEXP (contains_minus, 0);
6893 symref = XEXP (minus, 0);
6894 XEXP (contains_minus, 0) = symref;
6899 name = XSTR (symref, 0);
6900 newname = alloca (strlen (name) + sizeof ("@toc"));
6901 strcpy (newname, name);
6902 strcat (newname, "@toc");
6903 XSTR (symref, 0) = newname;
6905 output_addr_const (file, XEXP (x, 1));
6907 XSTR (symref, 0) = name;
6908 XEXP (contains_minus, 0) = minus;
6911 output_addr_const (file, XEXP (x, 1));
6913 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
6919 /* Target hook for assembling integer objects. The powerpc version has
6920 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6921 is defined. It also needs to handle DI-mode objects on 64-bit
6925 rs6000_assemble_integer (x, size, aligned_p)
6930 #ifdef RELOCATABLE_NEEDS_FIXUP
6931 /* Special handling for SI values. */
6932 if (size == 4 && aligned_p)
6934 extern int in_toc_section PARAMS ((void));
6935 static int recurse = 0;
6937 /* For -mrelocatable, we mark all addresses that need to be fixed up
6938 in the .fixup section. */
6939 if (TARGET_RELOCATABLE
6940 && !in_toc_section ()
6941 && !in_text_section ()
6943 && GET_CODE (x) != CONST_INT
6944 && GET_CODE (x) != CONST_DOUBLE
6950 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
6952 ASM_OUTPUT_LABEL (asm_out_file, buf);
6953 fprintf (asm_out_file, "\t.long\t(");
6954 output_addr_const (asm_out_file, x);
6955 fprintf (asm_out_file, ")@fixup\n");
6956 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
6957 ASM_OUTPUT_ALIGN (asm_out_file, 2);
6958 fprintf (asm_out_file, "\t.long\t");
6959 assemble_name (asm_out_file, buf);
6960 fprintf (asm_out_file, "\n\t.previous\n");
6964 /* Remove initial .'s to turn a -mcall-aixdesc function
6965 address into the address of the descriptor, not the function
6967 else if (GET_CODE (x) == SYMBOL_REF
6968 && XSTR (x, 0)[0] == '.'
6969 && DEFAULT_ABI == ABI_AIX)
6971 const char *name = XSTR (x, 0);
6972 while (*name == '.')
6975 fprintf (asm_out_file, "\t.long\t%s\n", name);
6979 #endif /* RELOCATABLE_NEEDS_FIXUP */
6980 return default_assemble_integer (x, size, aligned_p);
6984 rs6000_reverse_condition (mode, code)
6985 enum machine_mode mode;
6988 /* Reversal of FP compares takes care -- an ordered compare
6989 becomes an unordered compare and vice versa. */
6990 if (mode == CCFPmode)
6991 return reverse_condition_maybe_unordered (code);
6993 return reverse_condition (code);
6996 /* Generate a compare for CODE. Return a brand-new rtx that
6997 represents the result of the compare. */
7000 rs6000_generate_compare (code)
7003 enum machine_mode comp_mode;
7006 if (rs6000_compare_fp_p)
7007 comp_mode = CCFPmode;
7008 else if (code == GTU || code == LTU
7009 || code == GEU || code == LEU)
7010 comp_mode = CCUNSmode;
7014 /* First, the compare. */
7015 compare_result = gen_reg_rtx (comp_mode);
7016 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7017 gen_rtx_COMPARE (comp_mode,
7019 rs6000_compare_op1)));
7021 /* Some kinds of FP comparisons need an OR operation;
7022 except for flag_unsafe_math_optimizations we don't bother. */
7023 if (rs6000_compare_fp_p
7024 && ! flag_unsafe_math_optimizations
7025 && (code == LE || code == GE
7026 || code == UNEQ || code == LTGT
7027 || code == UNGT || code == UNLT))
7029 enum rtx_code or1, or2;
7030 rtx or1_rtx, or2_rtx, compare2_rtx;
7031 rtx or_result = gen_reg_rtx (CCEQmode);
7035 case LE: or1 = LT; or2 = EQ; break;
7036 case GE: or1 = GT; or2 = EQ; break;
7037 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7038 case LTGT: or1 = LT; or2 = GT; break;
7039 case UNGT: or1 = UNORDERED; or2 = GT; break;
7040 case UNLT: or1 = UNORDERED; or2 = LT; break;
7043 validate_condition_mode (or1, comp_mode);
7044 validate_condition_mode (or2, comp_mode);
7045 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7046 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7047 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7048 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7050 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7052 compare_result = or_result;
7056 validate_condition_mode (code, GET_MODE (compare_result));
7058 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7062 /* Emit the RTL for an sCOND pattern. */
7065 rs6000_emit_sCOND (code, result)
7070 enum machine_mode op_mode;
7072 condition_rtx = rs6000_generate_compare (code);
7074 op_mode = GET_MODE (rs6000_compare_op0);
7075 if (op_mode == VOIDmode)
7076 op_mode = GET_MODE (rs6000_compare_op1);
7078 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7080 PUT_MODE (condition_rtx, DImode);
7081 convert_move (result, condition_rtx, 0);
7085 PUT_MODE (condition_rtx, SImode);
7086 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7090 /* Emit a branch of kind CODE to location LOC. */
7093 rs6000_emit_cbranch (code, loc)
7097 rtx condition_rtx, loc_ref;
7099 condition_rtx = rs6000_generate_compare (code);
7100 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7101 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7102 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7106 /* Return the string to output a conditional branch to LABEL, which is
7107 the operand number of the label, or -1 if the branch is really a
7110 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7111 condition code register and its mode specifies what kind of
7114 REVERSED is non-zero if we should reverse the sense of the comparison.
7116 INSN is the insn. */
7119 output_cbranch (op, label, reversed, insn)
7125 static char string[64];
7126 enum rtx_code code = GET_CODE (op);
7127 rtx cc_reg = XEXP (op, 0);
7128 enum machine_mode mode = GET_MODE (cc_reg);
7129 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7130 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7131 int really_reversed = reversed ^ need_longbranch;
7137 validate_condition_mode (code, mode);
7139 /* Work out which way this really branches. We could use
7140 reverse_condition_maybe_unordered here always but this
7141 makes the resulting assembler clearer. */
7142 if (really_reversed)
7143 code = rs6000_reverse_condition (mode, code);
7147 /* Not all of these are actually distinct opcodes, but
7148 we distinguish them for clarity of the resulting assembler. */
7150 ccode = "ne"; break;
7152 ccode = "eq"; break;
7154 ccode = "ge"; break;
7155 case GT: case GTU: case UNGT:
7156 ccode = "gt"; break;
7158 ccode = "le"; break;
7159 case LT: case LTU: case UNLT:
7160 ccode = "lt"; break;
7161 case UNORDERED: ccode = "un"; break;
7162 case ORDERED: ccode = "nu"; break;
7163 case UNGE: ccode = "nl"; break;
7164 case UNLE: ccode = "ng"; break;
7169 /* Maybe we have a guess as to how likely the branch is.
7170 The old mnemonics don't have a way to specify this information. */
7171 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7172 if (note != NULL_RTX)
7174 /* PROB is the difference from 50%. */
7175 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7177 /* For branches that are very close to 50%, assume not-taken. */
7178 if (abs (prob) > REG_BR_PROB_BASE / 20
7179 && ((prob > 0) ^ need_longbranch))
7188 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7190 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7192 /* We need to escape any '%' characters in the reg_names string.
7193 Assume they'd only be the first character... */
7194 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7196 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7200 /* If the branch distance was too far, we may have to use an
7201 unconditional branch to go the distance. */
7202 if (need_longbranch)
7203 s += sprintf (s, ",$+8\n\tb %s", label);
7205 s += sprintf (s, ",%s", label);
7211 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7212 operands of the last comparison is nonzero/true, FALSE_COND if it
7213 is zero/false. Return 0 if the hardware has no such operation. */
7216 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7222 enum rtx_code code = GET_CODE (op);
7223 rtx op0 = rs6000_compare_op0;
7224 rtx op1 = rs6000_compare_op1;
7226 enum machine_mode mode = GET_MODE (op0);
7229 /* First, work out if the hardware can do this at all, or
7230 if it's too slow... */
7231 /* If the comparison is an integer one, since we only have fsel
7232 it'll be cheaper to use a branch. */
7233 if (! rs6000_compare_fp_p)
7236 /* Eliminate half of the comparisons by switching operands, this
7237 makes the remaining code simpler. */
7238 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7239 || code == LTGT || code == LT)
7241 code = reverse_condition_maybe_unordered (code);
7243 true_cond = false_cond;
7247 /* UNEQ and LTGT take four instructions for a comparison with zero,
7248 it'll probably be faster to use a branch here too. */
7252 if (GET_CODE (op1) == CONST_DOUBLE)
7253 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7255 /* We're going to try to implement comparions by performing
7256 a subtract, then comparing against zero. Unfortunately,
7257 Inf - Inf is NaN which is not zero, and so if we don't
7258 know that the the operand is finite and the comparison
7259 would treat EQ different to UNORDERED, we can't do it. */
7260 if (! flag_unsafe_math_optimizations
7261 && code != GT && code != UNGE
7262 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7263 /* Constructs of the form (a OP b ? a : b) are safe. */
7264 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7265 || (! rtx_equal_p (op0, true_cond)
7266 && ! rtx_equal_p (op1, true_cond))))
7268 /* At this point we know we can use fsel. */
7270 /* Reduce the comparison to a comparison against zero. */
7271 temp = gen_reg_rtx (mode);
7272 emit_insn (gen_rtx_SET (VOIDmode, temp,
7273 gen_rtx_MINUS (mode, op0, op1)));
7275 op1 = CONST0_RTX (mode);
7277 /* If we don't care about NaNs we can reduce some of the comparisons
7278 down to faster ones. */
7279 if (flag_unsafe_math_optimizations)
7285 true_cond = false_cond;
7298 /* Now, reduce everything down to a GE. */
7305 temp = gen_reg_rtx (mode);
7306 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7311 temp = gen_reg_rtx (mode);
7312 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7317 temp = gen_reg_rtx (mode);
7318 emit_insn (gen_rtx_SET (VOIDmode, temp,
7320 gen_rtx_ABS (mode, op0))));
7325 temp = gen_reg_rtx (mode);
7326 emit_insn (gen_rtx_SET (VOIDmode, temp,
7327 gen_rtx_IF_THEN_ELSE (mode,
7328 gen_rtx_GE (VOIDmode,
7330 true_cond, false_cond)));
7332 true_cond = false_cond;
7334 temp = gen_reg_rtx (mode);
7335 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7340 temp = gen_reg_rtx (mode);
7341 emit_insn (gen_rtx_SET (VOIDmode, temp,
7342 gen_rtx_IF_THEN_ELSE (mode,
7343 gen_rtx_GE (VOIDmode,
7345 true_cond, false_cond)));
7347 false_cond = true_cond;
7349 temp = gen_reg_rtx (mode);
7350 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7358 emit_insn (gen_rtx_SET (VOIDmode, dest,
7359 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7360 gen_rtx_GE (VOIDmode,
7362 true_cond, false_cond)));
7367 rs6000_emit_minmax (dest, code, op0, op1)
7373 enum machine_mode mode = GET_MODE (op0);
7375 if (code == SMAX || code == UMAX)
7376 target = emit_conditional_move (dest, GE, op0, op1, mode,
7379 target = emit_conditional_move (dest, GE, op0, op1, mode,
7381 if (target == NULL_RTX)
7384 emit_move_insn (dest, target);
7387 /* This page contains routines that are used to determine what the
7388 function prologue and epilogue code will do and write them out. */
7390 /* Return the first fixed-point register that is required to be
7391 saved. 32 if none. */
7394 first_reg_to_save ()
7398 /* Find lowest numbered live register. */
7399 for (first_reg = 13; first_reg <= 31; first_reg++)
7400 if (regs_ever_live[first_reg]
7401 && (! call_used_regs[first_reg]
7402 || (first_reg == PIC_OFFSET_TABLE_REGNUM
7403 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7404 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7407 if (current_function_profile)
7409 /* AIX must save/restore every register that contains a parameter
7410 before/after the .__mcount call plus an additional register
7411 for the static chain, if needed; use registers from 30 down to 22
7413 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7415 int last_parm_reg, profile_first_reg;
7417 /* Figure out last used parameter register. The proper thing
7418 to do is to walk incoming args of the function. A function
7419 might have live parameter registers even if it has no
7421 for (last_parm_reg = 10;
7422 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7426 /* Calculate first reg for saving parameter registers
7428 Skip reg 31 which may contain the frame pointer. */
7429 profile_first_reg = (33 - last_parm_reg
7430 - (current_function_needs_context ? 1 : 0));
7432 /* Need to skip another reg to account for R31 being PICBASE
7433 (when flag_pic is set) or R30 being used as the frame
7434 pointer (when flag_pic is not set). */
7435 --profile_first_reg;
7437 /* Do not save frame pointer if no parameters needs to be saved. */
7438 if (profile_first_reg == 31)
7439 profile_first_reg = 32;
7441 if (first_reg > profile_first_reg)
7442 first_reg = profile_first_reg;
7445 /* SVR4 may need one register to preserve the static chain. */
7446 else if (current_function_needs_context)
7448 /* Skip reg 31 which may contain the frame pointer. */
7455 if (flag_pic && current_function_uses_pic_offset_table &&
7456 (first_reg > PIC_OFFSET_TABLE_REGNUM))
7457 return PIC_OFFSET_TABLE_REGNUM;
7463 /* Similar, for FP regs. */
7466 first_fp_reg_to_save ()
7470 /* Find lowest numbered live register. */
7471 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7472 if (regs_ever_live[first_reg])
7478 /* Similar, for AltiVec regs. */
7481 first_altivec_reg_to_save ()
7485 /* Stack frame remains as is unless we are in AltiVec ABI. */
7486 if (! TARGET_ALTIVEC_ABI)
7487 return LAST_ALTIVEC_REGNO + 1;
7489 /* Find lowest numbered live register. */
7490 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7491 if (regs_ever_live[i])
7497 /* Return a 32-bit mask of the AltiVec registers we need to set in
7498 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7499 the 32-bit word is 0. */
7502 compute_vrsave_mask ()
7504 unsigned int i, mask = 0;
7506 /* First, find out if we use _any_ altivec registers. */
7507 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7508 if (regs_ever_live[i])
7509 mask |= ALTIVEC_REG_BIT (i);
7514 /* Next, add all registers that are call-clobbered. We do this
7515 because post-reload register optimizers such as regrename_optimize
7516 may choose to use them. They never change the register class
7517 chosen by reload, so cannot create new uses of altivec registers
7518 if there were none before, so the early exit above is safe. */
7519 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7520 altivec registers not saved in the mask, which might well make the
7521 adjustments below more effective in eliding the save/restore of
7522 VRSAVE in small functions. */
7523 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7524 if (call_used_regs[i])
7525 mask |= ALTIVEC_REG_BIT (i);
7527 /* Next, remove the argument registers from the set. These must
7528 be in the VRSAVE mask set by the caller, so we don't need to add
7529 them in again. More importantly, the mask we compute here is
7530 used to generate CLOBBERs in the set_vrsave insn, and we do not
7531 wish the argument registers to die. */
7532 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7533 mask &= ~ALTIVEC_REG_BIT (i);
7535 /* Similarly, remove the return value from the set. */
7538 diddle_return_value (is_altivec_return_reg, &yes);
7540 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7547 is_altivec_return_reg (reg, xyes)
7551 bool *yes = (bool *) xyes;
7552 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7557 /* Calculate the stack information for the current function. This is
7558 complicated by having two separate calling sequences, the AIX calling
7559 sequence and the V.4 calling sequence.
7561 AIX (and Darwin/Mac OS X) stack frames look like:
7563 SP----> +---------------------------------------+
7564 | back chain to caller | 0 0
7565 +---------------------------------------+
7566 | saved CR | 4 8 (8-11)
7567 +---------------------------------------+
7569 +---------------------------------------+
7570 | reserved for compilers | 12 24
7571 +---------------------------------------+
7572 | reserved for binders | 16 32
7573 +---------------------------------------+
7574 | saved TOC pointer | 20 40
7575 +---------------------------------------+
7576 | Parameter save area (P) | 24 48
7577 +---------------------------------------+
7578 | Alloca space (A) | 24+P etc.
7579 +---------------------------------------+
7580 | Local variable space (L) | 24+P+A
7581 +---------------------------------------+
7582 | Float/int conversion temporary (X) | 24+P+A+L
7583 +---------------------------------------+
7584 | Save area for AltiVec registers (W) | 24+P+A+L+X
7585 +---------------------------------------+
7586 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7587 +---------------------------------------+
7588 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7589 +---------------------------------------+
7590 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7591 +---------------------------------------+
7592 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7593 +---------------------------------------+
7594 old SP->| back chain to caller's caller |
7595 +---------------------------------------+
7597 The required alignment for AIX configurations is two words (i.e., 8
7601 V.4 stack frames look like:
7603 SP----> +---------------------------------------+
7604 | back chain to caller | 0
7605 +---------------------------------------+
7606 | caller's saved LR | 4
7607 +---------------------------------------+
7608 | Parameter save area (P) | 8
7609 +---------------------------------------+
7610 | Alloca space (A) | 8+P
7611 +---------------------------------------+
7612 | Varargs save area (V) | 8+P+A
7613 +---------------------------------------+
7614 | Local variable space (L) | 8+P+A+V
7615 +---------------------------------------+
7616 | Float/int conversion temporary (X) | 8+P+A+V+L
7617 +---------------------------------------+
7618 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7619 +---------------------------------------+
7620 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7621 +---------------------------------------+
7622 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7623 +---------------------------------------+
7624 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7625 +---------------------------------------+
7626 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7627 +---------------------------------------+
7628 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7629 +---------------------------------------+
7630 old SP->| back chain to caller's caller |
7631 +---------------------------------------+
7633 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7634 given. (But note below and in sysv4.h that we require only 8 and
7635 may round up the size of our stack frame anyways. The historical
7636 reason is early versions of powerpc-linux which didn't properly
7637 align the stack at program startup. A happy side-effect is that
7638 -mno-eabi libraries can be used with -meabi programs.)
7640 The EABI configuration defaults to the V.4 layout, unless
7641 -mcall-aix is used, in which case the AIX layout is used. However,
7642 the stack alignment requirements may differ. If -mno-eabi is not
7643 given, the required stack alignment is 8 bytes; if -mno-eabi is
7644 given, the required alignment is 16 bytes. (But see V.4 comment
7647 #ifndef ABI_STACK_BOUNDARY
7648 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7652 rs6000_stack_info ()
7654 static rs6000_stack_t info, zero_info;
7655 rs6000_stack_t *info_ptr = &info;
7656 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7657 enum rs6000_abi abi;
7661 /* Zero all fields portably. */
7664 /* Select which calling sequence. */
7665 info_ptr->abi = abi = DEFAULT_ABI;
7667 /* Calculate which registers need to be saved & save area size. */
7668 info_ptr->first_gp_reg_save = first_reg_to_save ();
7669 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7670 even if it currently looks like we won't. */
7671 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7672 || (flag_pic == 1 && abi == ABI_V4)
7673 || (flag_pic && abi == ABI_DARWIN))
7674 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7675 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7677 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7679 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7680 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7682 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7683 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7684 - info_ptr->first_altivec_reg_save);
7686 /* Does this function call anything? */
7687 info_ptr->calls_p = (! current_function_is_leaf
7688 || cfun->machine->ra_needs_full_frame);
7690 /* Determine if we need to save the link register. */
7691 if (rs6000_ra_ever_killed ()
7692 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7693 #ifdef TARGET_RELOCATABLE
7694 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7696 || (info_ptr->first_fp_reg_save != 64
7697 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7698 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7699 || (abi == ABI_V4 && current_function_calls_alloca)
7700 || (DEFAULT_ABI == ABI_DARWIN
7702 && current_function_uses_pic_offset_table)
7703 || info_ptr->calls_p)
7705 info_ptr->lr_save_p = 1;
7706 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7709 /* Determine if we need to save the condition code registers. */
7710 if (regs_ever_live[CR2_REGNO]
7711 || regs_ever_live[CR3_REGNO]
7712 || regs_ever_live[CR4_REGNO])
7714 info_ptr->cr_save_p = 1;
7716 info_ptr->cr_size = reg_size;
7719 /* If the current function calls __builtin_eh_return, then we need
7720 to allocate stack space for registers that will hold data for
7721 the exception handler. */
7722 if (current_function_calls_eh_return)
7725 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7727 ehrd_size = i * UNITS_PER_WORD;
7732 /* Determine various sizes. */
7733 info_ptr->reg_size = reg_size;
7734 info_ptr->fixed_size = RS6000_SAVE_AREA;
7735 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7736 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7737 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7740 if (TARGET_ALTIVEC_ABI)
7742 info_ptr->vrsave_mask = compute_vrsave_mask ();
7743 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7747 info_ptr->vrsave_mask = 0;
7748 info_ptr->vrsave_size = 0;
7751 /* Calculate the offsets. */
7759 case ABI_AIX_NODESC:
7761 info_ptr->fp_save_offset = - info_ptr->fp_size;
7762 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7764 if (TARGET_ALTIVEC_ABI)
7766 info_ptr->vrsave_save_offset
7767 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7769 /* Align stack so vector save area is on a quadword boundary. */
7770 if (info_ptr->altivec_size != 0)
7771 info_ptr->altivec_padding_size
7772 = 16 - (-info_ptr->vrsave_save_offset % 16);
7774 info_ptr->altivec_padding_size = 0;
7776 info_ptr->altivec_save_offset
7777 = info_ptr->vrsave_save_offset
7778 - info_ptr->altivec_padding_size
7779 - info_ptr->altivec_size;
7781 /* Adjust for AltiVec case. */
7782 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7785 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7786 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7787 info_ptr->lr_save_offset = 2*reg_size;
7791 info_ptr->fp_save_offset = - info_ptr->fp_size;
7792 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7793 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7795 if (TARGET_ALTIVEC_ABI)
7797 info_ptr->vrsave_save_offset
7798 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7800 /* Align stack so vector save area is on a quadword boundary. */
7801 if (info_ptr->altivec_size != 0)
7802 info_ptr->altivec_padding_size
7803 = 16 - (-info_ptr->vrsave_save_offset % 16);
7805 info_ptr->altivec_padding_size = 0;
7807 info_ptr->altivec_save_offset
7808 = info_ptr->vrsave_save_offset
7809 - info_ptr->altivec_padding_size
7810 - info_ptr->altivec_size;
7812 /* Adjust for AltiVec case. */
7813 info_ptr->toc_save_offset
7814 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7817 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7818 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7819 info_ptr->lr_save_offset = reg_size;
7823 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7825 + info_ptr->altivec_size
7826 + info_ptr->altivec_padding_size
7827 + info_ptr->vrsave_size
7831 + info_ptr->vrsave_size
7832 + info_ptr->toc_size,
7833 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7836 total_raw_size = (info_ptr->vars_size
7837 + info_ptr->parm_size
7838 + info_ptr->save_size
7839 + info_ptr->varargs_size
7840 + info_ptr->fixed_size);
7842 info_ptr->total_size =
7843 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7845 /* Determine if we need to allocate any stack frame:
7847 For AIX we need to push the stack if a frame pointer is needed
7848 (because the stack might be dynamically adjusted), if we are
7849 debugging, if we make calls, or if the sum of fp_save, gp_save,
7850 and local variables are more than the space needed to save all
7851 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7852 + 18*8 = 288 (GPR13 reserved).
7854 For V.4 we don't have the stack cushion that AIX uses, but assume
7855 that the debugger can handle stackless frames. */
7857 if (info_ptr->calls_p)
7858 info_ptr->push_p = 1;
7860 else if (abi == ABI_V4)
7861 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7864 info_ptr->push_p = (frame_pointer_needed
7865 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7866 || ((total_raw_size - info_ptr->fixed_size)
7867 > (TARGET_32BIT ? 220 : 288)));
7869 /* Zero offsets if we're not saving those registers. */
7870 if (info_ptr->fp_size == 0)
7871 info_ptr->fp_save_offset = 0;
7873 if (info_ptr->gp_size == 0)
7874 info_ptr->gp_save_offset = 0;
7876 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7877 info_ptr->altivec_save_offset = 0;
7879 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7880 info_ptr->vrsave_save_offset = 0;
7882 if (! info_ptr->lr_save_p)
7883 info_ptr->lr_save_offset = 0;
7885 if (! info_ptr->cr_save_p)
7886 info_ptr->cr_save_offset = 0;
7888 if (! info_ptr->toc_save_p)
7889 info_ptr->toc_save_offset = 0;
7895 debug_stack_info (info)
7896 rs6000_stack_t *info;
7898 const char *abi_string;
7901 info = rs6000_stack_info ();
7903 fprintf (stderr, "\nStack information for function %s:\n",
7904 ((current_function_decl && DECL_NAME (current_function_decl))
7905 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
7910 default: abi_string = "Unknown"; break;
7911 case ABI_NONE: abi_string = "NONE"; break;
7913 case ABI_AIX_NODESC: abi_string = "AIX"; break;
7914 case ABI_DARWIN: abi_string = "Darwin"; break;
7915 case ABI_V4: abi_string = "V.4"; break;
7918 fprintf (stderr, "\tABI = %5s\n", abi_string);
7920 if (TARGET_ALTIVEC_ABI)
7921 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
7923 if (info->first_gp_reg_save != 32)
7924 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
7926 if (info->first_fp_reg_save != 64)
7927 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
7929 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
7930 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
7931 info->first_altivec_reg_save);
7933 if (info->lr_save_p)
7934 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
7936 if (info->cr_save_p)
7937 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
7939 if (info->toc_save_p)
7940 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
7942 if (info->vrsave_mask)
7943 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
7946 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
7949 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
7951 if (info->gp_save_offset)
7952 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
7954 if (info->fp_save_offset)
7955 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
7957 if (info->altivec_save_offset)
7958 fprintf (stderr, "\taltivec_save_offset = %5d\n",
7959 info->altivec_save_offset);
7961 if (info->vrsave_save_offset)
7962 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
7963 info->vrsave_save_offset);
7965 if (info->lr_save_offset)
7966 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
7968 if (info->cr_save_offset)
7969 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
7971 if (info->toc_save_offset)
7972 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
7974 if (info->varargs_save_offset)
7975 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
7977 if (info->total_size)
7978 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
7980 if (info->varargs_size)
7981 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
7983 if (info->vars_size)
7984 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
7986 if (info->parm_size)
7987 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
7989 if (info->fixed_size)
7990 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
7993 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
7996 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
7998 if (info->altivec_size)
7999 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8001 if (info->vrsave_size)
8002 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8004 if (info->altivec_padding_size)
8005 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8006 info->altivec_padding_size);
8009 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8012 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8015 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8017 if (info->save_size)
8018 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8020 if (info->reg_size != 4)
8021 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8023 fprintf (stderr, "\n");
8027 rs6000_return_addr (count, frame)
8031 /* Currently we don't optimize very well between prolog and body
8032 code and for PIC code the code can be actually quite bad, so
8033 don't try to be too clever here. */
8034 if (count != 0 || flag_pic != 0)
8036 cfun->machine->ra_needs_full_frame = 1;
8043 plus_constant (copy_to_reg
8044 (gen_rtx_MEM (Pmode,
8045 memory_address (Pmode, frame))),
8046 RETURN_ADDRESS_OFFSET)));
8049 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8053 rs6000_ra_ever_killed ()
8057 #ifdef ASM_OUTPUT_MI_THUNK
8058 if (current_function_is_thunk)
8061 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8062 || cfun->machine->ra_needs_full_frame)
8063 return regs_ever_live[LINK_REGISTER_REGNUM];
8065 push_topmost_sequence ();
8067 pop_topmost_sequence ();
8069 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8073 /* Add a REG_MAYBE_DEAD note to the insn. */
8075 rs6000_maybe_dead (insn)
8078 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8083 /* Emit instructions needed to load the TOC register.
8084 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8085 a constant pool; or for SVR4 -fpic. */
8088 rs6000_emit_load_toc_table (fromprolog)
8092 dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
8094 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8096 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8098 rtx temp = (fromprolog
8099 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8100 : gen_reg_rtx (Pmode));
8101 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8102 rs6000_maybe_dead (emit_move_insn (dest, temp));
8104 else if (flag_pic == 2)
8107 rtx tempLR = (fromprolog
8108 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8109 : gen_reg_rtx (Pmode));
8110 rtx temp0 = (fromprolog
8111 ? gen_rtx_REG (Pmode, 0)
8112 : gen_reg_rtx (Pmode));
8115 /* possibly create the toc section */
8116 if (! toc_initialized)
8119 function_section (current_function_decl);
8126 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8127 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8129 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8130 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8132 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8134 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8135 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8142 static int reload_toc_labelno = 0;
8144 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8146 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8147 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8149 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8152 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8153 rs6000_maybe_dead (emit_move_insn (temp0,
8154 gen_rtx_MEM (Pmode, dest)));
8156 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8158 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8160 /* This is for AIX code running in non-PIC ELF. */
8163 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8164 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8166 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8167 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8175 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8177 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8182 get_TOC_alias_set ()
8184 static int set = -1;
8186 set = new_alias_set ();
8190 /* This retuns nonzero if the current function uses the TOC. This is
8191 determined by the presence of (unspec ... 7), which is generated by
8192 the various load_toc_* patterns. */
8199 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8202 rtx pat = PATTERN (insn);
8205 if (GET_CODE (pat) == PARALLEL)
8206 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8207 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8208 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8215 create_TOC_reference (symbol)
8218 return gen_rtx_PLUS (Pmode,
8219 gen_rtx_REG (Pmode, TOC_REGISTER),
8220 gen_rtx_CONST (Pmode,
8221 gen_rtx_MINUS (Pmode, symbol,
8222 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8226 /* __throw will restore its own return address to be the same as the
8227 return address of the function that the throw is being made to.
8228 This is unfortunate, because we want to check the original
8229 return address to see if we need to restore the TOC.
8230 So we have to squirrel it away here.
8231 This is used only in compiling __throw and __rethrow.
8233 Most of this code should be removed by CSE. */
8234 static rtx insn_after_throw;
8236 /* This does the saving... */
8238 rs6000_aix_emit_builtin_unwind_init ()
8241 rtx stack_top = gen_reg_rtx (Pmode);
8242 rtx opcode_addr = gen_reg_rtx (Pmode);
8244 insn_after_throw = gen_reg_rtx (SImode);
8246 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8247 emit_move_insn (stack_top, mem);
8249 mem = gen_rtx_MEM (Pmode,
8250 gen_rtx_PLUS (Pmode, stack_top,
8251 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8252 emit_move_insn (opcode_addr, mem);
8253 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8256 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8257 in _eh.o). Only used on AIX.
8259 The idea is that on AIX, function calls look like this:
8260 bl somefunction-trampoline
8264 somefunction-trampoline:
8266 ... load function address in the count register ...
8268 or like this, if the linker determines that this is not a cross-module call
8269 and so the TOC need not be restored:
8272 or like this, if the compiler could determine that this is not a
8275 now, the tricky bit here is that register 2 is saved and restored
8276 by the _linker_, so we can't readily generate debugging information
8277 for it. So we need to go back up the call chain looking at the
8278 insns at return addresses to see which calls saved the TOC register
8279 and so see where it gets restored from.
8281 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8282 just before the actual epilogue.
8284 On the bright side, this incurs no space or time overhead unless an
8285 exception is thrown, except for the extra code in libgcc.a.
8287 The parameter STACKSIZE is a register containing (at runtime)
8288 the amount to be popped off the stack in addition to the stack frame
8289 of this routine (which will be __throw or __rethrow, and so is
8290 guaranteed to have a stack frame). */
8293 rs6000_emit_eh_toc_restore (stacksize)
8297 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8298 rtx tocompare = gen_reg_rtx (SImode);
8299 rtx opcode = gen_reg_rtx (SImode);
8300 rtx opcode_addr = gen_reg_rtx (Pmode);
8302 rtx loop_start = gen_label_rtx ();
8303 rtx no_toc_restore_needed = gen_label_rtx ();
8304 rtx loop_exit = gen_label_rtx ();
8306 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8307 set_mem_alias_set (mem, rs6000_sr_alias_set);
8308 emit_move_insn (bottom_of_stack, mem);
8310 top_of_stack = expand_binop (Pmode, add_optab,
8311 bottom_of_stack, stacksize,
8312 NULL_RTX, 1, OPTAB_WIDEN);
8314 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8315 : 0xE8410028, SImode));
8317 if (insn_after_throw == NULL_RTX)
8319 emit_move_insn (opcode, insn_after_throw);
8321 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8322 emit_label (loop_start);
8324 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8325 SImode, NULL_RTX, NULL_RTX,
8326 no_toc_restore_needed);
8328 mem = gen_rtx_MEM (Pmode,
8329 gen_rtx_PLUS (Pmode, bottom_of_stack,
8330 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8331 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8333 emit_label (no_toc_restore_needed);
8334 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8335 Pmode, NULL_RTX, NULL_RTX,
8338 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8339 set_mem_alias_set (mem, rs6000_sr_alias_set);
8340 emit_move_insn (bottom_of_stack, mem);
8342 mem = gen_rtx_MEM (Pmode,
8343 gen_rtx_PLUS (Pmode, bottom_of_stack,
8344 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8345 emit_move_insn (opcode_addr, mem);
8346 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8348 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8349 emit_jump (loop_start);
8350 emit_note (NULL, NOTE_INSN_LOOP_END);
8351 emit_label (loop_exit);
8353 #endif /* TARGET_AIX */
8355 /* This ties together stack memory (MEM with an alias set of
8356 rs6000_sr_alias_set) and the change to the stack pointer. */
8359 rs6000_emit_stack_tie ()
8361 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8363 set_mem_alias_set (mem, rs6000_sr_alias_set);
8364 emit_insn (gen_stack_tie (mem));
8367 /* Emit the correct code for allocating stack space, as insns.
8368 If COPY_R12, make sure a copy of the old frame is left in r12.
8369 The generated code may use hard register 0 as a temporary. */
8372 rs6000_emit_allocate_stack (size, copy_r12)
8377 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8378 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8379 rtx todec = GEN_INT (-size);
8381 if (current_function_limit_stack)
8383 if (REG_P (stack_limit_rtx)
8384 && REGNO (stack_limit_rtx) > 1
8385 && REGNO (stack_limit_rtx) <= 31)
8387 emit_insn (Pmode == SImode
8388 ? gen_addsi3 (tmp_reg,
8391 : gen_adddi3 (tmp_reg,
8395 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8398 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8400 && DEFAULT_ABI == ABI_V4)
8402 rtx toload = gen_rtx_CONST (VOIDmode,
8403 gen_rtx_PLUS (Pmode,
8407 emit_insn (gen_elf_high (tmp_reg, toload));
8408 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8409 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8413 warning ("stack limit expression is not supported");
8416 if (copy_r12 || ! TARGET_UPDATE)
8417 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8423 /* Need a note here so that try_split doesn't get confused. */
8424 if (get_last_insn() == NULL_RTX)
8425 emit_note (0, NOTE_INSN_DELETED);
8426 insn = emit_move_insn (tmp_reg, todec);
8427 try_split (PATTERN (insn), insn, 0);
8431 if (Pmode == SImode)
8432 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8435 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8440 if (Pmode == SImode)
8441 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8443 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8444 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8445 gen_rtx_REG (Pmode, 12));
8448 RTX_FRAME_RELATED_P (insn) = 1;
8450 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8451 gen_rtx_SET (VOIDmode, stack_reg,
8452 gen_rtx_PLUS (Pmode, stack_reg,
8457 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8460 (mem (plus (blah) (regXX)))
8464 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8467 altivec_frame_fixup (insn, reg, val)
8473 real = copy_rtx (PATTERN (insn));
8475 real = replace_rtx (real, reg, GEN_INT (val));
8477 RTX_FRAME_RELATED_P (insn) = 1;
8478 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8483 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8484 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8485 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8486 deduce these equivalences by itself so it wasn't necessary to hold
8487 its hand so much. */
8490 rs6000_frame_related (insn, reg, val, reg2, rreg)
8499 real = copy_rtx (PATTERN (insn));
8501 real = replace_rtx (real, reg,
8502 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8503 STACK_POINTER_REGNUM),
8506 /* We expect that 'real' is either a SET or a PARALLEL containing
8507 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8508 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8510 if (GET_CODE (real) == SET)
8514 temp = simplify_rtx (SET_SRC (set));
8516 SET_SRC (set) = temp;
8517 temp = simplify_rtx (SET_DEST (set));
8519 SET_DEST (set) = temp;
8520 if (GET_CODE (SET_DEST (set)) == MEM)
8522 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8524 XEXP (SET_DEST (set), 0) = temp;
8527 else if (GET_CODE (real) == PARALLEL)
8530 for (i = 0; i < XVECLEN (real, 0); i++)
8531 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8533 rtx set = XVECEXP (real, 0, i);
8535 temp = simplify_rtx (SET_SRC (set));
8537 SET_SRC (set) = temp;
8538 temp = simplify_rtx (SET_DEST (set));
8540 SET_DEST (set) = temp;
8541 if (GET_CODE (SET_DEST (set)) == MEM)
8543 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8545 XEXP (SET_DEST (set), 0) = temp;
8547 RTX_FRAME_RELATED_P (set) = 1;
8553 if (reg2 != NULL_RTX)
8554 real = replace_rtx (real, reg2, rreg);
8556 RTX_FRAME_RELATED_P (insn) = 1;
8557 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8562 /* Returns an insn that has a vrsave set operation with the
8563 appropriate CLOBBERs. */
8566 generate_set_vrsave (reg, info, epiloguep)
8568 rs6000_stack_t *info;
8572 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8573 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8576 = gen_rtx_SET (VOIDmode,
8578 gen_rtx_UNSPEC_VOLATILE (SImode,
8579 gen_rtvec (2, reg, vrsave),
8584 /* We need to clobber the registers in the mask so the scheduler
8585 does not move sets to VRSAVE before sets of AltiVec registers.
8587 However, if the function receives nonlocal gotos, reload will set
8588 all call saved registers live. We will end up with:
8590 (set (reg 999) (mem))
8591 (parallel [ (set (reg vrsave) (unspec blah))
8592 (clobber (reg 999))])
8594 The clobber will cause the store into reg 999 to be dead, and
8595 flow will attempt to delete an epilogue insn. In this case, we
8596 need an unspec use/set of the register. */
8598 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8599 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8601 if (!epiloguep || call_used_regs [i])
8602 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8603 gen_rtx_REG (V4SImode, i));
8606 rtx reg = gen_rtx_REG (V4SImode, i);
8609 = gen_rtx_SET (VOIDmode,
8611 gen_rtx_UNSPEC (V4SImode,
8612 gen_rtvec (1, reg), 27));
8616 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8618 for (i = 0; i < nclobs; ++i)
8619 XVECEXP (insn, 0, i) = clobs[i];
8624 /* Emit function prologue as insns. */
8627 rs6000_emit_prologue ()
8629 rs6000_stack_t *info = rs6000_stack_info ();
8630 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8631 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8632 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8633 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8634 rtx frame_reg_rtx = sp_reg_rtx;
8635 rtx cr_save_rtx = NULL;
8637 int saving_FPRs_inline;
8638 int using_store_multiple;
8639 HOST_WIDE_INT sp_offset = 0;
8641 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8642 && info->first_gp_reg_save < 31);
8643 saving_FPRs_inline = (info->first_fp_reg_save == 64
8644 || FP_SAVE_INLINE (info->first_fp_reg_save));
8646 /* For V.4, update stack before we do any saving and set back pointer. */
8647 if (info->push_p && DEFAULT_ABI == ABI_V4)
8649 if (info->total_size < 32767)
8650 sp_offset = info->total_size;
8652 frame_reg_rtx = frame_ptr_rtx;
8653 rs6000_emit_allocate_stack (info->total_size,
8654 (frame_reg_rtx != sp_reg_rtx
8657 || info->first_fp_reg_save < 64
8658 || info->first_gp_reg_save < 32
8660 if (frame_reg_rtx != sp_reg_rtx)
8661 rs6000_emit_stack_tie ();
8664 /* Save AltiVec registers if needed. */
8665 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8669 /* There should be a non inline version of this, for when we
8670 are saving lots of vector registers. */
8671 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8672 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8674 rtx areg, savereg, mem;
8677 offset = info->altivec_save_offset + sp_offset
8678 + 16 * (i - info->first_altivec_reg_save);
8680 savereg = gen_rtx_REG (V4SImode, i);
8682 areg = gen_rtx_REG (Pmode, 0);
8683 emit_move_insn (areg, GEN_INT (offset));
8685 /* AltiVec addressing mode is [reg+reg]. */
8686 mem = gen_rtx_MEM (V4SImode,
8687 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8689 set_mem_alias_set (mem, rs6000_sr_alias_set);
8691 insn = emit_move_insn (mem, savereg);
8693 altivec_frame_fixup (insn, areg, offset);
8697 /* VRSAVE is a bit vector representing which AltiVec registers
8698 are used. The OS uses this to determine which vector
8699 registers to save on a context switch. We need to save
8700 VRSAVE on the stack frame, add whatever AltiVec registers we
8701 used in this function, and do the corresponding magic in the
8704 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8706 rtx reg, mem, vrsave;
8709 /* Get VRSAVE onto a GPR. */
8710 reg = gen_rtx_REG (SImode, 12);
8711 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8713 emit_insn (gen_get_vrsave_internal (reg));
8715 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8718 offset = info->vrsave_save_offset + sp_offset;
8720 = gen_rtx_MEM (SImode,
8721 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8722 set_mem_alias_set (mem, rs6000_sr_alias_set);
8723 insn = emit_move_insn (mem, reg);
8725 /* Include the registers in the mask. */
8726 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8728 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8731 /* If we use the link register, get it into r0. */
8732 if (info->lr_save_p)
8733 emit_move_insn (gen_rtx_REG (Pmode, 0),
8734 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8736 /* If we need to save CR, put it into r12. */
8737 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8739 cr_save_rtx = gen_rtx_REG (SImode, 12);
8740 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8743 /* Do any required saving of fpr's. If only one or two to save, do
8744 it ourselves. Otherwise, call function. */
8745 if (saving_FPRs_inline)
8748 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8749 if ((regs_ever_live[info->first_fp_reg_save+i]
8750 && ! call_used_regs[info->first_fp_reg_save+i]))
8753 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8754 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8755 GEN_INT (info->fp_save_offset
8758 mem = gen_rtx_MEM (DFmode, addr);
8759 set_mem_alias_set (mem, rs6000_sr_alias_set);
8761 insn = emit_move_insn (mem, reg);
8762 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8763 NULL_RTX, NULL_RTX);
8766 else if (info->first_fp_reg_save != 64)
8770 const char *alloc_rname;
8772 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8774 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8776 LINK_REGISTER_REGNUM));
8777 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8778 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8779 alloc_rname = ggc_strdup (rname);
8780 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8781 gen_rtx_SYMBOL_REF (Pmode,
8783 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8786 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8787 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8788 GEN_INT (info->fp_save_offset
8789 + sp_offset + 8*i));
8790 mem = gen_rtx_MEM (DFmode, addr);
8791 set_mem_alias_set (mem, rs6000_sr_alias_set);
8793 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8795 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8796 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8797 NULL_RTX, NULL_RTX);
8800 /* Save GPRs. This is done as a PARALLEL if we are using
8801 the store-multiple instructions. */
8802 if (using_store_multiple)
8806 p = rtvec_alloc (32 - info->first_gp_reg_save);
8807 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8808 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8811 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8812 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8813 GEN_INT (info->gp_save_offset
8816 mem = gen_rtx_MEM (reg_mode, addr);
8817 set_mem_alias_set (mem, rs6000_sr_alias_set);
8819 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8821 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8822 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8823 NULL_RTX, NULL_RTX);
8828 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8829 if ((regs_ever_live[info->first_gp_reg_save+i]
8830 && ! call_used_regs[info->first_gp_reg_save+i])
8831 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
8832 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8833 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8836 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8837 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8838 GEN_INT (info->gp_save_offset
8841 mem = gen_rtx_MEM (reg_mode, addr);
8842 set_mem_alias_set (mem, rs6000_sr_alias_set);
8844 insn = emit_move_insn (mem, reg);
8845 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8846 NULL_RTX, NULL_RTX);
8850 /* ??? There's no need to emit actual instructions here, but it's the
8851 easiest way to get the frame unwind information emitted. */
8852 if (current_function_calls_eh_return)
8854 unsigned int i, regno;
8860 regno = EH_RETURN_DATA_REGNO (i);
8861 if (regno == INVALID_REGNUM)
8864 reg = gen_rtx_REG (reg_mode, regno);
8865 addr = plus_constant (frame_reg_rtx,
8866 info->ehrd_offset + sp_offset
8867 + reg_size * (int) i);
8868 mem = gen_rtx_MEM (reg_mode, addr);
8869 set_mem_alias_set (mem, rs6000_sr_alias_set);
8871 insn = emit_move_insn (mem, reg);
8872 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8873 NULL_RTX, NULL_RTX);
8877 /* Save lr if we used it. */
8878 if (info->lr_save_p)
8880 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8881 GEN_INT (info->lr_save_offset + sp_offset));
8882 rtx reg = gen_rtx_REG (Pmode, 0);
8883 rtx mem = gen_rtx_MEM (Pmode, addr);
8884 /* This should not be of rs6000_sr_alias_set, because of
8885 __builtin_return_address. */
8887 insn = emit_move_insn (mem, reg);
8888 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8889 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8892 /* Save CR if we use any that must be preserved. */
8893 if (info->cr_save_p)
8895 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8896 GEN_INT (info->cr_save_offset + sp_offset));
8897 rtx mem = gen_rtx_MEM (SImode, addr);
8899 set_mem_alias_set (mem, rs6000_sr_alias_set);
8901 /* If r12 was used to hold the original sp, copy cr into r0 now
8903 if (REGNO (frame_reg_rtx) == 12)
8905 cr_save_rtx = gen_rtx_REG (SImode, 0);
8906 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8908 insn = emit_move_insn (mem, cr_save_rtx);
8910 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8911 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8912 OK. All we have to do is specify that _one_ condition code
8913 register is saved in this stack slot. The thrower's epilogue
8914 will then restore all the call-saved registers.
8915 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8916 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8917 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
8920 /* Update stack and set back pointer unless this is V.4,
8921 for which it was done previously. */
8922 if (info->push_p && DEFAULT_ABI != ABI_V4)
8923 rs6000_emit_allocate_stack (info->total_size, FALSE);
8925 /* Set frame pointer, if needed. */
8926 if (frame_pointer_needed)
8928 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
8930 RTX_FRAME_RELATED_P (insn) = 1;
8933 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8934 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
8935 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
8936 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
8938 /* If emit_load_toc_table will use the link register, we need to save
8939 it. We use R11 for this purpose because emit_load_toc_table
8940 can use register 0. This allows us to use a plain 'blr' to return
8941 from the procedure more often. */
8942 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
8943 && ! info->lr_save_p
8944 && EXIT_BLOCK_PTR->pred != NULL);
8945 if (save_LR_around_toc_setup)
8946 emit_move_insn (gen_rtx_REG (Pmode, 11),
8947 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8949 rs6000_emit_load_toc_table (TRUE);
8951 if (save_LR_around_toc_setup)
8952 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8953 gen_rtx_REG (Pmode, 11));
8956 if (DEFAULT_ABI == ABI_DARWIN
8957 && flag_pic && current_function_uses_pic_offset_table)
8959 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
8961 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
8964 emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
8965 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
8969 /* Write function prologue. */
8972 rs6000_output_function_prologue (file, size)
8974 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
8976 rs6000_stack_t *info = rs6000_stack_info ();
8978 if (TARGET_DEBUG_STACK)
8979 debug_stack_info (info);
8981 /* Write .extern for any function we will call to save and restore
8983 if (info->first_fp_reg_save < 64
8984 && !FP_SAVE_INLINE (info->first_fp_reg_save))
8985 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
8986 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
8987 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
8990 /* Write .extern for AIX common mode routines, if needed. */
8991 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
8993 fputs ("\t.extern __mulh\n", file);
8994 fputs ("\t.extern __mull\n", file);
8995 fputs ("\t.extern __divss\n", file);
8996 fputs ("\t.extern __divus\n", file);
8997 fputs ("\t.extern __quoss\n", file);
8998 fputs ("\t.extern __quous\n", file);
8999 common_mode_defined = 1;
9002 if (! HAVE_prologue)
9006 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9007 the "toplevel" insn chain. */
9008 emit_note (0, NOTE_INSN_DELETED);
9009 rs6000_emit_prologue ();
9010 emit_note (0, NOTE_INSN_DELETED);
9012 if (TARGET_DEBUG_STACK)
9013 debug_rtx_list (get_insns (), 100);
9014 final (get_insns (), file, FALSE, FALSE);
9018 rs6000_pic_labelno++;
9021 /* Emit function epilogue as insns.
9023 At present, dwarf2out_frame_debug_expr doesn't understand
9024 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9025 anywhere in the epilogue. Most of the insns below would in any case
9026 need special notes to explain where r11 is in relation to the stack. */
9029 rs6000_emit_epilogue (sibcall)
9032 rs6000_stack_t *info;
9033 int restoring_FPRs_inline;
9034 int using_load_multiple;
9035 int using_mfcr_multiple;
9036 int use_backchain_to_restore_sp;
9038 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9039 rtx frame_reg_rtx = sp_reg_rtx;
9040 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9041 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9044 info = rs6000_stack_info ();
9045 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9046 && info->first_gp_reg_save < 31);
9047 restoring_FPRs_inline = (sibcall
9048 || current_function_calls_eh_return
9049 || info->first_fp_reg_save == 64
9050 || FP_SAVE_INLINE (info->first_fp_reg_save));
9051 use_backchain_to_restore_sp = (frame_pointer_needed
9052 || current_function_calls_alloca
9053 || info->total_size > 32767);
9054 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9055 || rs6000_cpu == PROCESSOR_PPC603
9056 || rs6000_cpu == PROCESSOR_PPC750
9059 /* If we have a frame pointer, a call to alloca, or a large stack
9060 frame, restore the old stack pointer using the backchain. Otherwise,
9061 we know what size to update it with. */
9062 if (use_backchain_to_restore_sp)
9064 /* Under V.4, don't reset the stack pointer until after we're done
9065 loading the saved registers. */
9066 if (DEFAULT_ABI == ABI_V4)
9067 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9069 emit_move_insn (frame_reg_rtx,
9070 gen_rtx_MEM (Pmode, sp_reg_rtx));
9073 else if (info->push_p)
9075 if (DEFAULT_ABI == ABI_V4)
9076 sp_offset = info->total_size;
9079 emit_insn (TARGET_32BIT
9080 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9081 GEN_INT (info->total_size))
9082 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9083 GEN_INT (info->total_size)));
9087 /* Restore AltiVec registers if needed. */
9088 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9092 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9093 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9095 rtx addr, areg, mem;
9097 areg = gen_rtx_REG (Pmode, 0);
9099 (areg, GEN_INT (info->altivec_save_offset
9101 + 16 * (i - info->first_altivec_reg_save)));
9103 /* AltiVec addressing mode is [reg+reg]. */
9104 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9105 mem = gen_rtx_MEM (V4SImode, addr);
9106 set_mem_alias_set (mem, rs6000_sr_alias_set);
9108 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9112 /* Restore VRSAVE if needed. */
9113 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9117 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9118 GEN_INT (info->vrsave_save_offset + sp_offset));
9119 mem = gen_rtx_MEM (SImode, addr);
9120 set_mem_alias_set (mem, rs6000_sr_alias_set);
9121 reg = gen_rtx_REG (SImode, 12);
9122 emit_move_insn (reg, mem);
9124 emit_insn (generate_set_vrsave (reg, info, 1));
9127 /* Get the old lr if we saved it. */
9128 if (info->lr_save_p)
9130 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9131 GEN_INT (info->lr_save_offset + sp_offset));
9132 rtx mem = gen_rtx_MEM (Pmode, addr);
9134 set_mem_alias_set (mem, rs6000_sr_alias_set);
9136 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9139 /* Get the old cr if we saved it. */
9140 if (info->cr_save_p)
9142 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9143 GEN_INT (info->cr_save_offset + sp_offset));
9144 rtx mem = gen_rtx_MEM (SImode, addr);
9146 set_mem_alias_set (mem, rs6000_sr_alias_set);
9148 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9151 /* Set LR here to try to overlap restores below. */
9152 if (info->lr_save_p)
9153 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9154 gen_rtx_REG (Pmode, 0));
9156 /* Load exception handler data registers, if needed. */
9157 if (current_function_calls_eh_return)
9159 unsigned int i, regno;
9165 regno = EH_RETURN_DATA_REGNO (i);
9166 if (regno == INVALID_REGNUM)
9169 addr = plus_constant (frame_reg_rtx,
9170 info->ehrd_offset + sp_offset
9171 + reg_size * (int) i);
9172 mem = gen_rtx_MEM (reg_mode, addr);
9173 set_mem_alias_set (mem, rs6000_sr_alias_set);
9175 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9179 /* Restore GPRs. This is done as a PARALLEL if we are using
9180 the load-multiple instructions. */
9181 if (using_load_multiple)
9184 p = rtvec_alloc (32 - info->first_gp_reg_save);
9185 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9187 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9188 GEN_INT (info->gp_save_offset
9191 rtx mem = gen_rtx_MEM (reg_mode, addr);
9193 set_mem_alias_set (mem, rs6000_sr_alias_set);
9196 gen_rtx_SET (VOIDmode,
9197 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9200 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9203 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9204 if ((regs_ever_live[info->first_gp_reg_save+i]
9205 && ! call_used_regs[info->first_gp_reg_save+i])
9206 || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
9207 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9208 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9210 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9211 GEN_INT (info->gp_save_offset
9214 rtx mem = gen_rtx_MEM (reg_mode, addr);
9216 set_mem_alias_set (mem, rs6000_sr_alias_set);
9218 emit_move_insn (gen_rtx_REG (reg_mode,
9219 info->first_gp_reg_save + i),
9223 /* Restore fpr's if we need to do it without calling a function. */
9224 if (restoring_FPRs_inline)
9225 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9226 if ((regs_ever_live[info->first_fp_reg_save+i]
9227 && ! call_used_regs[info->first_fp_reg_save+i]))
9230 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9231 GEN_INT (info->fp_save_offset
9234 mem = gen_rtx_MEM (DFmode, addr);
9235 set_mem_alias_set (mem, rs6000_sr_alias_set);
9237 emit_move_insn (gen_rtx_REG (DFmode,
9238 info->first_fp_reg_save + i),
9242 /* If we saved cr, restore it here. Just those that were used. */
9243 if (info->cr_save_p)
9245 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9248 if (using_mfcr_multiple)
9250 for (i = 0; i < 8; i++)
9251 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9257 if (using_mfcr_multiple && count > 1)
9262 p = rtvec_alloc (count);
9265 for (i = 0; i < 8; i++)
9266 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9268 rtvec r = rtvec_alloc (2);
9269 RTVEC_ELT (r, 0) = r12_rtx;
9270 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9271 RTVEC_ELT (p, ndx) =
9272 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9273 gen_rtx_UNSPEC (CCmode, r, 20));
9276 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9281 for (i = 0; i < 8; i++)
9282 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9284 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9290 /* If this is V.4, unwind the stack pointer after all of the loads
9291 have been done. We need to emit a block here so that sched
9292 doesn't decide to move the sp change before the register restores
9293 (which may not have any obvious dependency on the stack). This
9294 doesn't hurt performance, because there is no scheduling that can
9295 be done after this point. */
9296 if (DEFAULT_ABI == ABI_V4)
9298 if (frame_reg_rtx != sp_reg_rtx)
9299 rs6000_emit_stack_tie ();
9301 if (use_backchain_to_restore_sp)
9303 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9305 else if (sp_offset != 0)
9307 emit_insn (Pmode == SImode
9308 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9309 GEN_INT (sp_offset))
9310 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9311 GEN_INT (sp_offset)));
9315 if (current_function_calls_eh_return)
9317 rtx sa = EH_RETURN_STACKADJ_RTX;
9318 emit_insn (Pmode == SImode
9319 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9320 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9326 if (! restoring_FPRs_inline)
9327 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9329 p = rtvec_alloc (2);
9331 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9332 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9334 LINK_REGISTER_REGNUM));
9336 /* If we have to restore more than two FP registers, branch to the
9337 restore function. It will return to our caller. */
9338 if (! restoring_FPRs_inline)
9342 const char *alloc_rname;
9344 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9345 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9346 alloc_rname = ggc_strdup (rname);
9347 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9348 gen_rtx_SYMBOL_REF (Pmode,
9351 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9354 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9355 GEN_INT (info->fp_save_offset + 8*i));
9356 mem = gen_rtx_MEM (DFmode, addr);
9357 set_mem_alias_set (mem, rs6000_sr_alias_set);
9359 RTVEC_ELT (p, i+3) =
9360 gen_rtx_SET (VOIDmode,
9361 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9366 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9370 /* Write function epilogue. */
9373 rs6000_output_function_epilogue (file, size)
9375 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9377 rs6000_stack_t *info = rs6000_stack_info ();
9378 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9380 if (! HAVE_epilogue)
9382 rtx insn = get_last_insn ();
9383 /* If the last insn was a BARRIER, we don't have to write anything except
9385 if (GET_CODE (insn) == NOTE)
9386 insn = prev_nonnote_insn (insn);
9387 if (insn == 0 || GET_CODE (insn) != BARRIER)
9389 /* This is slightly ugly, but at least we don't have two
9390 copies of the epilogue-emitting code. */
9393 /* A NOTE_INSN_DELETED is supposed to be at the start
9394 and end of the "toplevel" insn chain. */
9395 emit_note (0, NOTE_INSN_DELETED);
9396 rs6000_emit_epilogue (FALSE);
9397 emit_note (0, NOTE_INSN_DELETED);
9399 if (TARGET_DEBUG_STACK)
9400 debug_rtx_list (get_insns (), 100);
9401 final (get_insns (), file, FALSE, FALSE);
9406 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9409 We don't output a traceback table if -finhibit-size-directive was
9410 used. The documentation for -finhibit-size-directive reads
9411 ``don't output a @code{.size} assembler directive, or anything
9412 else that would cause trouble if the function is split in the
9413 middle, and the two halves are placed at locations far apart in
9414 memory.'' The traceback table has this property, since it
9415 includes the offset from the start of the function to the
9416 traceback table itself.
9418 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9419 different traceback table. */
9420 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9422 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9423 const char *language_string = lang_hooks.name;
9424 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9427 while (*fname == '.') /* V.4 encodes . in the name */
9430 /* Need label immediately before tbtab, so we can compute its offset
9431 from the function start. */
9434 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9435 ASM_OUTPUT_LABEL (file, fname);
9437 /* The .tbtab pseudo-op can only be used for the first eight
9438 expressions, since it can't handle the possibly variable
9439 length fields that follow. However, if you omit the optional
9440 fields, the assembler outputs zeros for all optional fields
9441 anyways, giving each variable length field is minimum length
9442 (as defined in sys/debug.h). Thus we can not use the .tbtab
9443 pseudo-op at all. */
9445 /* An all-zero word flags the start of the tbtab, for debuggers
9446 that have to find it by searching forward from the entry
9447 point or from the current pc. */
9448 fputs ("\t.long 0\n", file);
9450 /* Tbtab format type. Use format type 0. */
9451 fputs ("\t.byte 0,", file);
9453 /* Language type. Unfortunately, there doesn't seem to be any
9454 official way to get this info, so we use language_string. C
9455 is 0. C++ is 9. No number defined for Obj-C, so use the
9456 value for C for now. There is no official value for Java,
9457 although IBM appears to be using 13. There is no official value
9458 for Chill, so we've chosen 44 pseudo-randomly. */
9459 if (! strcmp (language_string, "GNU C")
9460 || ! strcmp (language_string, "GNU Objective-C"))
9462 else if (! strcmp (language_string, "GNU F77"))
9464 else if (! strcmp (language_string, "GNU Ada"))
9466 else if (! strcmp (language_string, "GNU Pascal"))
9468 else if (! strcmp (language_string, "GNU C++"))
9470 else if (! strcmp (language_string, "GNU Java"))
9472 else if (! strcmp (language_string, "GNU CHILL"))
9476 fprintf (file, "%d,", i);
9478 /* 8 single bit fields: global linkage (not set for C extern linkage,
9479 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9480 from start of procedure stored in tbtab, internal function, function
9481 has controlled storage, function has no toc, function uses fp,
9482 function logs/aborts fp operations. */
9483 /* Assume that fp operations are used if any fp reg must be saved. */
9484 fprintf (file, "%d,",
9485 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9487 /* 6 bitfields: function is interrupt handler, name present in
9488 proc table, function calls alloca, on condition directives
9489 (controls stack walks, 3 bits), saves condition reg, saves
9491 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9492 set up as a frame pointer, even when there is no alloca call. */
9493 fprintf (file, "%d,",
9494 ((optional_tbtab << 6)
9495 | ((optional_tbtab & frame_pointer_needed) << 5)
9496 | (info->cr_save_p << 1)
9497 | (info->lr_save_p)));
9499 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9501 fprintf (file, "%d,",
9502 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9504 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9505 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9509 /* Compute the parameter info from the function decl argument
9512 int next_parm_info_bit = 31;
9514 for (decl = DECL_ARGUMENTS (current_function_decl);
9515 decl; decl = TREE_CHAIN (decl))
9517 rtx parameter = DECL_INCOMING_RTL (decl);
9518 enum machine_mode mode = GET_MODE (parameter);
9520 if (GET_CODE (parameter) == REG)
9522 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9530 else if (mode == DFmode)
9535 /* If only one bit will fit, don't or in this entry. */
9536 if (next_parm_info_bit > 0)
9537 parm_info |= (bits << (next_parm_info_bit - 1));
9538 next_parm_info_bit -= 2;
9542 fixed_parms += ((GET_MODE_SIZE (mode)
9543 + (UNITS_PER_WORD - 1))
9545 next_parm_info_bit -= 1;
9551 /* Number of fixed point parameters. */
9552 /* This is actually the number of words of fixed point parameters; thus
9553 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9554 fprintf (file, "%d,", fixed_parms);
9556 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9558 /* This is actually the number of fp registers that hold parameters;
9559 and thus the maximum value is 13. */
9560 /* Set parameters on stack bit if parameters are not in their original
9561 registers, regardless of whether they are on the stack? Xlc
9562 seems to set the bit when not optimizing. */
9563 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9565 if (! optional_tbtab)
9568 /* Optional fields follow. Some are variable length. */
9570 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9572 /* There is an entry for each parameter in a register, in the order that
9573 they occur in the parameter list. Any intervening arguments on the
9574 stack are ignored. If the list overflows a long (max possible length
9575 34 bits) then completely leave off all elements that don't fit. */
9576 /* Only emit this long if there was at least one parameter. */
9577 if (fixed_parms || float_parms)
9578 fprintf (file, "\t.long %d\n", parm_info);
9580 /* Offset from start of code to tb table. */
9581 fputs ("\t.long ", file);
9582 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9584 RS6000_OUTPUT_BASENAME (file, fname);
9586 assemble_name (file, fname);
9590 RS6000_OUTPUT_BASENAME (file, fname);
9592 assemble_name (file, fname);
9596 /* Interrupt handler mask. */
9597 /* Omit this long, since we never set the interrupt handler bit
9600 /* Number of CTL (controlled storage) anchors. */
9601 /* Omit this long, since the has_ctl bit is never set above. */
9603 /* Displacement into stack of each CTL anchor. */
9604 /* Omit this list of longs, because there are no CTL anchors. */
9606 /* Length of function name. */
9607 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9609 /* Function name. */
9610 assemble_string (fname, strlen (fname));
9612 /* Register for alloca automatic storage; this is always reg 31.
9613 Only emit this if the alloca bit was set above. */
9614 if (frame_pointer_needed)
9615 fputs ("\t.byte 31\n", file);
9617 fputs ("\t.align 2\n", file);
9622 /* A C compound statement that outputs the assembler code for a thunk
9623 function, used to implement C++ virtual function calls with
9624 multiple inheritance. The thunk acts as a wrapper around a virtual
9625 function, adjusting the implicit object parameter before handing
9626 control off to the real function.
9628 First, emit code to add the integer DELTA to the location that
9629 contains the incoming first argument. Assume that this argument
9630 contains a pointer, and is the one used to pass the `this' pointer
9631 in C++. This is the incoming argument *before* the function
9632 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9633 values of all other incoming arguments.
9635 After the addition, emit code to jump to FUNCTION, which is a
9636 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9637 not touch the return address. Hence returning from FUNCTION will
9638 return to whoever called the current `thunk'.
9640 The effect must be as if FUNCTION had been called directly with the
9641 adjusted first argument. This macro is responsible for emitting
9642 all of the code for a thunk function; output_function_prologue()
9643 and output_function_epilogue() are not invoked.
9645 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9646 been extracted from it.) It might possibly be useful on some
9647 targets, but probably not.
9649 If you do not define this macro, the target-independent code in the
9650 C++ frontend will generate a less efficient heavyweight thunk that
9651 calls FUNCTION instead of jumping to it. The generic approach does
9652 not support varargs. */
9655 output_mi_thunk (file, thunk_fndecl, delta, function)
9657 tree thunk_fndecl ATTRIBUTE_UNUSED;
9661 const char *this_reg =
9662 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9665 const char *r0 = reg_names[0];
9666 const char *toc = reg_names[2];
9667 const char *schain = reg_names[11];
9668 const char *r12 = reg_names[12];
9670 static int labelno = 0;
9672 /* Small constants that can be done by one add instruction. */
9673 if (delta >= -32768 && delta <= 32767)
9675 if (! TARGET_NEW_MNEMONICS)
9676 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9678 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9681 /* Large constants that can be done by one addis instruction. */
9682 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9683 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9686 /* 32-bit constants that can be done by an add and addis instruction. */
9687 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9689 /* Break into two pieces, propagating the sign bit from the low
9690 word to the upper word. */
9691 int delta_high = delta >> 16;
9692 int delta_low = delta & 0xffff;
9693 if ((delta_low & 0x8000) != 0)
9696 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9699 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9702 if (! TARGET_NEW_MNEMONICS)
9703 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9705 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9708 /* 64-bit constants, fixme */
9712 /* Get the prefix in front of the names. */
9713 switch (DEFAULT_ABI)
9723 case ABI_AIX_NODESC:
9728 /* If the function is compiled in this module, jump to it directly.
9729 Otherwise, load up its address and jump to it. */
9731 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9733 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9734 && ! lookup_attribute ("longcall",
9735 TYPE_ATTRIBUTES (TREE_TYPE (function))))
9737 fprintf (file, "\tb %s", prefix);
9738 assemble_name (file, fname);
9739 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9745 switch (DEFAULT_ABI)
9751 /* Set up a TOC entry for the function. */
9752 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9754 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9757 if (TARGET_MINIMAL_TOC)
9758 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9761 fputs ("\t.tc ", file);
9762 assemble_name (file, fname);
9763 fputs ("[TC],", file);
9765 assemble_name (file, fname);
9768 if (TARGET_MINIMAL_TOC)
9769 asm_fprintf (file, (TARGET_32BIT)
9770 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9771 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9772 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9773 assemble_name (file, buf);
9774 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9775 fputs ("-(.LCTOC1)", file);
9776 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9778 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9782 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9785 asm_fprintf (file, "\tmtctr %s\n", r0);
9787 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9790 asm_fprintf (file, "\tbctr\n");
9793 case ABI_AIX_NODESC:
9795 fprintf (file, "\tb %s", prefix);
9796 assemble_name (file, fname);
9797 if (flag_pic) fputs ("@plt", file);
9803 fprintf (file, "\tb %s", prefix);
9804 if (flag_pic && !machopic_name_defined_p (fname))
9805 assemble_name (file, machopic_stub_name (fname));
9807 assemble_name (file, fname);
9816 /* A quick summary of the various types of 'constant-pool tables'
9819 Target Flags Name One table per
9820 AIX (none) AIX TOC object file
9821 AIX -mfull-toc AIX TOC object file
9822 AIX -mminimal-toc AIX minimal TOC translation unit
9823 SVR4/EABI (none) SVR4 SDATA object file
9824 SVR4/EABI -fpic SVR4 pic object file
9825 SVR4/EABI -fPIC SVR4 PIC translation unit
9826 SVR4/EABI -mrelocatable EABI TOC function
9827 SVR4/EABI -maix AIX TOC object file
9828 SVR4/EABI -maix -mminimal-toc
9829 AIX minimal TOC translation unit
9831 Name Reg. Set by entries contains:
9832 made by addrs? fp? sum?
9834 AIX TOC 2 crt0 as Y option option
9835 AIX minimal TOC 30 prolog gcc Y Y option
9836 SVR4 SDATA 13 crt0 gcc N Y N
9837 SVR4 pic 30 prolog ld Y not yet N
9838 SVR4 PIC 30 prolog gcc Y option option
9839 EABI TOC 30 prolog gcc Y option option
9843 /* Hash table stuff for keeping track of TOC entries. */
9845 struct toc_hash_struct
9847 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9848 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9850 enum machine_mode key_mode;
9854 static htab_t toc_hash_table;
9856 /* Hash functions for the hash table. */
9859 rs6000_hash_constant (k)
9862 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9863 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9864 int flen = strlen (format);
9867 if (GET_CODE (k) == LABEL_REF)
9868 return result * 1231 + X0INT (XEXP (k, 0), 3);
9870 if (GET_CODE (k) == CONST_DOUBLE)
9872 else if (GET_CODE (k) == CODE_LABEL)
9877 for (; fidx < flen; fidx++)
9878 switch (format[fidx])
9883 const char *str = XSTR (k, fidx);
9885 result = result * 613 + len;
9886 for (i = 0; i < len; i++)
9887 result = result * 613 + (unsigned) str[i];
9892 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9896 result = result * 613 + (unsigned) XINT (k, fidx);
9899 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9900 result = result * 613 + (unsigned) XWINT (k, fidx);
9904 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9905 result = result * 613 + (unsigned) (XWINT (k, fidx)
9916 toc_hash_function (hash_entry)
9917 const void * hash_entry;
9919 const struct toc_hash_struct *thc =
9920 (const struct toc_hash_struct *) hash_entry;
9921 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9924 /* Compare H1 and H2 for equivalence. */
9927 toc_hash_eq (h1, h2)
9931 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9932 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9934 if (((const struct toc_hash_struct *) h1)->key_mode
9935 != ((const struct toc_hash_struct *) h2)->key_mode)
9938 /* Gotcha: One of these const_doubles will be in memory.
9939 The other may be on the constant-pool chain.
9940 So rtx_equal_p will think they are different... */
9943 if (GET_CODE (r1) != GET_CODE (r2)
9944 || GET_MODE (r1) != GET_MODE (r2))
9946 if (GET_CODE (r1) == CONST_DOUBLE)
9948 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9950 for (i = 1; i < format_len; i++)
9951 if (XWINT (r1, i) != XWINT (r2, i))
9956 else if (GET_CODE (r1) == LABEL_REF)
9957 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9958 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9960 return rtx_equal_p (r1, r2);
9963 /* Mark the hash table-entry HASH_ENTRY. */
9966 toc_hash_mark_entry (hash_slot, unused)
9968 void * unused ATTRIBUTE_UNUSED;
9970 const struct toc_hash_struct * hash_entry =
9971 *(const struct toc_hash_struct **) hash_slot;
9972 rtx r = hash_entry->key;
9973 ggc_set_mark (hash_entry);
9974 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
9975 if (GET_CODE (r) == LABEL_REF)
9978 ggc_set_mark (XEXP (r, 0));
9985 /* Mark all the elements of the TOC hash-table *HT. */
9988 toc_hash_mark_table (vht)
9993 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
9996 /* These are the names given by the C++ front-end to vtables, and
9997 vtable-like objects. Ideally, this logic should not be here;
9998 instead, there should be some programmatic way of inquiring as
9999 to whether or not an object is a vtable. */
10001 #define VTABLE_NAME_P(NAME) \
10002 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10003 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10004 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10005 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10008 rs6000_output_symbol_ref (file, x)
10012 /* Currently C++ toc references to vtables can be emitted before it
10013 is decided whether the vtable is public or private. If this is
10014 the case, then the linker will eventually complain that there is
10015 a reference to an unknown section. Thus, for vtables only,
10016 we emit the TOC reference to reference the symbol and not the
10018 const char *name = XSTR (x, 0);
10020 if (VTABLE_NAME_P (name))
10022 RS6000_OUTPUT_BASENAME (file, name);
10025 assemble_name (file, name);
10028 /* Output a TOC entry. We derive the entry name from what is being
10032 output_toc (file, x, labelno, mode)
10036 enum machine_mode mode;
10039 const char *name = buf;
10040 const char *real_name;
10047 /* When the linker won't eliminate them, don't output duplicate
10048 TOC entries (this happens on AIX if there is any kind of TOC,
10049 and on SVR4 under -fPIC or -mrelocatable). */
10052 struct toc_hash_struct *h;
10055 h = ggc_alloc (sizeof (*h));
10057 h->key_mode = mode;
10058 h->labelno = labelno;
10060 found = htab_find_slot (toc_hash_table, h, 1);
10061 if (*found == NULL)
10063 else /* This is indeed a duplicate.
10064 Set this label equal to that label. */
10066 fputs ("\t.set ", file);
10067 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10068 fprintf (file, "%d,", labelno);
10069 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10070 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10076 /* If we're going to put a double constant in the TOC, make sure it's
10077 aligned properly when strict alignment is on. */
10078 if (GET_CODE (x) == CONST_DOUBLE
10079 && STRICT_ALIGNMENT
10080 && GET_MODE_BITSIZE (mode) >= 64
10081 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10082 ASM_OUTPUT_ALIGN (file, 3);
10085 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10087 /* Handle FP constants specially. Note that if we have a minimal
10088 TOC, things we put here aren't actually in the TOC, so we can allow
10090 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10092 REAL_VALUE_TYPE rv;
10095 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10096 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10100 if (TARGET_MINIMAL_TOC)
10101 fputs (DOUBLE_INT_ASM_OP, file);
10103 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10104 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10109 if (TARGET_MINIMAL_TOC)
10110 fputs ("\t.long ", file);
10112 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10113 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10117 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10119 REAL_VALUE_TYPE rv;
10122 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10123 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10127 if (TARGET_MINIMAL_TOC)
10128 fputs (DOUBLE_INT_ASM_OP, file);
10130 fprintf (file, "\t.tc FS_%lx[TC],", l);
10131 fprintf (file, "0x%lx00000000\n", l);
10136 if (TARGET_MINIMAL_TOC)
10137 fputs ("\t.long ", file);
10139 fprintf (file, "\t.tc FS_%lx[TC],", l);
10140 fprintf (file, "0x%lx\n", l);
10144 else if (GET_MODE (x) == VOIDmode
10145 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10147 unsigned HOST_WIDE_INT low;
10148 HOST_WIDE_INT high;
10150 if (GET_CODE (x) == CONST_DOUBLE)
10152 low = CONST_DOUBLE_LOW (x);
10153 high = CONST_DOUBLE_HIGH (x);
10156 #if HOST_BITS_PER_WIDE_INT == 32
10159 high = (low & 0x80000000) ? ~0 : 0;
10163 low = INTVAL (x) & 0xffffffff;
10164 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10168 /* TOC entries are always Pmode-sized, but since this
10169 is a bigendian machine then if we're putting smaller
10170 integer constants in the TOC we have to pad them.
10171 (This is still a win over putting the constants in
10172 a separate constant pool, because then we'd have
10173 to have both a TOC entry _and_ the actual constant.)
10175 For a 32-bit target, CONST_INT values are loaded and shifted
10176 entirely within `low' and can be stored in one TOC entry. */
10178 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10179 abort ();/* It would be easy to make this work, but it doesn't now. */
10181 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10182 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10183 POINTER_SIZE, &low, &high, 0);
10187 if (TARGET_MINIMAL_TOC)
10188 fputs (DOUBLE_INT_ASM_OP, file);
10190 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10191 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10196 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10198 if (TARGET_MINIMAL_TOC)
10199 fputs ("\t.long ", file);
10201 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10202 (long) high, (long) low);
10203 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10207 if (TARGET_MINIMAL_TOC)
10208 fputs ("\t.long ", file);
10210 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10211 fprintf (file, "0x%lx\n", (long) low);
10217 if (GET_CODE (x) == CONST)
10219 if (GET_CODE (XEXP (x, 0)) != PLUS)
10222 base = XEXP (XEXP (x, 0), 0);
10223 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10226 if (GET_CODE (base) == SYMBOL_REF)
10227 name = XSTR (base, 0);
10228 else if (GET_CODE (base) == LABEL_REF)
10229 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10230 else if (GET_CODE (base) == CODE_LABEL)
10231 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10235 STRIP_NAME_ENCODING (real_name, name);
10236 if (TARGET_MINIMAL_TOC)
10237 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10240 fprintf (file, "\t.tc %s", real_name);
10243 fprintf (file, ".N%d", - offset);
10245 fprintf (file, ".P%d", offset);
10247 fputs ("[TC],", file);
10250 /* Currently C++ toc references to vtables can be emitted before it
10251 is decided whether the vtable is public or private. If this is
10252 the case, then the linker will eventually complain that there is
10253 a TOC reference to an unknown section. Thus, for vtables only,
10254 we emit the TOC reference to reference the symbol and not the
10256 if (VTABLE_NAME_P (name))
10258 RS6000_OUTPUT_BASENAME (file, name);
10260 fprintf (file, "%d", offset);
10261 else if (offset > 0)
10262 fprintf (file, "+%d", offset);
10265 output_addr_const (file, x);
10269 /* Output an assembler pseudo-op to write an ASCII string of N characters
10270 starting at P to FILE.
10272 On the RS/6000, we have to do this using the .byte operation and
10273 write out special characters outside the quoted string.
10274 Also, the assembler is broken; very long strings are truncated,
10275 so we must artificially break them up early. */
10278 output_ascii (file, p, n)
10284 int i, count_string;
10285 const char *for_string = "\t.byte \"";
10286 const char *for_decimal = "\t.byte ";
10287 const char *to_close = NULL;
10290 for (i = 0; i < n; i++)
10293 if (c >= ' ' && c < 0177)
10296 fputs (for_string, file);
10299 /* Write two quotes to get one. */
10307 for_decimal = "\"\n\t.byte ";
10311 if (count_string >= 512)
10313 fputs (to_close, file);
10315 for_string = "\t.byte \"";
10316 for_decimal = "\t.byte ";
10324 fputs (for_decimal, file);
10325 fprintf (file, "%d", c);
10327 for_string = "\n\t.byte \"";
10328 for_decimal = ", ";
10334 /* Now close the string if we have written one. Then end the line. */
10336 fputs (to_close, file);
10339 /* Generate a unique section name for FILENAME for a section type
10340 represented by SECTION_DESC. Output goes into BUF.
10342 SECTION_DESC can be any string, as long as it is different for each
10343 possible section type.
10345 We name the section in the same manner as xlc. The name begins with an
10346 underscore followed by the filename (after stripping any leading directory
10347 names) with the last period replaced by the string SECTION_DESC. If
10348 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10352 rs6000_gen_section_name (buf, filename, section_desc)
10354 const char *filename;
10355 const char *section_desc;
10357 const char *q, *after_last_slash, *last_period = 0;
10361 after_last_slash = filename;
10362 for (q = filename; *q; q++)
10365 after_last_slash = q + 1;
10366 else if (*q == '.')
10370 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10371 *buf = (char *) permalloc (len);
10376 for (q = after_last_slash; *q; q++)
10378 if (q == last_period)
10380 strcpy (p, section_desc);
10381 p += strlen (section_desc);
10384 else if (ISALNUM (*q))
10388 if (last_period == 0)
10389 strcpy (p, section_desc);
10394 /* Emit profile function. */
10397 output_profile_hook (labelno)
10400 if (DEFAULT_ABI == ABI_AIX)
10403 const char *label_name;
10408 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10409 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10410 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10412 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10415 else if (DEFAULT_ABI == ABI_DARWIN)
10417 const char *mcount_name = RS6000_MCOUNT;
10418 int caller_addr_regno = LINK_REGISTER_REGNUM;
10420 /* Be conservative and always set this, at least for now. */
10421 current_function_uses_pic_offset_table = 1;
10424 /* For PIC code, set up a stub and collect the caller's address
10425 from r0, which is where the prologue puts it. */
10428 mcount_name = machopic_stub_name (mcount_name);
10429 if (current_function_uses_pic_offset_table)
10430 caller_addr_regno = 0;
10433 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10435 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10439 /* Write function profiler code. */
10442 output_function_profiler (file, labelno)
10448 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10449 switch (DEFAULT_ABI)
10455 case ABI_AIX_NODESC:
10456 fprintf (file, "\tmflr %s\n", reg_names[0]);
10459 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10460 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10461 reg_names[0], reg_names[1]);
10462 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10463 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10464 assemble_name (file, buf);
10465 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10467 else if (flag_pic > 1)
10469 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10470 reg_names[0], reg_names[1]);
10471 /* Now, we need to get the address of the label. */
10472 fputs ("\tbl 1f\n\t.long ", file);
10473 assemble_name (file, buf);
10474 fputs ("-.\n1:", file);
10475 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10476 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10477 reg_names[0], reg_names[11]);
10478 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10479 reg_names[0], reg_names[0], reg_names[11]);
10483 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10484 assemble_name (file, buf);
10485 fputs ("@ha\n", file);
10486 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10487 reg_names[0], reg_names[1]);
10488 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10489 assemble_name (file, buf);
10490 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10493 if (current_function_needs_context)
10494 asm_fprintf (file, "\tmr %s,%s\n",
10495 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10496 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10497 if (current_function_needs_context)
10498 asm_fprintf (file, "\tmr %s,%s\n",
10499 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10504 /* Don't do anything, done in output_profile_hook (). */
10510 /* Adjust the cost of a scheduling dependency. Return the new cost of
10511 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10514 rs6000_adjust_cost (insn, link, dep_insn, cost)
10517 rtx dep_insn ATTRIBUTE_UNUSED;
10520 if (! recog_memoized (insn))
10523 if (REG_NOTE_KIND (link) != 0)
10526 if (REG_NOTE_KIND (link) == 0)
10528 /* Data dependency; DEP_INSN writes a register that INSN reads
10529 some cycles later. */
10530 switch (get_attr_type (insn))
10533 /* Tell the first scheduling pass about the latency between
10534 a mtctr and bctr (and mtlr and br/blr). The first
10535 scheduling pass will not know about this latency since
10536 the mtctr instruction, which has the latency associated
10537 to it, will be generated by reload. */
10538 return TARGET_POWER ? 5 : 4;
10540 /* Leave some extra cycles between a compare and its
10541 dependent branch, to inhibit expensive mispredicts. */
10542 if ((rs6000_cpu_attr == CPU_PPC750
10543 || rs6000_cpu_attr == CPU_PPC7400
10544 || rs6000_cpu_attr == CPU_PPC7450)
10545 && recog_memoized (dep_insn)
10546 && (INSN_CODE (dep_insn) >= 0)
10547 && (get_attr_type (dep_insn) == TYPE_COMPARE
10548 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10549 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10550 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10555 /* Fall out to return default cost. */
10561 /* A C statement (sans semicolon) to update the integer scheduling
10562 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10563 INSN earlier, increase the priority to execute INSN later. Do not
10564 define this macro if you do not need to adjust the scheduling
10565 priorities of insns. */
10568 rs6000_adjust_priority (insn, priority)
10569 rtx insn ATTRIBUTE_UNUSED;
10572 /* On machines (like the 750) which have asymmetric integer units,
10573 where one integer unit can do multiply and divides and the other
10574 can't, reduce the priority of multiply/divide so it is scheduled
10575 before other integer operations. */
10578 if (! INSN_P (insn))
10581 if (GET_CODE (PATTERN (insn)) == USE)
10584 switch (rs6000_cpu_attr) {
10586 switch (get_attr_type (insn))
10593 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10594 priority, priority);
10595 if (priority >= 0 && priority < 0x01000000)
10605 /* Return how many instructions the machine can issue per cycle. */
10608 rs6000_issue_rate ()
10610 switch (rs6000_cpu_attr) {
10611 case CPU_RIOS1: /* ? */
10613 case CPU_PPC601: /* ? */
10632 /* Length in units of the trampoline for entering a nested function. */
10635 rs6000_trampoline_size ()
10639 switch (DEFAULT_ABI)
10645 ret = (TARGET_32BIT) ? 12 : 24;
10650 case ABI_AIX_NODESC:
10651 ret = (TARGET_32BIT) ? 40 : 48;
10658 /* Emit RTL insns to initialize the variable parts of a trampoline.
10659 FNADDR is an RTX for the address of the function's pure code.
10660 CXT is an RTX for the static chain value for the function. */
10663 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10668 enum machine_mode pmode = Pmode;
10669 int regsize = (TARGET_32BIT) ? 4 : 8;
10670 rtx ctx_reg = force_reg (pmode, cxt);
10672 switch (DEFAULT_ABI)
10677 /* Macros to shorten the code expansions below. */
10678 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10679 #define MEM_PLUS(addr,offset) \
10680 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10682 /* Under AIX, just build the 3 word function descriptor */
10685 rtx fn_reg = gen_reg_rtx (pmode);
10686 rtx toc_reg = gen_reg_rtx (pmode);
10687 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10688 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10689 emit_move_insn (MEM_DEREF (addr), fn_reg);
10690 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10691 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10695 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10698 case ABI_AIX_NODESC:
10699 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10700 FALSE, VOIDmode, 4,
10702 GEN_INT (rs6000_trampoline_size ()), SImode,
10712 /* Table of valid machine attributes. */
10714 const struct attribute_spec rs6000_attribute_table[] =
10716 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10717 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10718 { NULL, 0, 0, false, false, false, NULL }
10721 /* Handle a "longcall" attribute; arguments as in struct
10722 attribute_spec.handler. */
10725 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10728 tree args ATTRIBUTE_UNUSED;
10729 int flags ATTRIBUTE_UNUSED;
10730 bool *no_add_attrs;
10732 if (TREE_CODE (*node) != FUNCTION_TYPE
10733 && TREE_CODE (*node) != FIELD_DECL
10734 && TREE_CODE (*node) != TYPE_DECL)
10736 warning ("`%s' attribute only applies to functions",
10737 IDENTIFIER_POINTER (name));
10738 *no_add_attrs = true;
10744 /* Return a reference suitable for calling a function with the
10745 longcall attribute. */
10748 rs6000_longcall_ref (call_ref)
10751 const char *call_name;
10754 if (GET_CODE (call_ref) != SYMBOL_REF)
10757 /* System V adds '.' to the internal name, so skip them. */
10758 call_name = XSTR (call_ref, 0);
10759 if (*call_name == '.')
10761 while (*call_name == '.')
10764 node = get_identifier (call_name);
10765 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10768 return force_reg (Pmode, call_ref);
10772 /* A C statement or statements to switch to the appropriate section
10773 for output of RTX in mode MODE. You can assume that RTX is some
10774 kind of constant in RTL. The argument MODE is redundant except in
10775 the case of a `const_int' rtx. Select the section by calling
10776 `text_section' or one of the alternatives for other sections.
10778 Do not define this macro if you put all constants in the read-only
10781 #ifdef USING_ELFOS_H
10784 rs6000_select_rtx_section (mode, x)
10785 enum machine_mode mode;
10788 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10791 && (GET_CODE (x) == SYMBOL_REF
10792 || GET_CODE (x) == LABEL_REF
10793 || GET_CODE (x) == CONST))
10799 /* A C statement or statements to switch to the appropriate
10800 section for output of DECL. DECL is either a `VAR_DECL' node
10801 or a constant of some sort. RELOC indicates whether forming
10802 the initial value of DECL requires link-time relocations. */
10805 rs6000_select_section (decl, reloc)
10809 int size = int_size_in_bytes (TREE_TYPE (decl));
10812 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10819 needs_sdata = (size > 0
10820 && size <= g_switch_value
10821 && rs6000_sdata != SDATA_NONE
10822 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10824 if (TREE_CODE (decl) == STRING_CST)
10825 readonly = ! flag_writable_strings;
10826 else if (TREE_CODE (decl) == VAR_DECL)
10827 readonly = (! (flag_pic && reloc)
10828 && TREE_READONLY (decl)
10829 && ! TREE_SIDE_EFFECTS (decl)
10830 && DECL_INITIAL (decl)
10831 && DECL_INITIAL (decl) != error_mark_node
10832 && TREE_CONSTANT (DECL_INITIAL (decl)));
10833 else if (TREE_CODE (decl) == CONSTRUCTOR)
10834 readonly = (! (flag_pic && reloc)
10835 && ! TREE_SIDE_EFFECTS (decl)
10836 && TREE_CONSTANT (decl));
10839 if (needs_sdata && rs6000_sdata != SDATA_EABI)
10842 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
10845 /* A C statement to build up a unique section name, expressed as a
10846 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10847 RELOC indicates whether the initial value of EXP requires
10848 link-time relocations. If you do not define this macro, GCC will use
10849 the symbol name prefixed by `.' as the section name. Note - this
10850 macro can now be called for uninitialized data items as well as
10851 initialised data and functions. */
10854 rs6000_unique_section (decl, reloc)
10862 const char *prefix;
10864 static const char *const prefixes[7][2] =
10866 { ".rodata.", ".gnu.linkonce.r." },
10867 { ".sdata2.", ".gnu.linkonce.s2." },
10868 { ".data.", ".gnu.linkonce.d." },
10869 { ".sdata.", ".gnu.linkonce.s." },
10870 { ".bss.", ".gnu.linkonce.b." },
10871 { ".sbss.", ".gnu.linkonce.sb." },
10872 { ".text.", ".gnu.linkonce.t." }
10875 if (TREE_CODE (decl) == FUNCTION_DECL)
10884 if (TREE_CODE (decl) == STRING_CST)
10885 readonly = ! flag_writable_strings;
10886 else if (TREE_CODE (decl) == VAR_DECL)
10887 readonly = (! (flag_pic && reloc)
10888 && TREE_READONLY (decl)
10889 && ! TREE_SIDE_EFFECTS (decl)
10890 && TREE_CONSTANT (DECL_INITIAL (decl)));
10892 size = int_size_in_bytes (TREE_TYPE (decl));
10893 needs_sdata = (size > 0
10894 && size <= g_switch_value
10895 && rs6000_sdata != SDATA_NONE
10896 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10898 if (DECL_INITIAL (decl) == 0
10899 || DECL_INITIAL (decl) == error_mark_node)
10901 else if (! readonly)
10908 /* .sdata2 is only for EABI. */
10909 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10915 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10916 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10917 len = strlen (name) + strlen (prefix);
10918 string = alloca (len + 1);
10920 sprintf (string, "%s%s", prefix, name);
10922 DECL_SECTION_NAME (decl) = build_string (len, string);
10926 /* If we are referencing a function that is static or is known to be
10927 in this file, make the SYMBOL_REF special. We can use this to indicate
10928 that we can branch to this function without emitting a no-op after the
10929 call. For real AIX calling sequences, we also replace the
10930 function name with the real name (1 or 2 leading .'s), rather than
10931 the function descriptor name. This saves a lot of overriding code
10932 to read the prefixes. */
10935 rs6000_encode_section_info (decl, first)
10942 if (TREE_CODE (decl) == FUNCTION_DECL)
10944 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10945 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
10946 && ! DECL_WEAK (decl))
10947 SYMBOL_REF_FLAG (sym_ref) = 1;
10949 if (DEFAULT_ABI == ABI_AIX)
10951 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
10952 size_t len2 = strlen (XSTR (sym_ref, 0));
10953 char *str = alloca (len1 + len2 + 1);
10956 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
10958 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
10961 else if (rs6000_sdata != SDATA_NONE
10962 && DEFAULT_ABI == ABI_V4
10963 && TREE_CODE (decl) == VAR_DECL)
10965 int size = int_size_in_bytes (TREE_TYPE (decl));
10966 tree section_name = DECL_SECTION_NAME (decl);
10967 const char *name = (char *)0;
10972 if (TREE_CODE (section_name) == STRING_CST)
10974 name = TREE_STRING_POINTER (section_name);
10975 len = TREE_STRING_LENGTH (section_name);
10981 if ((size > 0 && size <= g_switch_value)
10983 && ((len == sizeof (".sdata") - 1
10984 && strcmp (name, ".sdata") == 0)
10985 || (len == sizeof (".sdata2") - 1
10986 && strcmp (name, ".sdata2") == 0)
10987 || (len == sizeof (".sbss") - 1
10988 && strcmp (name, ".sbss") == 0)
10989 || (len == sizeof (".sbss2") - 1
10990 && strcmp (name, ".sbss2") == 0)
10991 || (len == sizeof (".PPC.EMB.sdata0") - 1
10992 && strcmp (name, ".PPC.EMB.sdata0") == 0)
10993 || (len == sizeof (".PPC.EMB.sbss0") - 1
10994 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
10996 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
10997 size_t len = strlen (XSTR (sym_ref, 0));
10998 char *str = alloca (len + 2);
11001 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11002 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11007 #endif /* USING_ELFOS_H */
11010 /* Return a REG that occurs in ADDR with coefficient 1.
11011 ADDR can be effectively incremented by incrementing REG.
11013 r0 is special and we must not select it as an address
11014 register by this routine since our caller will try to
11015 increment the returned register via an "la" instruction. */
11018 find_addr_reg (addr)
11021 while (GET_CODE (addr) == PLUS)
11023 if (GET_CODE (XEXP (addr, 0)) == REG
11024 && REGNO (XEXP (addr, 0)) != 0)
11025 addr = XEXP (addr, 0);
11026 else if (GET_CODE (XEXP (addr, 1)) == REG
11027 && REGNO (XEXP (addr, 1)) != 0)
11028 addr = XEXP (addr, 1);
11029 else if (CONSTANT_P (XEXP (addr, 0)))
11030 addr = XEXP (addr, 1);
11031 else if (CONSTANT_P (XEXP (addr, 1)))
11032 addr = XEXP (addr, 0);
11036 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11042 rs6000_fatal_bad_address (op)
11045 fatal_insn ("bad address", op);
11048 /* Called to register all of our global variables with the garbage
11052 rs6000_add_gc_roots ()
11054 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11055 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11057 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11058 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11059 toc_hash_mark_table);
11062 machopic_add_gc_roots ();
11069 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11070 reference and a constant. */
11073 symbolic_operand (op)
11076 switch (GET_CODE (op))
11083 return (GET_CODE (op) == SYMBOL_REF ||
11084 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11085 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11086 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11093 #ifdef RS6000_LONG_BRANCH
11095 static tree stub_list = 0;
11097 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11098 procedure calls to the linked list. */
11101 add_compiler_stub (label_name, function_name, line_number)
11103 tree function_name;
11106 tree stub = build_tree_list (function_name, label_name);
11107 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11108 TREE_CHAIN (stub) = stub_list;
11112 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11113 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11114 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11116 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11117 handling procedure calls from the linked list and initializes the
11121 output_compiler_stub ()
11124 char label_buf[256];
11126 tree tmp_stub, stub;
11129 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11131 fprintf (asm_out_file,
11132 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11134 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11135 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11136 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11137 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11139 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11141 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11144 label_buf[0] = '_';
11145 strcpy (label_buf+1,
11146 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11149 strcpy (tmp_buf, "lis r12,hi16(");
11150 strcat (tmp_buf, label_buf);
11151 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11152 strcat (tmp_buf, label_buf);
11153 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11154 output_asm_insn (tmp_buf, 0);
11156 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11157 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11158 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11159 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11165 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11166 already there or not. */
11169 no_previous_def (function_name)
11170 tree function_name;
11173 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11174 if (function_name == STUB_FUNCTION_NAME (stub))
11179 /* GET_PREV_LABEL gets the label name from the previous definition of
11183 get_prev_label (function_name)
11184 tree function_name;
11187 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11188 if (function_name == STUB_FUNCTION_NAME (stub))
11189 return STUB_LABEL_NAME (stub);
11193 /* INSN is either a function call or a millicode call. It may have an
11194 unconditional jump in its delay slot.
11196 CALL_DEST is the routine we are calling. */
11199 output_call (insn, call_dest, operand_number)
11202 int operand_number;
11204 static char buf[256];
11205 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11208 tree funname = get_identifier (XSTR (call_dest, 0));
11210 if (no_previous_def (funname))
11213 rtx label_rtx = gen_label_rtx ();
11214 char *label_buf, temp_buf[256];
11215 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11216 CODE_LABEL_NUMBER (label_rtx));
11217 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11218 labelname = get_identifier (label_buf);
11219 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11221 line_number = NOTE_LINE_NUMBER (insn);
11222 add_compiler_stub (labelname, funname, line_number);
11225 labelname = get_prev_label (funname);
11227 sprintf (buf, "jbsr %%z%d,%.246s",
11228 operand_number, IDENTIFIER_POINTER (labelname));
11233 sprintf (buf, "bl %%z%d", operand_number);
11238 #endif /* RS6000_LONG_BRANCH */
11240 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11242 const char *const symbol_ = (SYMBOL); \
11243 char *buffer_ = (BUF); \
11244 if (symbol_[0] == '"') \
11246 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11248 else if (name_needs_quotes(symbol_)) \
11250 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11254 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11259 /* Generate PIC and indirect symbol stubs. */
11262 machopic_output_stub (file, symb, stub)
11264 const char *symb, *stub;
11266 unsigned int length;
11267 char *symbol_name, *lazy_ptr_name;
11268 char *local_label_0;
11269 static int label = 0;
11271 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11272 STRIP_NAME_ENCODING (symb, symb);
11276 length = strlen (symb);
11277 symbol_name = alloca (length + 32);
11278 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11280 lazy_ptr_name = alloca (length + 32);
11281 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11283 local_label_0 = alloca (length + 32);
11284 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11287 machopic_picsymbol_stub_section ();
11289 machopic_symbol_stub_section ();
11291 fprintf (file, "%s:\n", stub);
11292 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11296 fprintf (file, "\tmflr r0\n");
11297 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11298 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11299 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11300 lazy_ptr_name, local_label_0);
11301 fprintf (file, "\tmtlr r0\n");
11302 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11303 lazy_ptr_name, local_label_0);
11304 fprintf (file, "\tmtctr r12\n");
11305 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11306 lazy_ptr_name, local_label_0);
11307 fprintf (file, "\tbctr\n");
11310 fprintf (file, "non-pure not supported\n");
11312 machopic_lazy_symbol_ptr_section ();
11313 fprintf (file, "%s:\n", lazy_ptr_name);
11314 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11315 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11318 /* Legitimize PIC addresses. If the address is already
11319 position-independent, we return ORIG. Newly generated
11320 position-independent addresses go into a reg. This is REG if non
11321 zero, otherwise we allocate register(s) as necessary. */
11323 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11326 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11328 enum machine_mode mode;
11333 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11334 reg = gen_reg_rtx (Pmode);
11336 if (GET_CODE (orig) == CONST)
11338 if (GET_CODE (XEXP (orig, 0)) == PLUS
11339 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11342 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11345 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11348 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11354 if (GET_CODE (offset) == CONST_INT)
11356 if (SMALL_INT (offset))
11357 return plus_constant (base, INTVAL (offset));
11358 else if (! reload_in_progress && ! reload_completed)
11359 offset = force_reg (Pmode, offset);
11362 rtx mem = force_const_mem (Pmode, orig);
11363 return machopic_legitimize_pic_address (mem, Pmode, reg);
11366 return gen_rtx (PLUS, Pmode, base, offset);
11369 /* Fall back on generic machopic code. */
11370 return machopic_legitimize_pic_address (orig, mode, reg);
11373 /* This is just a placeholder to make linking work without having to
11374 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11375 ever needed for Darwin (not too likely!) this would have to get a
11376 real definition. */
11383 #endif /* TARGET_MACHO */
11386 static unsigned int
11387 rs6000_elf_section_type_flags (decl, name, reloc)
11392 unsigned int flags = default_section_type_flags (decl, name, reloc);
11394 if (TARGET_RELOCATABLE)
11395 flags |= SECTION_WRITE;
11400 /* Record an element in the table of global constructors. SYMBOL is
11401 a SYMBOL_REF of the function to be called; PRIORITY is a number
11402 between 0 and MAX_INIT_PRIORITY.
11404 This differs from default_named_section_asm_out_constructor in
11405 that we have special handling for -mrelocatable. */
11408 rs6000_elf_asm_out_constructor (symbol, priority)
11412 const char *section = ".ctors";
11415 if (priority != DEFAULT_INIT_PRIORITY)
11417 sprintf (buf, ".ctors.%.5u",
11418 /* Invert the numbering so the linker puts us in the proper
11419 order; constructors are run from right to left, and the
11420 linker sorts in increasing order. */
11421 MAX_INIT_PRIORITY - priority);
11425 named_section_flags (section, SECTION_WRITE);
11426 assemble_align (POINTER_SIZE);
11428 if (TARGET_RELOCATABLE)
11430 fputs ("\t.long (", asm_out_file);
11431 output_addr_const (asm_out_file, symbol);
11432 fputs (")@fixup\n", asm_out_file);
11435 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11439 rs6000_elf_asm_out_destructor (symbol, priority)
11443 const char *section = ".dtors";
11446 if (priority != DEFAULT_INIT_PRIORITY)
11448 sprintf (buf, ".dtors.%.5u",
11449 /* Invert the numbering so the linker puts us in the proper
11450 order; constructors are run from right to left, and the
11451 linker sorts in increasing order. */
11452 MAX_INIT_PRIORITY - priority);
11456 named_section_flags (section, SECTION_WRITE);
11457 assemble_align (POINTER_SIZE);
11459 if (TARGET_RELOCATABLE)
11461 fputs ("\t.long (", asm_out_file);
11462 output_addr_const (asm_out_file, symbol);
11463 fputs (")@fixup\n", asm_out_file);
11466 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11470 #ifdef OBJECT_FORMAT_COFF
11472 xcoff_asm_named_section (name, flags)
11474 unsigned int flags ATTRIBUTE_UNUSED;
11476 fprintf (asm_out_file, "\t.csect %s\n", name);